diff --git a/.core_files.yaml b/.core_files.yaml index 067a6a2b41d..3f92ed87a84 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -49,6 +49,7 @@ base_platforms: &base_platforms - homeassistant/components/tts/** - homeassistant/components/update/** - homeassistant/components/vacuum/** + - homeassistant/components/valve/** - homeassistant/components/water_heater/** - homeassistant/components/weather/** @@ -145,6 +146,7 @@ requirements: &requirements - homeassistant/package_constraints.txt - requirements*.txt - pyproject.toml + - script/licenses.py any: - *base_platforms diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 003b4908b17..00000000000 --- a/.coveragerc +++ /dev/null @@ -1,1738 +0,0 @@ -# Sorted by hassfest. -# -# To sort, run python3 -m script.hassfest -p coverage - -[run] -source = homeassistant -omit = - homeassistant/__main__.py - homeassistant/helpers/backports/aiohttp_resolver.py - homeassistant/helpers/signal.py - homeassistant/scripts/__init__.py - homeassistant/scripts/benchmark/__init__.py - homeassistant/scripts/check_config.py - homeassistant/scripts/ensure_config.py - homeassistant/scripts/macos/__init__.py - - # omit pieces of code that rely on external devices being present - homeassistant/components/acer_projector/* - homeassistant/components/acmeda/__init__.py - homeassistant/components/acmeda/base.py - homeassistant/components/acmeda/cover.py - homeassistant/components/acmeda/errors.py - homeassistant/components/acmeda/helpers.py - homeassistant/components/acmeda/hub.py - homeassistant/components/acmeda/sensor.py - homeassistant/components/actiontec/const.py - homeassistant/components/actiontec/device_tracker.py - homeassistant/components/actiontec/model.py - homeassistant/components/adax/__init__.py - homeassistant/components/adax/climate.py - homeassistant/components/adguard/__init__.py - homeassistant/components/adguard/entity.py - homeassistant/components/adguard/sensor.py - 
homeassistant/components/adguard/switch.py - homeassistant/components/ads/* - homeassistant/components/aftership/__init__.py - homeassistant/components/aftership/sensor.py - homeassistant/components/agent_dvr/alarm_control_panel.py - homeassistant/components/agent_dvr/camera.py - homeassistant/components/agent_dvr/helpers.py - homeassistant/components/airnow/__init__.py - homeassistant/components/airnow/coordinator.py - homeassistant/components/airnow/sensor.py - homeassistant/components/airq/__init__.py - homeassistant/components/airq/coordinator.py - homeassistant/components/airq/sensor.py - homeassistant/components/airthings/__init__.py - homeassistant/components/airthings/sensor.py - homeassistant/components/airthings_ble/__init__.py - homeassistant/components/airthings_ble/sensor.py - homeassistant/components/airtouch4/__init__.py - homeassistant/components/airtouch4/climate.py - homeassistant/components/airtouch4/coordinator.py - homeassistant/components/airtouch5/__init__.py - homeassistant/components/airtouch5/climate.py - homeassistant/components/airtouch5/entity.py - homeassistant/components/airvisual/__init__.py - homeassistant/components/airvisual/sensor.py - homeassistant/components/airvisual_pro/__init__.py - homeassistant/components/airvisual_pro/sensor.py - homeassistant/components/aladdin_connect/__init__.py - homeassistant/components/aladdin_connect/api.py - homeassistant/components/aladdin_connect/application_credentials.py - homeassistant/components/aladdin_connect/cover.py - homeassistant/components/aladdin_connect/sensor.py - homeassistant/components/alarmdecoder/__init__.py - homeassistant/components/alarmdecoder/alarm_control_panel.py - homeassistant/components/alarmdecoder/binary_sensor.py - homeassistant/components/alarmdecoder/entity.py - homeassistant/components/alarmdecoder/sensor.py - homeassistant/components/alpha_vantage/sensor.py - homeassistant/components/amazon_polly/* - homeassistant/components/ambient_station/__init__.py - 
homeassistant/components/ambient_station/binary_sensor.py - homeassistant/components/ambient_station/entity.py - homeassistant/components/ambient_station/sensor.py - homeassistant/components/amcrest/* - homeassistant/components/ampio/* - homeassistant/components/android_ip_webcam/switch.py - homeassistant/components/anel_pwrctrl/switch.py - homeassistant/components/anthemav/media_player.py - homeassistant/components/apple_tv/__init__.py - homeassistant/components/apple_tv/browse_media.py - homeassistant/components/apple_tv/media_player.py - homeassistant/components/apple_tv/remote.py - homeassistant/components/aprilaire/__init__.py - homeassistant/components/aprilaire/climate.py - homeassistant/components/aprilaire/coordinator.py - homeassistant/components/aprilaire/entity.py - homeassistant/components/aprilaire/sensor.py - homeassistant/components/apsystems/__init__.py - homeassistant/components/apsystems/coordinator.py - homeassistant/components/apsystems/entity.py - homeassistant/components/apsystems/number.py - homeassistant/components/apsystems/sensor.py - homeassistant/components/aqualogic/* - homeassistant/components/aquostv/media_player.py - homeassistant/components/arcam_fmj/__init__.py - homeassistant/components/arcam_fmj/media_player.py - homeassistant/components/arest/binary_sensor.py - homeassistant/components/arest/sensor.py - homeassistant/components/arest/switch.py - homeassistant/components/arris_tg2492lg/* - homeassistant/components/aruba/device_tracker.py - homeassistant/components/arwn/sensor.py - homeassistant/components/aseko_pool_live/__init__.py - homeassistant/components/aseko_pool_live/binary_sensor.py - homeassistant/components/aseko_pool_live/coordinator.py - homeassistant/components/aseko_pool_live/entity.py - homeassistant/components/aseko_pool_live/sensor.py - homeassistant/components/asterisk_cdr/mailbox.py - homeassistant/components/asterisk_mbox/mailbox.py - homeassistant/components/aten_pe/* - homeassistant/components/atome/* - 
homeassistant/components/aurora/__init__.py - homeassistant/components/aurora/binary_sensor.py - homeassistant/components/aurora/coordinator.py - homeassistant/components/aurora/entity.py - homeassistant/components/aurora/sensor.py - homeassistant/components/avea/light.py - homeassistant/components/avion/light.py - homeassistant/components/awair/coordinator.py - homeassistant/components/azure_service_bus/* - homeassistant/components/baf/__init__.py - homeassistant/components/baf/binary_sensor.py - homeassistant/components/baf/climate.py - homeassistant/components/baf/entity.py - homeassistant/components/baf/fan.py - homeassistant/components/baf/light.py - homeassistant/components/baf/number.py - homeassistant/components/baf/sensor.py - homeassistant/components/baf/switch.py - homeassistant/components/baidu/tts.py - homeassistant/components/bang_olufsen/entity.py - homeassistant/components/bang_olufsen/media_player.py - homeassistant/components/bang_olufsen/util.py - homeassistant/components/bang_olufsen/websocket.py - homeassistant/components/bbox/device_tracker.py - homeassistant/components/bbox/sensor.py - homeassistant/components/beewi_smartclim/sensor.py - homeassistant/components/bitcoin/sensor.py - homeassistant/components/bizkaibus/sensor.py - homeassistant/components/blink/__init__.py - homeassistant/components/blink/alarm_control_panel.py - homeassistant/components/blink/binary_sensor.py - homeassistant/components/blink/camera.py - homeassistant/components/blink/sensor.py - homeassistant/components/blink/switch.py - homeassistant/components/blinksticklight/light.py - homeassistant/components/blockchain/sensor.py - homeassistant/components/bloomsky/* - homeassistant/components/bluesound/* - homeassistant/components/bluetooth_tracker/* - homeassistant/components/bmw_connected_drive/notify.py - homeassistant/components/bosch_shc/__init__.py - homeassistant/components/bosch_shc/binary_sensor.py - homeassistant/components/bosch_shc/cover.py - 
homeassistant/components/bosch_shc/entity.py - homeassistant/components/bosch_shc/sensor.py - homeassistant/components/bosch_shc/switch.py - homeassistant/components/braviatv/button.py - homeassistant/components/braviatv/coordinator.py - homeassistant/components/braviatv/media_player.py - homeassistant/components/braviatv/remote.py - homeassistant/components/bring/coordinator.py - homeassistant/components/bring/todo.py - homeassistant/components/broadlink/climate.py - homeassistant/components/broadlink/light.py - homeassistant/components/broadlink/remote.py - homeassistant/components/broadlink/switch.py - homeassistant/components/broadlink/updater.py - homeassistant/components/brottsplatskartan/sensor.py - homeassistant/components/browser/* - homeassistant/components/brunt/__init__.py - homeassistant/components/brunt/cover.py - homeassistant/components/bsblan/climate.py - homeassistant/components/bt_home_hub_5/device_tracker.py - homeassistant/components/bt_smarthub/device_tracker.py - homeassistant/components/buienradar/sensor.py - homeassistant/components/buienradar/util.py - homeassistant/components/buienradar/weather.py - homeassistant/components/canary/camera.py - homeassistant/components/cert_expiry/helper.py - homeassistant/components/channels/* - homeassistant/components/cisco_ios/device_tracker.py - homeassistant/components/cisco_mobility_express/device_tracker.py - homeassistant/components/cisco_webex_teams/notify.py - homeassistant/components/citybikes/sensor.py - homeassistant/components/clementine/media_player.py - homeassistant/components/clickatell/notify.py - homeassistant/components/clicksend/notify.py - homeassistant/components/clicksend_tts/notify.py - homeassistant/components/cmus/media_player.py - homeassistant/components/coinbase/sensor.py - homeassistant/components/comed_hourly_pricing/sensor.py - homeassistant/components/comelit/__init__.py - homeassistant/components/comelit/alarm_control_panel.py - 
homeassistant/components/comelit/climate.py - homeassistant/components/comelit/coordinator.py - homeassistant/components/comelit/cover.py - homeassistant/components/comelit/humidifier.py - homeassistant/components/comelit/light.py - homeassistant/components/comelit/sensor.py - homeassistant/components/comelit/switch.py - homeassistant/components/comfoconnect/fan.py - homeassistant/components/concord232/alarm_control_panel.py - homeassistant/components/concord232/binary_sensor.py - homeassistant/components/control4/__init__.py - homeassistant/components/control4/director_utils.py - homeassistant/components/control4/light.py - homeassistant/components/control4/media_player.py - homeassistant/components/coolmaster/coordinator.py - homeassistant/components/cppm_tracker/device_tracker.py - homeassistant/components/crownstone/__init__.py - homeassistant/components/crownstone/devices.py - homeassistant/components/crownstone/entry_manager.py - homeassistant/components/crownstone/helpers.py - homeassistant/components/crownstone/light.py - homeassistant/components/crownstone/listeners.py - homeassistant/components/cups/sensor.py - homeassistant/components/currencylayer/sensor.py - homeassistant/components/daikin/climate.py - homeassistant/components/daikin/sensor.py - homeassistant/components/daikin/switch.py - homeassistant/components/danfoss_air/* - homeassistant/components/ddwrt/device_tracker.py - homeassistant/components/decora/light.py - homeassistant/components/decora_wifi/light.py - homeassistant/components/delijn/* - homeassistant/components/deluge/__init__.py - homeassistant/components/deluge/coordinator.py - homeassistant/components/deluge/sensor.py - homeassistant/components/deluge/switch.py - homeassistant/components/denon/media_player.py - homeassistant/components/denonavr/__init__.py - homeassistant/components/denonavr/media_player.py - homeassistant/components/denonavr/receiver.py - homeassistant/components/digital_ocean/* - 
homeassistant/components/discogs/sensor.py - homeassistant/components/discord/__init__.py - homeassistant/components/discord/notify.py - homeassistant/components/dlib_face_detect/image_processing.py - homeassistant/components/dlib_face_identify/image_processing.py - homeassistant/components/dlink/data.py - homeassistant/components/dominos/* - homeassistant/components/doods/* - homeassistant/components/doorbird/__init__.py - homeassistant/components/doorbird/button.py - homeassistant/components/doorbird/camera.py - homeassistant/components/doorbird/device.py - homeassistant/components/doorbird/entity.py - homeassistant/components/doorbird/util.py - homeassistant/components/doorbird/view.py - homeassistant/components/dormakaba_dkey/__init__.py - homeassistant/components/dormakaba_dkey/binary_sensor.py - homeassistant/components/dormakaba_dkey/entity.py - homeassistant/components/dormakaba_dkey/lock.py - homeassistant/components/dormakaba_dkey/sensor.py - homeassistant/components/dovado/* - homeassistant/components/downloader/__init__.py - homeassistant/components/dte_energy_bridge/sensor.py - homeassistant/components/dublin_bus_transport/sensor.py - homeassistant/components/dunehd/__init__.py - homeassistant/components/dunehd/media_player.py - homeassistant/components/duotecno/__init__.py - homeassistant/components/duotecno/binary_sensor.py - homeassistant/components/duotecno/climate.py - homeassistant/components/duotecno/cover.py - homeassistant/components/duotecno/entity.py - homeassistant/components/duotecno/light.py - homeassistant/components/duotecno/switch.py - homeassistant/components/dwd_weather_warnings/coordinator.py - homeassistant/components/dwd_weather_warnings/sensor.py - homeassistant/components/dweet/* - homeassistant/components/ebox/sensor.py - homeassistant/components/ebusd/* - homeassistant/components/ecoal_boiler/* - homeassistant/components/ecobee/__init__.py - homeassistant/components/ecobee/binary_sensor.py - 
homeassistant/components/ecobee/climate.py - homeassistant/components/ecobee/notify.py - homeassistant/components/ecobee/sensor.py - homeassistant/components/ecobee/weather.py - homeassistant/components/ecoforest/__init__.py - homeassistant/components/ecoforest/coordinator.py - homeassistant/components/ecoforest/entity.py - homeassistant/components/ecoforest/number.py - homeassistant/components/ecoforest/sensor.py - homeassistant/components/ecoforest/switch.py - homeassistant/components/econet/__init__.py - homeassistant/components/econet/binary_sensor.py - homeassistant/components/econet/climate.py - homeassistant/components/econet/sensor.py - homeassistant/components/econet/water_heater.py - homeassistant/components/ecovacs/controller.py - homeassistant/components/ecovacs/entity.py - homeassistant/components/ecovacs/image.py - homeassistant/components/ecovacs/number.py - homeassistant/components/ecovacs/util.py - homeassistant/components/ecovacs/vacuum.py - homeassistant/components/ecowitt/__init__.py - homeassistant/components/ecowitt/binary_sensor.py - homeassistant/components/ecowitt/entity.py - homeassistant/components/ecowitt/sensor.py - homeassistant/components/eddystone_temperature/sensor.py - homeassistant/components/edimax/switch.py - homeassistant/components/edl21/__init__.py - homeassistant/components/edl21/sensor.py - homeassistant/components/egardia/* - homeassistant/components/electrasmart/__init__.py - homeassistant/components/electrasmart/climate.py - homeassistant/components/electric_kiwi/__init__.py - homeassistant/components/electric_kiwi/api.py - homeassistant/components/electric_kiwi/coordinator.py - homeassistant/components/electric_kiwi/oauth2.py - homeassistant/components/electric_kiwi/select.py - homeassistant/components/eliqonline/sensor.py - homeassistant/components/elkm1/__init__.py - homeassistant/components/elkm1/alarm_control_panel.py - homeassistant/components/elkm1/binary_sensor.py - homeassistant/components/elkm1/climate.py - 
homeassistant/components/elkm1/light.py - homeassistant/components/elkm1/sensor.py - homeassistant/components/elkm1/switch.py - homeassistant/components/elmax/__init__.py - homeassistant/components/elmax/alarm_control_panel.py - homeassistant/components/elmax/binary_sensor.py - homeassistant/components/elmax/coordinator.py - homeassistant/components/elmax/cover.py - homeassistant/components/elmax/switch.py - homeassistant/components/elv/* - homeassistant/components/elvia/__init__.py - homeassistant/components/elvia/importer.py - homeassistant/components/emby/media_player.py - homeassistant/components/emoncms/sensor.py - homeassistant/components/emoncms_history/* - homeassistant/components/emonitor/__init__.py - homeassistant/components/emonitor/sensor.py - homeassistant/components/enigma2/media_player.py - homeassistant/components/enocean/__init__.py - homeassistant/components/enocean/binary_sensor.py - homeassistant/components/enocean/device.py - homeassistant/components/enocean/dongle.py - homeassistant/components/enocean/light.py - homeassistant/components/enocean/sensor.py - homeassistant/components/enocean/switch.py - homeassistant/components/enphase_envoy/__init__.py - homeassistant/components/enphase_envoy/binary_sensor.py - homeassistant/components/enphase_envoy/coordinator.py - homeassistant/components/enphase_envoy/entity.py - homeassistant/components/enphase_envoy/number.py - homeassistant/components/enphase_envoy/select.py - homeassistant/components/enphase_envoy/sensor.py - homeassistant/components/enphase_envoy/switch.py - homeassistant/components/entur_public_transport/* - homeassistant/components/environment_canada/__init__.py - homeassistant/components/environment_canada/camera.py - homeassistant/components/environment_canada/sensor.py - homeassistant/components/environment_canada/weather.py - homeassistant/components/envisalink/* - homeassistant/components/ephember/climate.py - homeassistant/components/epic_games_store/__init__.py - 
homeassistant/components/epic_games_store/coordinator.py - homeassistant/components/epion/__init__.py - homeassistant/components/epion/coordinator.py - homeassistant/components/epion/sensor.py - homeassistant/components/epson/__init__.py - homeassistant/components/epson/media_player.py - homeassistant/components/eq3btsmart/__init__.py - homeassistant/components/eq3btsmart/climate.py - homeassistant/components/eq3btsmart/entity.py - homeassistant/components/eq3btsmart/models.py - homeassistant/components/escea/__init__.py - homeassistant/components/escea/climate.py - homeassistant/components/escea/discovery.py - homeassistant/components/etherscan/sensor.py - homeassistant/components/eufy/* - homeassistant/components/eufylife_ble/__init__.py - homeassistant/components/eufylife_ble/sensor.py - homeassistant/components/everlights/light.py - homeassistant/components/evohome/* - homeassistant/components/ezviz/__init__.py - homeassistant/components/ezviz/alarm_control_panel.py - homeassistant/components/ezviz/binary_sensor.py - homeassistant/components/ezviz/button.py - homeassistant/components/ezviz/camera.py - homeassistant/components/ezviz/coordinator.py - homeassistant/components/ezviz/entity.py - homeassistant/components/ezviz/image.py - homeassistant/components/ezviz/light.py - homeassistant/components/ezviz/number.py - homeassistant/components/ezviz/select.py - homeassistant/components/ezviz/sensor.py - homeassistant/components/ezviz/siren.py - homeassistant/components/ezviz/switch.py - homeassistant/components/ezviz/update.py - homeassistant/components/faa_delays/__init__.py - homeassistant/components/faa_delays/binary_sensor.py - homeassistant/components/faa_delays/coordinator.py - homeassistant/components/familyhub/camera.py - homeassistant/components/ffmpeg/camera.py - homeassistant/components/fibaro/__init__.py - homeassistant/components/fibaro/binary_sensor.py - homeassistant/components/fibaro/climate.py - homeassistant/components/fibaro/cover.py - 
homeassistant/components/fibaro/event.py - homeassistant/components/fibaro/light.py - homeassistant/components/fibaro/lock.py - homeassistant/components/fibaro/sensor.py - homeassistant/components/fibaro/switch.py - homeassistant/components/fints/sensor.py - homeassistant/components/fireservicerota/__init__.py - homeassistant/components/fireservicerota/binary_sensor.py - homeassistant/components/fireservicerota/sensor.py - homeassistant/components/fireservicerota/switch.py - homeassistant/components/firmata/__init__.py - homeassistant/components/firmata/binary_sensor.py - homeassistant/components/firmata/board.py - homeassistant/components/firmata/entity.py - homeassistant/components/firmata/light.py - homeassistant/components/firmata/pin.py - homeassistant/components/firmata/sensor.py - homeassistant/components/firmata/switch.py - homeassistant/components/fivem/__init__.py - homeassistant/components/fivem/binary_sensor.py - homeassistant/components/fivem/coordinator.py - homeassistant/components/fivem/entity.py - homeassistant/components/fivem/sensor.py - homeassistant/components/fixer/sensor.py - homeassistant/components/fjaraskupan/__init__.py - homeassistant/components/fjaraskupan/binary_sensor.py - homeassistant/components/fjaraskupan/coordinator.py - homeassistant/components/fjaraskupan/fan.py - homeassistant/components/fjaraskupan/light.py - homeassistant/components/fjaraskupan/number.py - homeassistant/components/fjaraskupan/sensor.py - homeassistant/components/fleetgo/device_tracker.py - homeassistant/components/flexit/climate.py - homeassistant/components/flexit_bacnet/climate.py - homeassistant/components/flic/binary_sensor.py - homeassistant/components/flick_electric/__init__.py - homeassistant/components/flick_electric/sensor.py - homeassistant/components/flock/notify.py - homeassistant/components/flume/__init__.py - homeassistant/components/flume/binary_sensor.py - homeassistant/components/flume/coordinator.py - 
homeassistant/components/flume/entity.py - homeassistant/components/flume/sensor.py - homeassistant/components/flume/util.py - homeassistant/components/folder_watcher/__init__.py - homeassistant/components/foobot/sensor.py - homeassistant/components/fortios/device_tracker.py - homeassistant/components/foscam/__init__.py - homeassistant/components/foscam/camera.py - homeassistant/components/foscam/coordinator.py - homeassistant/components/foscam/entity.py - homeassistant/components/foursquare/* - homeassistant/components/free_mobile/notify.py - homeassistant/components/freebox/camera.py - homeassistant/components/freebox/home_base.py - homeassistant/components/freebox/switch.py - homeassistant/components/fritz/coordinator.py - homeassistant/components/fritz/entity.py - homeassistant/components/fritz/services.py - homeassistant/components/fritz/switch.py - homeassistant/components/fritzbox_callmonitor/__init__.py - homeassistant/components/fritzbox_callmonitor/base.py - homeassistant/components/fritzbox_callmonitor/sensor.py - homeassistant/components/frontier_silicon/__init__.py - homeassistant/components/frontier_silicon/browse_media.py - homeassistant/components/frontier_silicon/media_player.py - homeassistant/components/futurenow/light.py - homeassistant/components/garadget/cover.py - homeassistant/components/garages_amsterdam/__init__.py - homeassistant/components/garages_amsterdam/binary_sensor.py - homeassistant/components/garages_amsterdam/entity.py - homeassistant/components/garages_amsterdam/sensor.py - homeassistant/components/gc100/* - homeassistant/components/geniushub/* - homeassistant/components/geocaching/__init__.py - homeassistant/components/geocaching/coordinator.py - homeassistant/components/geocaching/oauth.py - homeassistant/components/geocaching/sensor.py - homeassistant/components/github/coordinator.py - homeassistant/components/gitlab_ci/sensor.py - homeassistant/components/gitter/sensor.py - homeassistant/components/glances/sensor.py - 
homeassistant/components/goodwe/__init__.py - homeassistant/components/goodwe/button.py - homeassistant/components/goodwe/coordinator.py - homeassistant/components/goodwe/number.py - homeassistant/components/goodwe/select.py - homeassistant/components/goodwe/sensor.py - homeassistant/components/google_cloud/tts.py - homeassistant/components/google_maps/device_tracker.py - homeassistant/components/google_pubsub/__init__.py - homeassistant/components/gpsd/__init__.py - homeassistant/components/gpsd/sensor.py - homeassistant/components/greenwave/light.py - homeassistant/components/growatt_server/__init__.py - homeassistant/components/growatt_server/sensor.py - homeassistant/components/growatt_server/sensor_types/* - homeassistant/components/gstreamer/media_player.py - homeassistant/components/gtfs/sensor.py - homeassistant/components/guardian/__init__.py - homeassistant/components/guardian/binary_sensor.py - homeassistant/components/guardian/button.py - homeassistant/components/guardian/coordinator.py - homeassistant/components/guardian/sensor.py - homeassistant/components/guardian/switch.py - homeassistant/components/guardian/util.py - homeassistant/components/guardian/valve.py - homeassistant/components/habitica/__init__.py - homeassistant/components/habitica/coordinator.py - homeassistant/components/habitica/sensor.py - homeassistant/components/harman_kardon_avr/media_player.py - homeassistant/components/harmony/data.py - homeassistant/components/harmony/remote.py - homeassistant/components/harmony/util.py - homeassistant/components/haveibeenpwned/sensor.py - homeassistant/components/heatmiser/climate.py - homeassistant/components/hikvision/binary_sensor.py - homeassistant/components/hikvisioncam/switch.py - homeassistant/components/hisense_aehw4a1/__init__.py - homeassistant/components/hisense_aehw4a1/climate.py - homeassistant/components/hitron_coda/device_tracker.py - homeassistant/components/hive/__init__.py - 
homeassistant/components/hive/alarm_control_panel.py - homeassistant/components/hive/binary_sensor.py - homeassistant/components/hive/climate.py - homeassistant/components/hive/light.py - homeassistant/components/hive/sensor.py - homeassistant/components/hive/switch.py - homeassistant/components/hive/water_heater.py - homeassistant/components/hko/__init__.py - homeassistant/components/hko/coordinator.py - homeassistant/components/hko/weather.py - homeassistant/components/hlk_sw16/__init__.py - homeassistant/components/hlk_sw16/switch.py - homeassistant/components/home_connect/entity.py - homeassistant/components/home_connect/light.py - homeassistant/components/home_connect/switch.py - homeassistant/components/homematic/__init__.py - homeassistant/components/homematic/binary_sensor.py - homeassistant/components/homematic/climate.py - homeassistant/components/homematic/cover.py - homeassistant/components/homematic/entity.py - homeassistant/components/homematic/light.py - homeassistant/components/homematic/lock.py - homeassistant/components/homematic/notify.py - homeassistant/components/homematic/sensor.py - homeassistant/components/homematic/switch.py - homeassistant/components/horizon/media_player.py - homeassistant/components/hp_ilo/sensor.py - homeassistant/components/huawei_lte/__init__.py - homeassistant/components/huawei_lte/binary_sensor.py - homeassistant/components/huawei_lte/device_tracker.py - homeassistant/components/huawei_lte/notify.py - homeassistant/components/huawei_lte/sensor.py - homeassistant/components/huawei_lte/switch.py - homeassistant/components/hunterdouglas_powerview/__init__.py - homeassistant/components/hunterdouglas_powerview/button.py - homeassistant/components/hunterdouglas_powerview/coordinator.py - homeassistant/components/hunterdouglas_powerview/cover.py - homeassistant/components/hunterdouglas_powerview/entity.py - homeassistant/components/hunterdouglas_powerview/number.py - 
homeassistant/components/hunterdouglas_powerview/select.py - homeassistant/components/hunterdouglas_powerview/sensor.py - homeassistant/components/hunterdouglas_powerview/shade_data.py - homeassistant/components/hunterdouglas_powerview/util.py - homeassistant/components/huum/__init__.py - homeassistant/components/huum/climate.py - homeassistant/components/hvv_departures/__init__.py - homeassistant/components/hvv_departures/binary_sensor.py - homeassistant/components/hvv_departures/sensor.py - homeassistant/components/ialarm/alarm_control_panel.py - homeassistant/components/iammeter/const.py - homeassistant/components/iammeter/sensor.py - homeassistant/components/iaqualink/binary_sensor.py - homeassistant/components/iaqualink/climate.py - homeassistant/components/iaqualink/light.py - homeassistant/components/iaqualink/sensor.py - homeassistant/components/iaqualink/switch.py - homeassistant/components/icloud/__init__.py - homeassistant/components/icloud/account.py - homeassistant/components/icloud/device_tracker.py - homeassistant/components/icloud/sensor.py - homeassistant/components/idteck_prox/* - homeassistant/components/ifttt/__init__.py - homeassistant/components/ifttt/alarm_control_panel.py - homeassistant/components/iglo/light.py - homeassistant/components/ihc/* - homeassistant/components/incomfort/__init__.py - homeassistant/components/incomfort/climate.py - homeassistant/components/incomfort/water_heater.py - homeassistant/components/insteon/binary_sensor.py - homeassistant/components/insteon/climate.py - homeassistant/components/insteon/cover.py - homeassistant/components/insteon/fan.py - homeassistant/components/insteon/insteon_entity.py - homeassistant/components/insteon/light.py - homeassistant/components/insteon/schemas.py - homeassistant/components/insteon/switch.py - homeassistant/components/insteon/utils.py - homeassistant/components/intellifire/__init__.py - homeassistant/components/intellifire/binary_sensor.py - 
homeassistant/components/intellifire/climate.py - homeassistant/components/intellifire/coordinator.py - homeassistant/components/intellifire/entity.py - homeassistant/components/intellifire/fan.py - homeassistant/components/intellifire/light.py - homeassistant/components/intellifire/number.py - homeassistant/components/intellifire/sensor.py - homeassistant/components/intellifire/switch.py - homeassistant/components/intesishome/* - homeassistant/components/ios/__init__.py - homeassistant/components/ios/notify.py - homeassistant/components/ios/sensor.py - homeassistant/components/iperf3/* - homeassistant/components/iqvia/__init__.py - homeassistant/components/iqvia/sensor.py - homeassistant/components/irish_rail_transport/sensor.py - homeassistant/components/iss/__init__.py - homeassistant/components/iss/sensor.py - homeassistant/components/ista_ecotrend/coordinator.py - homeassistant/components/isy994/__init__.py - homeassistant/components/isy994/binary_sensor.py - homeassistant/components/isy994/button.py - homeassistant/components/isy994/climate.py - homeassistant/components/isy994/cover.py - homeassistant/components/isy994/entity.py - homeassistant/components/isy994/fan.py - homeassistant/components/isy994/helpers.py - homeassistant/components/isy994/light.py - homeassistant/components/isy994/lock.py - homeassistant/components/isy994/models.py - homeassistant/components/isy994/number.py - homeassistant/components/isy994/select.py - homeassistant/components/isy994/sensor.py - homeassistant/components/isy994/services.py - homeassistant/components/isy994/switch.py - homeassistant/components/isy994/util.py - homeassistant/components/itach/remote.py - homeassistant/components/itunes/media_player.py - homeassistant/components/izone/__init__.py - homeassistant/components/izone/climate.py - homeassistant/components/izone/discovery.py - homeassistant/components/joaoapps_join/* - homeassistant/components/juicenet/__init__.py - homeassistant/components/juicenet/device.py - 
homeassistant/components/juicenet/entity.py - homeassistant/components/juicenet/number.py - homeassistant/components/juicenet/sensor.py - homeassistant/components/juicenet/switch.py - homeassistant/components/justnimbus/coordinator.py - homeassistant/components/justnimbus/entity.py - homeassistant/components/justnimbus/sensor.py - homeassistant/components/kaiterra/* - homeassistant/components/kankun/switch.py - homeassistant/components/keba/* - homeassistant/components/keenetic_ndms2/__init__.py - homeassistant/components/keenetic_ndms2/binary_sensor.py - homeassistant/components/keenetic_ndms2/device_tracker.py - homeassistant/components/keenetic_ndms2/router.py - homeassistant/components/kef/* - homeassistant/components/keyboard/* - homeassistant/components/keyboard_remote/* - homeassistant/components/keymitt_ble/__init__.py - homeassistant/components/keymitt_ble/coordinator.py - homeassistant/components/keymitt_ble/entity.py - homeassistant/components/keymitt_ble/switch.py - homeassistant/components/kitchen_sink/weather.py - homeassistant/components/kiwi/lock.py - homeassistant/components/kodi/__init__.py - homeassistant/components/kodi/browse_media.py - homeassistant/components/kodi/media_player.py - homeassistant/components/kodi/notify.py - homeassistant/components/konnected/__init__.py - homeassistant/components/konnected/panel.py - homeassistant/components/konnected/switch.py - homeassistant/components/kostal_plenticore/__init__.py - homeassistant/components/kostal_plenticore/coordinator.py - homeassistant/components/kostal_plenticore/helper.py - homeassistant/components/kostal_plenticore/select.py - homeassistant/components/kostal_plenticore/sensor.py - homeassistant/components/kostal_plenticore/switch.py - homeassistant/components/kwb/sensor.py - homeassistant/components/lacrosse/sensor.py - homeassistant/components/lannouncer/notify.py - homeassistant/components/launch_library/__init__.py - homeassistant/components/launch_library/sensor.py - 
homeassistant/components/lcn/climate.py - homeassistant/components/lcn/helpers.py - homeassistant/components/lcn/services.py - homeassistant/components/ld2410_ble/__init__.py - homeassistant/components/ld2410_ble/binary_sensor.py - homeassistant/components/ld2410_ble/coordinator.py - homeassistant/components/ld2410_ble/sensor.py - homeassistant/components/led_ble/__init__.py - homeassistant/components/led_ble/light.py - homeassistant/components/lg_netcast/media_player.py - homeassistant/components/lg_soundbar/__init__.py - homeassistant/components/lg_soundbar/media_player.py - homeassistant/components/lightwave/* - homeassistant/components/limitlessled/light.py - homeassistant/components/linksys_smart/device_tracker.py - homeassistant/components/linode/* - homeassistant/components/linux_battery/sensor.py - homeassistant/components/lirc/* - homeassistant/components/livisi/__init__.py - homeassistant/components/livisi/binary_sensor.py - homeassistant/components/livisi/climate.py - homeassistant/components/livisi/coordinator.py - homeassistant/components/livisi/entity.py - homeassistant/components/livisi/switch.py - homeassistant/components/llamalab_automate/notify.py - homeassistant/components/logi_circle/__init__.py - homeassistant/components/logi_circle/camera.py - homeassistant/components/logi_circle/sensor.py - homeassistant/components/london_underground/sensor.py - homeassistant/components/lookin/__init__.py - homeassistant/components/lookin/climate.py - homeassistant/components/lookin/coordinator.py - homeassistant/components/lookin/entity.py - homeassistant/components/lookin/light.py - homeassistant/components/lookin/media_player.py - homeassistant/components/lookin/sensor.py - homeassistant/components/loqed/sensor.py - homeassistant/components/luci/device_tracker.py - homeassistant/components/lupusec/__init__.py - homeassistant/components/lupusec/alarm_control_panel.py - homeassistant/components/lupusec/binary_sensor.py - 
homeassistant/components/lupusec/entity.py - homeassistant/components/lupusec/switch.py - homeassistant/components/lutron/__init__.py - homeassistant/components/lutron/binary_sensor.py - homeassistant/components/lutron/cover.py - homeassistant/components/lutron/entity.py - homeassistant/components/lutron/event.py - homeassistant/components/lutron/fan.py - homeassistant/components/lutron/light.py - homeassistant/components/lutron/switch.py - homeassistant/components/lutron_caseta/__init__.py - homeassistant/components/lutron_caseta/binary_sensor.py - homeassistant/components/lutron_caseta/cover.py - homeassistant/components/lutron_caseta/fan.py - homeassistant/components/lutron_caseta/light.py - homeassistant/components/lutron_caseta/switch.py - homeassistant/components/lw12wifi/light.py - homeassistant/components/lyric/__init__.py - homeassistant/components/lyric/api.py - homeassistant/components/lyric/climate.py - homeassistant/components/lyric/sensor.py - homeassistant/components/mailgun/notify.py - homeassistant/components/mastodon/notify.py - homeassistant/components/matrix/__init__.py - homeassistant/components/matrix/notify.py - homeassistant/components/matter/__init__.py - homeassistant/components/matter/fan.py - homeassistant/components/meater/__init__.py - homeassistant/components/meater/sensor.py - homeassistant/components/medcom_ble/__init__.py - homeassistant/components/medcom_ble/sensor.py - homeassistant/components/mediaroom/media_player.py - homeassistant/components/melcloud/__init__.py - homeassistant/components/melcloud/climate.py - homeassistant/components/melcloud/sensor.py - homeassistant/components/melcloud/water_heater.py - homeassistant/components/melnor/__init__.py - homeassistant/components/message_bird/notify.py - homeassistant/components/met/weather.py - homeassistant/components/met_eireann/__init__.py - homeassistant/components/met_eireann/weather.py - homeassistant/components/meteo_france/__init__.py - 
homeassistant/components/meteo_france/sensor.py - homeassistant/components/meteo_france/weather.py - homeassistant/components/meteoalarm/* - homeassistant/components/meteoclimatic/__init__.py - homeassistant/components/meteoclimatic/sensor.py - homeassistant/components/meteoclimatic/weather.py - homeassistant/components/microbees/__init__.py - homeassistant/components/microbees/api.py - homeassistant/components/microbees/application_credentials.py - homeassistant/components/microbees/binary_sensor.py - homeassistant/components/microbees/button.py - homeassistant/components/microbees/climate.py - homeassistant/components/microbees/coordinator.py - homeassistant/components/microbees/cover.py - homeassistant/components/microbees/entity.py - homeassistant/components/microbees/light.py - homeassistant/components/microbees/sensor.py - homeassistant/components/microbees/switch.py - homeassistant/components/microsoft/tts.py - homeassistant/components/mikrotik/coordinator.py - homeassistant/components/mill/climate.py - homeassistant/components/mill/sensor.py - homeassistant/components/minio/minio_helper.py - homeassistant/components/mjpeg/camera.py - homeassistant/components/mjpeg/util.py - homeassistant/components/mochad/__init__.py - homeassistant/components/mochad/light.py - homeassistant/components/mochad/switch.py - homeassistant/components/modem_callerid/button.py - homeassistant/components/modem_callerid/sensor.py - homeassistant/components/moehlenhoff_alpha2/climate.py - homeassistant/components/moehlenhoff_alpha2/coordinator.py - homeassistant/components/monzo/__init__.py - homeassistant/components/monzo/api.py - homeassistant/components/motion_blinds/__init__.py - homeassistant/components/motion_blinds/coordinator.py - homeassistant/components/motion_blinds/cover.py - homeassistant/components/motion_blinds/entity.py - homeassistant/components/motion_blinds/sensor.py - homeassistant/components/motionblinds_ble/__init__.py - 
homeassistant/components/motionblinds_ble/button.py - homeassistant/components/motionblinds_ble/cover.py - homeassistant/components/motionblinds_ble/entity.py - homeassistant/components/motionblinds_ble/select.py - homeassistant/components/motionblinds_ble/sensor.py - homeassistant/components/motionmount/__init__.py - homeassistant/components/motionmount/binary_sensor.py - homeassistant/components/motionmount/entity.py - homeassistant/components/motionmount/number.py - homeassistant/components/motionmount/select.py - homeassistant/components/motionmount/sensor.py - homeassistant/components/mpd/media_player.py - homeassistant/components/mqtt_room/sensor.py - homeassistant/components/msteams/notify.py - homeassistant/components/mullvad/__init__.py - homeassistant/components/mullvad/binary_sensor.py - homeassistant/components/mutesync/__init__.py - homeassistant/components/mutesync/binary_sensor.py - homeassistant/components/mvglive/sensor.py - homeassistant/components/mycroft/* - homeassistant/components/mysensors/__init__.py - homeassistant/components/mysensors/climate.py - homeassistant/components/mysensors/cover.py - homeassistant/components/mysensors/gateway.py - homeassistant/components/mysensors/handler.py - homeassistant/components/mysensors/helpers.py - homeassistant/components/mysensors/light.py - homeassistant/components/mysensors/switch.py - homeassistant/components/mystrom/binary_sensor.py - homeassistant/components/mystrom/light.py - homeassistant/components/mystrom/sensor.py - homeassistant/components/mystrom/switch.py - homeassistant/components/myuplink/__init__.py - homeassistant/components/myuplink/api.py - homeassistant/components/myuplink/application_credentials.py - homeassistant/components/myuplink/coordinator.py - homeassistant/components/myuplink/entity.py - homeassistant/components/myuplink/helpers.py - homeassistant/components/myuplink/sensor.py - homeassistant/components/nad/media_player.py - homeassistant/components/nanoleaf/__init__.py - 
homeassistant/components/nanoleaf/button.py - homeassistant/components/nanoleaf/coordinator.py - homeassistant/components/nanoleaf/entity.py - homeassistant/components/nanoleaf/event.py - homeassistant/components/nanoleaf/light.py - homeassistant/components/neato/__init__.py - homeassistant/components/neato/api.py - homeassistant/components/neato/button.py - homeassistant/components/neato/camera.py - homeassistant/components/neato/entity.py - homeassistant/components/neato/hub.py - homeassistant/components/neato/sensor.py - homeassistant/components/neato/switch.py - homeassistant/components/neato/vacuum.py - homeassistant/components/nederlandse_spoorwegen/sensor.py - homeassistant/components/netdata/sensor.py - homeassistant/components/netgear/__init__.py - homeassistant/components/netgear/button.py - homeassistant/components/netgear/device_tracker.py - homeassistant/components/netgear/entity.py - homeassistant/components/netgear/router.py - homeassistant/components/netgear/sensor.py - homeassistant/components/netgear/switch.py - homeassistant/components/netgear/update.py - homeassistant/components/netgear_lte/__init__.py - homeassistant/components/netgear_lte/notify.py - homeassistant/components/netio/switch.py - homeassistant/components/neurio_energy/sensor.py - homeassistant/components/nexia/climate.py - homeassistant/components/nexia/entity.py - homeassistant/components/nexia/switch.py - homeassistant/components/nextcloud/__init__.py - homeassistant/components/nextcloud/binary_sensor.py - homeassistant/components/nextcloud/coordinator.py - homeassistant/components/nextcloud/entity.py - homeassistant/components/nextcloud/sensor.py - homeassistant/components/nextcloud/update.py - homeassistant/components/nfandroidtv/__init__.py - homeassistant/components/nfandroidtv/notify.py - homeassistant/components/nibe_heatpump/__init__.py - homeassistant/components/nibe_heatpump/binary_sensor.py - homeassistant/components/nibe_heatpump/select.py - 
homeassistant/components/nibe_heatpump/sensor.py - homeassistant/components/nibe_heatpump/switch.py - homeassistant/components/nibe_heatpump/water_heater.py - homeassistant/components/niko_home_control/light.py - homeassistant/components/nilu/air_quality.py - homeassistant/components/nissan_leaf/* - homeassistant/components/nmap_tracker/__init__.py - homeassistant/components/nmap_tracker/device_tracker.py - homeassistant/components/nmbs/sensor.py - homeassistant/components/noaa_tides/sensor.py - homeassistant/components/nobo_hub/__init__.py - homeassistant/components/nobo_hub/climate.py - homeassistant/components/nobo_hub/select.py - homeassistant/components/nobo_hub/sensor.py - homeassistant/components/norway_air/air_quality.py - homeassistant/components/notify_events/notify.py - homeassistant/components/notion/__init__.py - homeassistant/components/notion/binary_sensor.py - homeassistant/components/notion/coordinator.py - homeassistant/components/notion/sensor.py - homeassistant/components/notion/util.py - homeassistant/components/nsw_fuel_station/sensor.py - homeassistant/components/nuki/__init__.py - homeassistant/components/nuki/coordinator.py - homeassistant/components/nuki/lock.py - homeassistant/components/nx584/alarm_control_panel.py - homeassistant/components/oasa_telematics/sensor.py - homeassistant/components/obihai/__init__.py - homeassistant/components/obihai/button.py - homeassistant/components/obihai/connectivity.py - homeassistant/components/obihai/sensor.py - homeassistant/components/octoprint/__init__.py - homeassistant/components/octoprint/coordinator.py - homeassistant/components/oem/climate.py - homeassistant/components/ohmconnect/sensor.py - homeassistant/components/ombi/* - homeassistant/components/omnilogic/__init__.py - homeassistant/components/omnilogic/coordinator.py - homeassistant/components/omnilogic/sensor.py - homeassistant/components/omnilogic/switch.py - homeassistant/components/ondilo_ico/__init__.py - 
homeassistant/components/ondilo_ico/api.py - homeassistant/components/ondilo_ico/coordinator.py - homeassistant/components/ondilo_ico/sensor.py - homeassistant/components/onkyo/media_player.py - homeassistant/components/onvif/__init__.py - homeassistant/components/onvif/binary_sensor.py - homeassistant/components/onvif/camera.py - homeassistant/components/onvif/device.py - homeassistant/components/onvif/event.py - homeassistant/components/onvif/parsers.py - homeassistant/components/onvif/sensor.py - homeassistant/components/onvif/util.py - homeassistant/components/open_meteo/weather.py - homeassistant/components/openevse/sensor.py - homeassistant/components/openexchangerates/__init__.py - homeassistant/components/openexchangerates/coordinator.py - homeassistant/components/openexchangerates/sensor.py - homeassistant/components/opengarage/__init__.py - homeassistant/components/opengarage/binary_sensor.py - homeassistant/components/opengarage/cover.py - homeassistant/components/opengarage/entity.py - homeassistant/components/opengarage/sensor.py - homeassistant/components/openhardwaremonitor/sensor.py - homeassistant/components/openhome/__init__.py - homeassistant/components/openhome/media_player.py - homeassistant/components/opensensemap/air_quality.py - homeassistant/components/opentherm_gw/__init__.py - homeassistant/components/opentherm_gw/binary_sensor.py - homeassistant/components/opentherm_gw/climate.py - homeassistant/components/opentherm_gw/sensor.py - homeassistant/components/openuv/__init__.py - homeassistant/components/openuv/binary_sensor.py - homeassistant/components/openuv/coordinator.py - homeassistant/components/openuv/sensor.py - homeassistant/components/openweathermap/__init__.py - homeassistant/components/openweathermap/coordinator.py - homeassistant/components/openweathermap/repairs.py - homeassistant/components/openweathermap/sensor.py - homeassistant/components/openweathermap/weather.py - homeassistant/components/opnsense/__init__.py - 
homeassistant/components/opnsense/device_tracker.py - homeassistant/components/opower/__init__.py - homeassistant/components/opower/coordinator.py - homeassistant/components/opower/sensor.py - homeassistant/components/opple/light.py - homeassistant/components/oru/* - homeassistant/components/orvibo/switch.py - homeassistant/components/osoenergy/__init__.py - homeassistant/components/osoenergy/binary_sensor.py - homeassistant/components/osoenergy/entity.py - homeassistant/components/osoenergy/sensor.py - homeassistant/components/osoenergy/water_heater.py - homeassistant/components/osramlightify/light.py - homeassistant/components/otp/sensor.py - homeassistant/components/overkiz/__init__.py - homeassistant/components/overkiz/alarm_control_panel.py - homeassistant/components/overkiz/binary_sensor.py - homeassistant/components/overkiz/button.py - homeassistant/components/overkiz/climate.py - homeassistant/components/overkiz/climate_entities/* - homeassistant/components/overkiz/coordinator.py - homeassistant/components/overkiz/cover.py - homeassistant/components/overkiz/cover_entities/* - homeassistant/components/overkiz/entity.py - homeassistant/components/overkiz/executor.py - homeassistant/components/overkiz/light.py - homeassistant/components/overkiz/lock.py - homeassistant/components/overkiz/number.py - homeassistant/components/overkiz/select.py - homeassistant/components/overkiz/sensor.py - homeassistant/components/overkiz/siren.py - homeassistant/components/overkiz/switch.py - homeassistant/components/overkiz/water_heater.py - homeassistant/components/overkiz/water_heater_entities/* - homeassistant/components/ovo_energy/__init__.py - homeassistant/components/ovo_energy/sensor.py - homeassistant/components/panasonic_bluray/media_player.py - homeassistant/components/panasonic_viera/media_player.py - homeassistant/components/pandora/media_player.py - homeassistant/components/pencom/switch.py - homeassistant/components/permobil/__init__.py - 
homeassistant/components/permobil/binary_sensor.py - homeassistant/components/permobil/coordinator.py - homeassistant/components/permobil/entity.py - homeassistant/components/permobil/sensor.py - homeassistant/components/philips_js/__init__.py - homeassistant/components/philips_js/coordinator.py - homeassistant/components/philips_js/light.py - homeassistant/components/philips_js/media_player.py - homeassistant/components/philips_js/remote.py - homeassistant/components/philips_js/switch.py - homeassistant/components/pi_hole/sensor.py - homeassistant/components/picotts/tts.py - homeassistant/components/pilight/base_class.py - homeassistant/components/pilight/binary_sensor.py - homeassistant/components/pilight/light.py - homeassistant/components/pilight/switch.py - homeassistant/components/ping/__init__.py - homeassistant/components/ping/helpers.py - homeassistant/components/pioneer/media_player.py - homeassistant/components/plaato/__init__.py - homeassistant/components/plaato/binary_sensor.py - homeassistant/components/plaato/entity.py - homeassistant/components/plaato/sensor.py - homeassistant/components/plex/cast.py - homeassistant/components/plex/media_player.py - homeassistant/components/plex/view.py - homeassistant/components/plum_lightpad/light.py - homeassistant/components/pocketcasts/sensor.py - homeassistant/components/point/__init__.py - homeassistant/components/point/alarm_control_panel.py - homeassistant/components/point/binary_sensor.py - homeassistant/components/point/sensor.py - homeassistant/components/powerwall/__init__.py - homeassistant/components/progettihwsw/__init__.py - homeassistant/components/progettihwsw/binary_sensor.py - homeassistant/components/progettihwsw/switch.py - homeassistant/components/proliphix/climate.py - homeassistant/components/prowl/notify.py - homeassistant/components/proxmoxve/* - homeassistant/components/proxy/camera.py - homeassistant/components/pulseaudio_loopback/switch.py - 
homeassistant/components/purpleair/coordinator.py - homeassistant/components/pushbullet/api.py - homeassistant/components/pushbullet/notify.py - homeassistant/components/pushbullet/sensor.py - homeassistant/components/pushover/notify.py - homeassistant/components/pushsafer/notify.py - homeassistant/components/qbittorrent/__init__.py - homeassistant/components/qbittorrent/coordinator.py - homeassistant/components/qbittorrent/sensor.py - homeassistant/components/qnap/__init__.py - homeassistant/components/qnap/coordinator.py - homeassistant/components/qnap/sensor.py - homeassistant/components/qrcode/image_processing.py - homeassistant/components/quantum_gateway/device_tracker.py - homeassistant/components/qvr_pro/* - homeassistant/components/rabbitair/__init__.py - homeassistant/components/rabbitair/coordinator.py - homeassistant/components/rabbitair/entity.py - homeassistant/components/rabbitair/fan.py - homeassistant/components/rachio/__init__.py - homeassistant/components/rachio/binary_sensor.py - homeassistant/components/rachio/coordinator.py - homeassistant/components/rachio/device.py - homeassistant/components/rachio/entity.py - homeassistant/components/rachio/switch.py - homeassistant/components/rachio/webhooks.py - homeassistant/components/radio_browser/__init__.py - homeassistant/components/radiotherm/__init__.py - homeassistant/components/radiotherm/climate.py - homeassistant/components/radiotherm/coordinator.py - homeassistant/components/radiotherm/data.py - homeassistant/components/radiotherm/entity.py - homeassistant/components/radiotherm/switch.py - homeassistant/components/radiotherm/util.py - homeassistant/components/raincloud/* - homeassistant/components/rainmachine/__init__.py - homeassistant/components/rainmachine/binary_sensor.py - homeassistant/components/rainmachine/button.py - homeassistant/components/rainmachine/coordinator.py - homeassistant/components/rainmachine/select.py - homeassistant/components/rainmachine/sensor.py - 
homeassistant/components/rainmachine/switch.py - homeassistant/components/rainmachine/update.py - homeassistant/components/rainmachine/util.py - homeassistant/components/raspyrfm/* - homeassistant/components/recollect_waste/sensor.py - homeassistant/components/recorder/repack.py - homeassistant/components/recswitch/switch.py - homeassistant/components/reddit/sensor.py - homeassistant/components/refoss/__init__.py - homeassistant/components/refoss/bridge.py - homeassistant/components/refoss/coordinator.py - homeassistant/components/refoss/entity.py - homeassistant/components/refoss/sensor.py - homeassistant/components/refoss/switch.py - homeassistant/components/refoss/util.py - homeassistant/components/rejseplanen/sensor.py - homeassistant/components/remember_the_milk/__init__.py - homeassistant/components/remote_rpi_gpio/* - homeassistant/components/renson/__init__.py - homeassistant/components/renson/binary_sensor.py - homeassistant/components/renson/button.py - homeassistant/components/renson/coordinator.py - homeassistant/components/renson/entity.py - homeassistant/components/renson/fan.py - homeassistant/components/renson/number.py - homeassistant/components/renson/sensor.py - homeassistant/components/renson/switch.py - homeassistant/components/renson/time.py - homeassistant/components/reolink/binary_sensor.py - homeassistant/components/reolink/button.py - homeassistant/components/reolink/camera.py - homeassistant/components/reolink/entity.py - homeassistant/components/reolink/host.py - homeassistant/components/reolink/light.py - homeassistant/components/reolink/number.py - homeassistant/components/reolink/select.py - homeassistant/components/reolink/sensor.py - homeassistant/components/reolink/siren.py - homeassistant/components/reolink/switch.py - homeassistant/components/reolink/update.py - homeassistant/components/repetier/__init__.py - homeassistant/components/repetier/sensor.py - homeassistant/components/rest/notify.py - 
homeassistant/components/rest/switch.py - homeassistant/components/ridwell/__init__.py - homeassistant/components/ridwell/calendar.py - homeassistant/components/ridwell/coordinator.py - homeassistant/components/ridwell/switch.py - homeassistant/components/ring/camera.py - homeassistant/components/ripple/sensor.py - homeassistant/components/roborock/coordinator.py - homeassistant/components/rocketchat/notify.py - homeassistant/components/romy/__init__.py - homeassistant/components/romy/binary_sensor.py - homeassistant/components/romy/coordinator.py - homeassistant/components/romy/entity.py - homeassistant/components/romy/sensor.py - homeassistant/components/romy/vacuum.py - homeassistant/components/roomba/__init__.py - homeassistant/components/roomba/binary_sensor.py - homeassistant/components/roomba/braava.py - homeassistant/components/roomba/irobot_base.py - homeassistant/components/roomba/roomba.py - homeassistant/components/roomba/sensor.py - homeassistant/components/roomba/vacuum.py - homeassistant/components/roon/__init__.py - homeassistant/components/roon/event.py - homeassistant/components/roon/media_browser.py - homeassistant/components/roon/media_player.py - homeassistant/components/roon/server.py - homeassistant/components/route53/* - homeassistant/components/rpi_camera/* - homeassistant/components/rtorrent/sensor.py - homeassistant/components/russound_rio/media_player.py - homeassistant/components/russound_rnet/media_player.py - homeassistant/components/ruuvi_gateway/__init__.py - homeassistant/components/ruuvi_gateway/bluetooth.py - homeassistant/components/ruuvi_gateway/coordinator.py - homeassistant/components/rympro/__init__.py - homeassistant/components/rympro/coordinator.py - homeassistant/components/rympro/sensor.py - homeassistant/components/sabnzbd/__init__.py - homeassistant/components/sabnzbd/coordinator.py - homeassistant/components/sabnzbd/sensor.py - homeassistant/components/saj/sensor.py - homeassistant/components/satel_integra/* - 
homeassistant/components/schluter/* - homeassistant/components/screenlogic/binary_sensor.py - homeassistant/components/screenlogic/climate.py - homeassistant/components/screenlogic/coordinator.py - homeassistant/components/screenlogic/entity.py - homeassistant/components/screenlogic/light.py - homeassistant/components/screenlogic/number.py - homeassistant/components/screenlogic/sensor.py - homeassistant/components/screenlogic/switch.py - homeassistant/components/scsgate/* - homeassistant/components/sendgrid/notify.py - homeassistant/components/sense/__init__.py - homeassistant/components/sense/binary_sensor.py - homeassistant/components/sense/sensor.py - homeassistant/components/senz/__init__.py - homeassistant/components/senz/api.py - homeassistant/components/senz/climate.py - homeassistant/components/serial/sensor.py - homeassistant/components/serial_pm/sensor.py - homeassistant/components/sesame/lock.py - homeassistant/components/seven_segments/image_processing.py - homeassistant/components/shodan/sensor.py - homeassistant/components/sia/__init__.py - homeassistant/components/sia/alarm_control_panel.py - homeassistant/components/sia/binary_sensor.py - homeassistant/components/sia/hub.py - homeassistant/components/sia/sia_entity_base.py - homeassistant/components/sia/utils.py - homeassistant/components/simplepush/__init__.py - homeassistant/components/simplepush/notify.py - homeassistant/components/simplisafe/__init__.py - homeassistant/components/simplisafe/alarm_control_panel.py - homeassistant/components/simplisafe/binary_sensor.py - homeassistant/components/simplisafe/button.py - homeassistant/components/simplisafe/lock.py - homeassistant/components/simplisafe/sensor.py - homeassistant/components/sinch/* - homeassistant/components/sisyphus/* - homeassistant/components/sky_hub/* - homeassistant/components/skybeacon/sensor.py - homeassistant/components/skybell/__init__.py - homeassistant/components/skybell/camera.py - homeassistant/components/skybell/light.py - 
homeassistant/components/skybell/sensor.py - homeassistant/components/skybell/switch.py - homeassistant/components/slack/__init__.py - homeassistant/components/slack/notify.py - homeassistant/components/slack/sensor.py - homeassistant/components/slide/* - homeassistant/components/slimproto/__init__.py - homeassistant/components/slimproto/media_player.py - homeassistant/components/sma/__init__.py - homeassistant/components/sma/sensor.py - homeassistant/components/smappee/__init__.py - homeassistant/components/smappee/api.py - homeassistant/components/smappee/binary_sensor.py - homeassistant/components/smappee/sensor.py - homeassistant/components/smappee/switch.py - homeassistant/components/smarty/* - homeassistant/components/sms/__init__.py - homeassistant/components/sms/coordinator.py - homeassistant/components/sms/gateway.py - homeassistant/components/sms/notify.py - homeassistant/components/sms/sensor.py - homeassistant/components/smtp/notify.py - homeassistant/components/snapcast/__init__.py - homeassistant/components/snapcast/media_player.py - homeassistant/components/snapcast/server.py - homeassistant/components/snmp/device_tracker.py - homeassistant/components/snmp/sensor.py - homeassistant/components/snmp/switch.py - homeassistant/components/snooz/__init__.py - homeassistant/components/solaredge/__init__.py - homeassistant/components/solaredge/coordinator.py - homeassistant/components/solaredge_local/sensor.py - homeassistant/components/solax/__init__.py - homeassistant/components/solax/sensor.py - homeassistant/components/soma/__init__.py - homeassistant/components/soma/cover.py - homeassistant/components/soma/sensor.py - homeassistant/components/soma/utils.py - homeassistant/components/somfy_mylink/__init__.py - homeassistant/components/somfy_mylink/cover.py - homeassistant/components/sonos/__init__.py - homeassistant/components/sonos/alarms.py - homeassistant/components/sonos/entity.py - homeassistant/components/sonos/favorites.py - 
homeassistant/components/sonos/helpers.py - homeassistant/components/sonos/household_coordinator.py - homeassistant/components/sonos/media.py - homeassistant/components/sonos/media_browser.py - homeassistant/components/sonos/media_player.py - homeassistant/components/sonos/speaker.py - homeassistant/components/sonos/switch.py - homeassistant/components/sony_projector/switch.py - homeassistant/components/spc/__init__.py - homeassistant/components/spc/alarm_control_panel.py - homeassistant/components/spc/binary_sensor.py - homeassistant/components/spider/__init__.py - homeassistant/components/spider/climate.py - homeassistant/components/spider/sensor.py - homeassistant/components/spider/switch.py - homeassistant/components/splunk/* - homeassistant/components/spotify/__init__.py - homeassistant/components/spotify/browse_media.py - homeassistant/components/spotify/media_player.py - homeassistant/components/spotify/system_health.py - homeassistant/components/spotify/util.py - homeassistant/components/squeezebox/__init__.py - homeassistant/components/squeezebox/browse_media.py - homeassistant/components/squeezebox/media_player.py - homeassistant/components/starline/__init__.py - homeassistant/components/starline/account.py - homeassistant/components/starline/binary_sensor.py - homeassistant/components/starline/button.py - homeassistant/components/starline/device_tracker.py - homeassistant/components/starline/entity.py - homeassistant/components/starline/lock.py - homeassistant/components/starline/sensor.py - homeassistant/components/starline/switch.py - homeassistant/components/starlingbank/sensor.py - homeassistant/components/starlink/__init__.py - homeassistant/components/starlink/binary_sensor.py - homeassistant/components/starlink/button.py - homeassistant/components/starlink/coordinator.py - homeassistant/components/starlink/device_tracker.py - homeassistant/components/starlink/sensor.py - homeassistant/components/starlink/switch.py - 
homeassistant/components/starlink/time.py - homeassistant/components/steam_online/sensor.py - homeassistant/components/stiebel_eltron/* - homeassistant/components/stookalert/__init__.py - homeassistant/components/stookalert/binary_sensor.py - homeassistant/components/stookwijzer/__init__.py - homeassistant/components/stookwijzer/sensor.py - homeassistant/components/stream/__init__.py - homeassistant/components/stream/core.py - homeassistant/components/stream/fmp4utils.py - homeassistant/components/stream/hls.py - homeassistant/components/stream/worker.py - homeassistant/components/streamlabswater/__init__.py - homeassistant/components/streamlabswater/binary_sensor.py - homeassistant/components/streamlabswater/coordinator.py - homeassistant/components/streamlabswater/sensor.py - homeassistant/components/suez_water/__init__.py - homeassistant/components/suez_water/sensor.py - homeassistant/components/supervisord/sensor.py - homeassistant/components/supla/* - homeassistant/components/surepetcare/__init__.py - homeassistant/components/surepetcare/binary_sensor.py - homeassistant/components/surepetcare/coordinator.py - homeassistant/components/surepetcare/entity.py - homeassistant/components/surepetcare/sensor.py - homeassistant/components/swiss_hydrological_data/sensor.py - homeassistant/components/swiss_public_transport/__init__.py - homeassistant/components/swiss_public_transport/coordinator.py - homeassistant/components/swiss_public_transport/sensor.py - homeassistant/components/swisscom/device_tracker.py - homeassistant/components/switchbee/__init__.py - homeassistant/components/switchbee/button.py - homeassistant/components/switchbee/climate.py - homeassistant/components/switchbee/coordinator.py - homeassistant/components/switchbee/cover.py - homeassistant/components/switchbee/entity.py - homeassistant/components/switchbee/light.py - homeassistant/components/switchbee/switch.py - homeassistant/components/switchbot/__init__.py - 
homeassistant/components/switchbot/binary_sensor.py - homeassistant/components/switchbot/coordinator.py - homeassistant/components/switchbot/cover.py - homeassistant/components/switchbot/entity.py - homeassistant/components/switchbot/humidifier.py - homeassistant/components/switchbot/light.py - homeassistant/components/switchbot/lock.py - homeassistant/components/switchbot/sensor.py - homeassistant/components/switchbot/switch.py - homeassistant/components/switchbot_cloud/climate.py - homeassistant/components/switchbot_cloud/coordinator.py - homeassistant/components/switchbot_cloud/entity.py - homeassistant/components/switchbot_cloud/sensor.py - homeassistant/components/switchbot_cloud/switch.py - homeassistant/components/switchmate/switch.py - homeassistant/components/syncthing/__init__.py - homeassistant/components/syncthing/sensor.py - homeassistant/components/syncthru/__init__.py - homeassistant/components/syncthru/sensor.py - homeassistant/components/synology_chat/notify.py - homeassistant/components/synology_dsm/__init__.py - homeassistant/components/synology_dsm/binary_sensor.py - homeassistant/components/synology_dsm/button.py - homeassistant/components/synology_dsm/camera.py - homeassistant/components/synology_dsm/common.py - homeassistant/components/synology_dsm/coordinator.py - homeassistant/components/synology_dsm/entity.py - homeassistant/components/synology_dsm/sensor.py - homeassistant/components/synology_dsm/service.py - homeassistant/components/synology_dsm/switch.py - homeassistant/components/synology_dsm/update.py - homeassistant/components/synology_srm/device_tracker.py - homeassistant/components/syslog/notify.py - homeassistant/components/system_bridge/__init__.py - homeassistant/components/system_bridge/binary_sensor.py - homeassistant/components/system_bridge/coordinator.py - homeassistant/components/system_bridge/entity.py - homeassistant/components/system_bridge/media_player.py - homeassistant/components/system_bridge/notify.py - 
homeassistant/components/system_bridge/sensor.py - homeassistant/components/system_bridge/update.py - homeassistant/components/tado/__init__.py - homeassistant/components/tado/binary_sensor.py - homeassistant/components/tado/climate.py - homeassistant/components/tado/device_tracker.py - homeassistant/components/tado/sensor.py - homeassistant/components/tado/water_heater.py - homeassistant/components/tami4/button.py - homeassistant/components/tank_utility/sensor.py - homeassistant/components/tapsaff/binary_sensor.py - homeassistant/components/tautulli/__init__.py - homeassistant/components/tautulli/coordinator.py - homeassistant/components/tautulli/sensor.py - homeassistant/components/ted5000/sensor.py - homeassistant/components/telegram/notify.py - homeassistant/components/telegram_bot/__init__.py - homeassistant/components/telegram_bot/polling.py - homeassistant/components/telegram_bot/webhooks.py - homeassistant/components/tellduslive/__init__.py - homeassistant/components/tellduslive/binary_sensor.py - homeassistant/components/tellduslive/cover.py - homeassistant/components/tellduslive/entry.py - homeassistant/components/tellduslive/light.py - homeassistant/components/tellduslive/sensor.py - homeassistant/components/tellduslive/switch.py - homeassistant/components/tellstick/* - homeassistant/components/telnet/switch.py - homeassistant/components/temper/sensor.py - homeassistant/components/tensorflow/image_processing.py - homeassistant/components/tfiac/climate.py - homeassistant/components/thermoworks_smoke/sensor.py - homeassistant/components/thingspeak/* - homeassistant/components/thinkingcleaner/* - homeassistant/components/thomson/device_tracker.py - homeassistant/components/tibber/__init__.py - homeassistant/components/tibber/coordinator.py - homeassistant/components/tibber/sensor.py - homeassistant/components/tikteck/light.py - homeassistant/components/tile/__init__.py - homeassistant/components/tile/device_tracker.py - 
homeassistant/components/time_date/sensor.py - homeassistant/components/tmb/sensor.py - homeassistant/components/todoist/calendar.py - homeassistant/components/tolo/__init__.py - homeassistant/components/tolo/binary_sensor.py - homeassistant/components/tolo/button.py - homeassistant/components/tolo/climate.py - homeassistant/components/tolo/fan.py - homeassistant/components/tolo/light.py - homeassistant/components/tolo/number.py - homeassistant/components/tolo/select.py - homeassistant/components/tolo/sensor.py - homeassistant/components/tolo/switch.py - homeassistant/components/toon/__init__.py - homeassistant/components/toon/binary_sensor.py - homeassistant/components/toon/climate.py - homeassistant/components/toon/coordinator.py - homeassistant/components/toon/helpers.py - homeassistant/components/toon/models.py - homeassistant/components/toon/oauth2.py - homeassistant/components/toon/sensor.py - homeassistant/components/toon/switch.py - homeassistant/components/torque/sensor.py - homeassistant/components/totalconnect/__init__.py - homeassistant/components/touchline/climate.py - homeassistant/components/tplink_lte/* - homeassistant/components/tplink_omada/__init__.py - homeassistant/components/tplink_omada/binary_sensor.py - homeassistant/components/tplink_omada/controller.py - homeassistant/components/tplink_omada/update.py - homeassistant/components/traccar/device_tracker.py - homeassistant/components/traccar_server/__init__.py - homeassistant/components/traccar_server/coordinator.py - homeassistant/components/traccar_server/device_tracker.py - homeassistant/components/traccar_server/entity.py - homeassistant/components/traccar_server/helpers.py - homeassistant/components/traccar_server/sensor.py - homeassistant/components/tradfri/__init__.py - homeassistant/components/tradfri/base_class.py - homeassistant/components/tradfri/coordinator.py - homeassistant/components/tradfri/cover.py - homeassistant/components/tradfri/fan.py - 
homeassistant/components/tradfri/light.py - homeassistant/components/tradfri/sensor.py - homeassistant/components/tradfri/switch.py - homeassistant/components/trafikverket_weatherstation/__init__.py - homeassistant/components/trafikverket_weatherstation/coordinator.py - homeassistant/components/trafikverket_weatherstation/sensor.py - homeassistant/components/transmission/__init__.py - homeassistant/components/transmission/coordinator.py - homeassistant/components/transmission/sensor.py - homeassistant/components/transmission/switch.py - homeassistant/components/travisci/sensor.py - homeassistant/components/tuya/__init__.py - homeassistant/components/tuya/alarm_control_panel.py - homeassistant/components/tuya/base.py - homeassistant/components/tuya/binary_sensor.py - homeassistant/components/tuya/button.py - homeassistant/components/tuya/camera.py - homeassistant/components/tuya/climate.py - homeassistant/components/tuya/cover.py - homeassistant/components/tuya/fan.py - homeassistant/components/tuya/humidifier.py - homeassistant/components/tuya/light.py - homeassistant/components/tuya/number.py - homeassistant/components/tuya/select.py - homeassistant/components/tuya/sensor.py - homeassistant/components/tuya/siren.py - homeassistant/components/tuya/switch.py - homeassistant/components/tuya/util.py - homeassistant/components/tuya/vacuum.py - homeassistant/components/twilio_call/notify.py - homeassistant/components/twilio_sms/notify.py - homeassistant/components/twitter/notify.py - homeassistant/components/ubus/device_tracker.py - homeassistant/components/ue_smart_radio/media_player.py - homeassistant/components/ukraine_alarm/__init__.py - homeassistant/components/ukraine_alarm/binary_sensor.py - homeassistant/components/unifi_direct/__init__.py - homeassistant/components/unifi_direct/device_tracker.py - homeassistant/components/unifiled/* - homeassistant/components/upb/__init__.py - homeassistant/components/upb/light.py - homeassistant/components/upc_connect/* - 
homeassistant/components/upcloud/__init__.py - homeassistant/components/upcloud/binary_sensor.py - homeassistant/components/upcloud/switch.py - homeassistant/components/upnp/__init__.py - homeassistant/components/upnp/device.py - homeassistant/components/upnp/sensor.py - homeassistant/components/v2c/__init__.py - homeassistant/components/v2c/binary_sensor.py - homeassistant/components/v2c/coordinator.py - homeassistant/components/v2c/entity.py - homeassistant/components/v2c/number.py - homeassistant/components/v2c/switch.py - homeassistant/components/vallox/__init__.py - homeassistant/components/vallox/coordinator.py - homeassistant/components/vasttrafik/sensor.py - homeassistant/components/velbus/__init__.py - homeassistant/components/velbus/binary_sensor.py - homeassistant/components/velbus/button.py - homeassistant/components/velbus/climate.py - homeassistant/components/velbus/cover.py - homeassistant/components/velbus/entity.py - homeassistant/components/velbus/light.py - homeassistant/components/velbus/select.py - homeassistant/components/velbus/sensor.py - homeassistant/components/velbus/switch.py - homeassistant/components/velux/__init__.py - homeassistant/components/velux/cover.py - homeassistant/components/velux/light.py - homeassistant/components/venstar/climate.py - homeassistant/components/venstar/coordinator.py - homeassistant/components/venstar/sensor.py - homeassistant/components/verisure/__init__.py - homeassistant/components/verisure/alarm_control_panel.py - homeassistant/components/verisure/binary_sensor.py - homeassistant/components/verisure/camera.py - homeassistant/components/verisure/coordinator.py - homeassistant/components/verisure/lock.py - homeassistant/components/verisure/sensor.py - homeassistant/components/verisure/switch.py - homeassistant/components/versasense/* - homeassistant/components/vesync/__init__.py - homeassistant/components/vesync/fan.py - homeassistant/components/vesync/light.py - homeassistant/components/vesync/sensor.py - 
homeassistant/components/vesync/switch.py - homeassistant/components/viaggiatreno/sensor.py - homeassistant/components/vicare/__init__.py - homeassistant/components/vicare/button.py - homeassistant/components/vicare/climate.py - homeassistant/components/vicare/entity.py - homeassistant/components/vicare/number.py - homeassistant/components/vicare/sensor.py - homeassistant/components/vicare/types.py - homeassistant/components/vicare/utils.py - homeassistant/components/vicare/water_heater.py - homeassistant/components/vilfo/__init__.py - homeassistant/components/vilfo/sensor.py - homeassistant/components/vivotek/camera.py - homeassistant/components/vlc/media_player.py - homeassistant/components/vlc_telnet/__init__.py - homeassistant/components/vlc_telnet/media_player.py - homeassistant/components/vodafone_station/__init__.py - homeassistant/components/vodafone_station/button.py - homeassistant/components/vodafone_station/coordinator.py - homeassistant/components/vodafone_station/device_tracker.py - homeassistant/components/vodafone_station/sensor.py - homeassistant/components/volkszaehler/sensor.py - homeassistant/components/volumio/__init__.py - homeassistant/components/volumio/browse_media.py - homeassistant/components/volumio/media_player.py - homeassistant/components/volvooncall/__init__.py - homeassistant/components/volvooncall/binary_sensor.py - homeassistant/components/volvooncall/device_tracker.py - homeassistant/components/volvooncall/lock.py - homeassistant/components/volvooncall/sensor.py - homeassistant/components/volvooncall/switch.py - homeassistant/components/vulcan/__init__.py - homeassistant/components/vulcan/calendar.py - homeassistant/components/vulcan/fetch_data.py - homeassistant/components/w800rf32/* - homeassistant/components/waqi/sensor.py - homeassistant/components/waterfurnace/* - homeassistant/components/watson_iot/* - homeassistant/components/watson_tts/tts.py - homeassistant/components/watttime/__init__.py - 
homeassistant/components/watttime/sensor.py - homeassistant/components/weatherflow/__init__.py - homeassistant/components/weatherflow/sensor.py - homeassistant/components/weatherflow_cloud/__init__.py - homeassistant/components/weatherflow_cloud/coordinator.py - homeassistant/components/weatherflow_cloud/weather.py - homeassistant/components/wiffi/__init__.py - homeassistant/components/wiffi/binary_sensor.py - homeassistant/components/wiffi/sensor.py - homeassistant/components/wiffi/wiffi_strings.py - homeassistant/components/wirelesstag/* - homeassistant/components/wolflink/__init__.py - homeassistant/components/wolflink/sensor.py - homeassistant/components/worldtidesinfo/sensor.py - homeassistant/components/worxlandroid/sensor.py - homeassistant/components/x10/light.py - homeassistant/components/xbox/__init__.py - homeassistant/components/xbox/api.py - homeassistant/components/xbox/base_sensor.py - homeassistant/components/xbox/binary_sensor.py - homeassistant/components/xbox/browse_media.py - homeassistant/components/xbox/coordinator.py - homeassistant/components/xbox/media_player.py - homeassistant/components/xbox/remote.py - homeassistant/components/xbox/sensor.py - homeassistant/components/xeoma/camera.py - homeassistant/components/xiaomi/camera.py - homeassistant/components/xiaomi_aqara/__init__.py - homeassistant/components/xiaomi_aqara/binary_sensor.py - homeassistant/components/xiaomi_aqara/cover.py - homeassistant/components/xiaomi_aqara/light.py - homeassistant/components/xiaomi_aqara/lock.py - homeassistant/components/xiaomi_aqara/sensor.py - homeassistant/components/xiaomi_aqara/switch.py - homeassistant/components/xiaomi_miio/__init__.py - homeassistant/components/xiaomi_miio/air_quality.py - homeassistant/components/xiaomi_miio/alarm_control_panel.py - homeassistant/components/xiaomi_miio/binary_sensor.py - homeassistant/components/xiaomi_miio/button.py - homeassistant/components/xiaomi_miio/device.py - 
homeassistant/components/xiaomi_miio/device_tracker.py - homeassistant/components/xiaomi_miio/fan.py - homeassistant/components/xiaomi_miio/gateway.py - homeassistant/components/xiaomi_miio/humidifier.py - homeassistant/components/xiaomi_miio/light.py - homeassistant/components/xiaomi_miio/number.py - homeassistant/components/xiaomi_miio/remote.py - homeassistant/components/xiaomi_miio/sensor.py - homeassistant/components/xiaomi_miio/switch.py - homeassistant/components/xiaomi_tv/media_player.py - homeassistant/components/xmpp/notify.py - homeassistant/components/xs1/* - homeassistant/components/yale_smart_alarm/__init__.py - homeassistant/components/yale_smart_alarm/alarm_control_panel.py - homeassistant/components/yale_smart_alarm/entity.py - homeassistant/components/yalexs_ble/__init__.py - homeassistant/components/yalexs_ble/binary_sensor.py - homeassistant/components/yalexs_ble/entity.py - homeassistant/components/yalexs_ble/lock.py - homeassistant/components/yalexs_ble/sensor.py - homeassistant/components/yalexs_ble/util.py - homeassistant/components/yamaha_musiccast/__init__.py - homeassistant/components/yamaha_musiccast/media_player.py - homeassistant/components/yamaha_musiccast/number.py - homeassistant/components/yamaha_musiccast/select.py - homeassistant/components/yamaha_musiccast/switch.py - homeassistant/components/yandex_transport/sensor.py - homeassistant/components/yardian/__init__.py - homeassistant/components/yardian/coordinator.py - homeassistant/components/yardian/switch.py - homeassistant/components/yeelightsunflower/light.py - homeassistant/components/yi/camera.py - homeassistant/components/yolink/__init__.py - homeassistant/components/yolink/api.py - homeassistant/components/yolink/binary_sensor.py - homeassistant/components/yolink/climate.py - homeassistant/components/yolink/coordinator.py - homeassistant/components/yolink/cover.py - homeassistant/components/yolink/entity.py - homeassistant/components/yolink/light.py - 
homeassistant/components/yolink/lock.py - homeassistant/components/yolink/number.py - homeassistant/components/yolink/sensor.py - homeassistant/components/yolink/services.py - homeassistant/components/yolink/siren.py - homeassistant/components/yolink/switch.py - homeassistant/components/yolink/valve.py - homeassistant/components/youless/__init__.py - homeassistant/components/youless/sensor.py - homeassistant/components/zabbix/* - homeassistant/components/zamg/coordinator.py - homeassistant/components/zengge/light.py - homeassistant/components/zeroconf/models.py - homeassistant/components/zeroconf/usage.py - homeassistant/components/zestimate/sensor.py - homeassistant/components/zha/core/cluster_handlers/* - homeassistant/components/zha/core/device.py - homeassistant/components/zha/core/gateway.py - homeassistant/components/zha/core/helpers.py - homeassistant/components/zha/light.py - homeassistant/components/zha/websocket_api.py - homeassistant/components/zhong_hong/climate.py - homeassistant/components/ziggo_mediabox_xl/media_player.py - homeassistant/components/zoneminder/* - homeassistant/components/zwave_me/__init__.py - homeassistant/components/zwave_me/binary_sensor.py - homeassistant/components/zwave_me/button.py - homeassistant/components/zwave_me/climate.py - homeassistant/components/zwave_me/cover.py - homeassistant/components/zwave_me/fan.py - homeassistant/components/zwave_me/helpers.py - homeassistant/components/zwave_me/light.py - homeassistant/components/zwave_me/lock.py - homeassistant/components/zwave_me/number.py - homeassistant/components/zwave_me/sensor.py - homeassistant/components/zwave_me/siren.py - homeassistant/components/zwave_me/switch.py - - -[report] -# Regexes for lines to exclude from consideration -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about missing debug-only code: - def __repr__ - - # Don't complain if tests don't hit defensive assertion code: - raise AssertionError - 
raise NotImplementedError - - # TYPE_CHECKING and @overload blocks are never executed during pytest run - if TYPE_CHECKING: - @overload diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index d69b1ac0c7d..23365feffb7 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -74,7 +74,6 @@ If the code communicates with devices, web services, or third-party tools: - [ ] New or updated dependencies have been added to `requirements_all.txt`. Updated by running `python3 -m script.gen_requirements_all`. - [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description. -- [ ] Untested files have been added to `.coveragerc`. ssdp_confirm(None) --> ssdp_confirm({}) --> create_entry() # 2: user(None): scan --> user({...}) --> create_entry() + @staticmethod + @callback + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + """Get the options flow for this handler.""" + return UpnpOptionsFlowHandler(config_entry) + @property def _discoveries(self) -> dict[str, SsdpServiceInfo]: """Get current discoveries.""" @@ -249,9 +264,14 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): CONFIG_ENTRY_HOST: discovery.ssdp_headers["_host"], CONFIG_ENTRY_LOCATION: get_preferred_location(discovery.ssdp_all_locations), } + options = { + CONFIG_ENTRY_FORCE_POLL: False, + } await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False) - return self.async_create_entry(title=user_input["title"], data=data) + return self.async_create_entry( + title=user_input["title"], data=data, options=options + ) async def _async_create_entry_from_discovery( self, @@ -273,4 +293,30 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): CONFIG_ENTRY_MAC_ADDRESS: mac_address, CONFIG_ENTRY_HOST: discovery.ssdp_headers["_host"], } - return self.async_create_entry(title=title, data=data) + options = { + CONFIG_ENTRY_FORCE_POLL: False, + } + return 
self.async_create_entry(title=title, data=data, options=options) + + +class UpnpOptionsFlowHandler(OptionsFlowWithConfigEntry): + """Handle an options flow.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle options flow.""" + if user_input is not None: + return self.async_create_entry(title="", data=user_input) + + data_schema = vol.Schema( + { + vol.Optional( + CONFIG_ENTRY_FORCE_POLL, + default=self.options.get( + CONFIG_ENTRY_FORCE_POLL, DEFAULT_CONFIG_ENTRY_FORCE_POLL + ), + ): bool, + } + ) + return self.async_show_form(step_id="init", data_schema=data_schema) diff --git a/homeassistant/components/upnp/const.py b/homeassistant/components/upnp/const.py index e7b44329546..d85675d8a4d 100644 --- a/homeassistant/components/upnp/const.py +++ b/homeassistant/components/upnp/const.py @@ -21,8 +21,10 @@ TIMESTAMP = "timestamp" DATA_PACKETS = "packets" DATA_RATE_PACKETS_PER_SECOND = f"{DATA_PACKETS}/{UnitOfTime.SECONDS}" WAN_STATUS = "wan_status" +PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4 = "port_mapping_number_of_entries" ROUTER_IP = "ip" ROUTER_UPTIME = "uptime" +CONFIG_ENTRY_FORCE_POLL = "force_poll" CONFIG_ENTRY_ST = "st" CONFIG_ENTRY_UDN = "udn" CONFIG_ENTRY_ORIGINAL_UDN = "original_udn" @@ -32,5 +34,6 @@ CONFIG_ENTRY_HOST = "host" IDENTIFIER_HOST = "upnp_host" IDENTIFIER_SERIAL_NUMBER = "upnp_serial_number" DEFAULT_SCAN_INTERVAL = timedelta(seconds=30).total_seconds() +DEFAULT_CONFIG_ENTRY_FORCE_POLL = False ST_IGD_V1 = "urn:schemas-upnp-org:device:InternetGatewayDevice:1" ST_IGD_V2 = "urn:schemas-upnp-org:device:InternetGatewayDevice:2" diff --git a/homeassistant/components/upnp/coordinator.py b/homeassistant/components/upnp/coordinator.py index 72e14ecc4ff..37ff700bfe2 100644 --- a/homeassistant/components/upnp/coordinator.py +++ b/homeassistant/components/upnp/coordinator.py @@ -1,5 +1,7 @@ """UPnP/IGD coordinator.""" +from collections import defaultdict +from collections.abc import Callable from 
datetime import datetime, timedelta from async_upnp_client.exceptions import UpnpCommunicationError @@ -27,6 +29,7 @@ class UpnpDataUpdateCoordinator( """Initialize.""" self.device = device self.device_entry = device_entry + self._features_by_entity_id: defaultdict[str, set[str]] = defaultdict(set) super().__init__( hass, @@ -35,12 +38,34 @@ class UpnpDataUpdateCoordinator( update_interval=update_interval, ) + def register_entity(self, key: str, entity_id: str) -> Callable[[], None]: + """Register an entity.""" + self._features_by_entity_id[key].add(entity_id) + + def unregister_entity() -> None: + """Unregister entity.""" + self._features_by_entity_id[key].remove(entity_id) + + if not self._features_by_entity_id[key]: + del self._features_by_entity_id[key] + + return unregister_entity + + @property + def _entity_description_keys(self) -> list[str] | None: + """Return a list of entity description keys for which data is required.""" + if not self._features_by_entity_id: + # Must be the first update, no entities attached/enabled yet. 
+ return None + + return list(self._features_by_entity_id) + async def _async_update_data( self, ) -> dict[str, str | datetime | int | float | None]: """Update data.""" try: - return await self.device.async_get_data() + return await self.device.async_get_data(self._entity_description_keys) except UpnpCommunicationError as exception: LOGGER.debug( "Caught exception when updating device: %s, exception: %s", diff --git a/homeassistant/components/upnp/device.py b/homeassistant/components/upnp/device.py index 0b9eecb1b15..923d4828879 100644 --- a/homeassistant/components/upnp/device.py +++ b/homeassistant/components/upnp/device.py @@ -8,9 +8,12 @@ from ipaddress import ip_address from typing import Any from urllib.parse import urlparse -from async_upnp_client.aiohttp import AiohttpSessionRequester +from async_upnp_client.aiohttp import AiohttpNotifyServer, AiohttpSessionRequester from async_upnp_client.client_factory import UpnpFactory -from async_upnp_client.profiles.igd import IgdDevice +from async_upnp_client.const import AddressTupleVXType +from async_upnp_client.exceptions import UpnpConnectionError +from async_upnp_client.profiles.igd import IgdDevice, IgdStateItem +from async_upnp_client.utils import async_get_local_ip from getmac import get_mac_address from homeassistant.core import HomeAssistant @@ -27,12 +30,28 @@ from .const import ( PACKETS_PER_SEC_SENT, PACKETS_RECEIVED, PACKETS_SENT, + PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, ROUTER_IP, ROUTER_UPTIME, TIMESTAMP, WAN_STATUS, ) +TYPE_STATE_ITEM_MAPPING = { + BYTES_RECEIVED: IgdStateItem.BYTES_RECEIVED, + BYTES_SENT: IgdStateItem.BYTES_SENT, + KIBIBYTES_PER_SEC_RECEIVED: IgdStateItem.KIBIBYTES_PER_SEC_RECEIVED, + KIBIBYTES_PER_SEC_SENT: IgdStateItem.KIBIBYTES_PER_SEC_SENT, + PACKETS_PER_SEC_RECEIVED: IgdStateItem.PACKETS_PER_SEC_RECEIVED, + PACKETS_PER_SEC_SENT: IgdStateItem.PACKETS_PER_SEC_SENT, + PACKETS_RECEIVED: IgdStateItem.PACKETS_RECEIVED, + PACKETS_SENT: IgdStateItem.PACKETS_SENT, + ROUTER_IP: 
IgdStateItem.EXTERNAL_IP_ADDRESS, + ROUTER_UPTIME: IgdStateItem.UPTIME, + WAN_STATUS: IgdStateItem.CONNECTION_STATUS, + PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4: IgdStateItem.PORT_MAPPING_NUMBER_OF_ENTRIES, +} + def get_preferred_location(locations: set[str]) -> str: """Get the preferred location (an IPv4 location) from a set of locations.""" @@ -64,26 +83,43 @@ async def async_get_mac_address_from_host(hass: HomeAssistant, host: str) -> str return mac_address -async def async_create_device(hass: HomeAssistant, location: str) -> Device: +async def async_create_device( + hass: HomeAssistant, location: str, force_poll: bool +) -> Device: """Create UPnP/IGD device.""" session = async_get_clientsession(hass, verify_ssl=False) requester = AiohttpSessionRequester(session, with_sleep=True, timeout=20) + # Create UPnP device. factory = UpnpFactory(requester, non_strict=True) upnp_device = await factory.async_create_device(location) + # Create notify server. + _, local_ip = await async_get_local_ip(location) + source: AddressTupleVXType = (local_ip, 0) + notify_server = AiohttpNotifyServer( + requester=requester, + source=source, + ) + await notify_server.async_start_server() + _LOGGER.debug("Started event handler at %s", notify_server.callback_url) + # Create profile wrapper. 
- igd_device = IgdDevice(upnp_device, None) - return Device(hass, igd_device) + igd_device = IgdDevice(upnp_device, notify_server.event_handler) + return Device(hass, igd_device, force_poll) class Device: """Home Assistant representation of a UPnP/IGD device.""" - def __init__(self, hass: HomeAssistant, igd_device: IgdDevice) -> None: + def __init__( + self, hass: HomeAssistant, igd_device: IgdDevice, force_poll: bool + ) -> None: """Initialize UPnP/IGD device.""" self.hass = hass self._igd_device = igd_device + self._force_poll = force_poll + self.coordinator: ( DataUpdateCoordinator[dict[str, str | datetime | int | float | None]] | None ) = None @@ -151,17 +187,55 @@ class Device: """Get string representation.""" return f"IGD Device: {self.name}/{self.udn}::{self.device_type}" - async def async_get_data(self) -> dict[str, str | datetime | int | float | None]: + @property + def force_poll(self) -> bool: + """Get force_poll.""" + return self._force_poll + + async def async_set_force_poll(self, force_poll: bool) -> None: + """Set force_poll, and (un)subscribe if needed.""" + self._force_poll = force_poll + + if self._force_poll: + # No need for subscriptions, as eventing will never be used. 
+ await self.async_unsubscribe_services() + elif not self._force_poll and not self._igd_device.is_subscribed: + await self.async_subscribe_services() + + async def async_subscribe_services(self) -> None: + """Subscribe to services.""" + try: + await self._igd_device.async_subscribe_services(auto_resubscribe=True) + except UpnpConnectionError as ex: + _LOGGER.debug( + "Error subscribing to services, falling back to forced polling: %s", ex + ) + await self.async_set_force_poll(True) + + async def async_unsubscribe_services(self) -> None: + """Unsubscribe from services.""" + await self._igd_device.async_unsubscribe_services() + + async def async_get_data( + self, entity_description_keys: list[str] | None + ) -> dict[str, str | datetime | int | float | None]: """Get all data from device.""" - _LOGGER.debug("Getting data for device: %s", self) - igd_state = await self._igd_device.async_get_traffic_and_status_data() - status_info = igd_state.status_info - if status_info is not None and not isinstance(status_info, BaseException): - wan_status = status_info.connection_status - router_uptime = status_info.uptime + if not entity_description_keys: + igd_state_items = None else: - wan_status = None - router_uptime = None + igd_state_items = { + TYPE_STATE_ITEM_MAPPING[key] for key in entity_description_keys + } + + _LOGGER.debug( + "Getting data for device: %s, state_items: %s, force_poll: %s", + self, + igd_state_items, + self._force_poll, + ) + igd_state = await self._igd_device.async_get_traffic_and_status_data( + igd_state_items, force_poll=self._force_poll + ) def get_value(value: Any) -> Any: if value is None or isinstance(value, BaseException): @@ -175,11 +249,14 @@ class Device: BYTES_SENT: get_value(igd_state.bytes_sent), PACKETS_RECEIVED: get_value(igd_state.packets_received), PACKETS_SENT: get_value(igd_state.packets_sent), - WAN_STATUS: wan_status, - ROUTER_UPTIME: router_uptime, + WAN_STATUS: get_value(igd_state.connection_status), + ROUTER_UPTIME: 
get_value(igd_state.uptime), ROUTER_IP: get_value(igd_state.external_ip_address), KIBIBYTES_PER_SEC_RECEIVED: igd_state.kibibytes_per_sec_received, KIBIBYTES_PER_SEC_SENT: igd_state.kibibytes_per_sec_sent, PACKETS_PER_SEC_RECEIVED: igd_state.packets_per_sec_received, PACKETS_PER_SEC_SENT: igd_state.packets_per_sec_sent, + PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4: get_value( + igd_state.port_mapping_number_of_entries + ), } diff --git a/homeassistant/components/upnp/icons.json b/homeassistant/components/upnp/icons.json index 1d4ebaf183d..b6451f0fca8 100644 --- a/homeassistant/components/upnp/icons.json +++ b/homeassistant/components/upnp/icons.json @@ -33,6 +33,9 @@ }, "packet_upload_speed": { "default": "mdi:server-network" + }, + "port_mapping_number_of_entries_ipv4": { + "default": "mdi:server-network" } } } diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index 7d353a475c7..30054af0512 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.38.3", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.40.0", "getmac==0.9.4"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/upnp/sensor.py b/homeassistant/components/upnp/sensor.py index df7128830b3..d6da50c877d 100644 --- a/homeassistant/components/upnp/sensor.py +++ b/homeassistant/components/upnp/sensor.py @@ -33,6 +33,7 @@ from .const import ( PACKETS_PER_SEC_SENT, PACKETS_RECEIVED, PACKETS_SENT, + PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, ROUTER_IP, ROUTER_UPTIME, WAN_STATUS, @@ -99,6 +100,12 @@ SENSOR_DESCRIPTIONS: tuple[UpnpSensorEntityDescription, ...] 
= ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), + UpnpSensorEntityDescription( + key=PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, + translation_key="port_mapping_number_of_entries_ipv4", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), UpnpSensorEntityDescription( key=BYTES_RECEIVED, translation_key="download_speed", @@ -159,8 +166,8 @@ async def async_setup_entry( if coordinator.data.get(entity_description.key) is not None ] - LOGGER.debug("Adding sensor entities: %s", entities) async_add_entities(entities) + LOGGER.debug("Added sensor entities: %s", entities) class UpnpSensor(UpnpEntity, SensorEntity): @@ -174,3 +181,13 @@ class UpnpSensor(UpnpEntity, SensorEntity): if (key := self.entity_description.value_key) is None: return None return self.coordinator.data[key] + + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + await super().async_added_to_hass() + + # Register self at coordinator. 
+ key = self.entity_description.key + entity_id = self.entity_id + unregister = self.coordinator.register_entity(key, entity_id) + self.async_on_remove(unregister) diff --git a/homeassistant/components/upnp/strings.json b/homeassistant/components/upnp/strings.json index 7ce1798c351..bb414fa95f8 100644 --- a/homeassistant/components/upnp/strings.json +++ b/homeassistant/components/upnp/strings.json @@ -21,7 +21,8 @@ "step": { "init": { "data": { - "scan_interval": "Update interval (seconds, minimal 30)" + "scan_interval": "Update interval (seconds, minimal 30)", + "force_poll": "Force polling of all data" } } } @@ -65,6 +66,9 @@ }, "wan_status": { "name": "WAN status" + }, + "port_mapping_number_of_entries_ipv4": { + "name": "Number of port mapping entries (IPv4)" } } } diff --git a/homeassistant/components/usgs_earthquakes_feed/geo_location.py b/homeassistant/components/usgs_earthquakes_feed/geo_location.py index c8ee88a84ed..aa9817eab7d 100644 --- a/homeassistant/components/usgs_earthquakes_feed/geo_location.py +++ b/homeassistant/components/usgs_earthquakes_feed/geo_location.py @@ -13,7 +13,10 @@ from aio_geojson_usgs_earthquakes.feed_entry import ( ) import voluptuous as vol -from homeassistant.components.geo_location import PLATFORM_SCHEMA, GeolocationEvent +from homeassistant.components.geo_location import ( + PLATFORM_SCHEMA as GEO_LOCATION_PLATFORM_SCHEMA, + GeolocationEvent, +) from homeassistant.const import ( ATTR_TIME, CONF_LATITUDE, @@ -81,7 +84,7 @@ VALID_FEED_TYPES = [ "past_month_all_earthquakes", ] -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = GEO_LOCATION_PLATFORM_SCHEMA.extend( { vol.Required(CONF_FEED_TYPE): vol.In(VALID_FEED_TYPES), vol.Optional(CONF_LATITUDE): cv.latitude, @@ -273,17 +276,17 @@ class UsgsEarthquakesEvent(GeolocationEvent): @property def extra_state_attributes(self) -> dict[str, Any]: """Return the device state attributes.""" - attributes = {} - for key, value in ( - (ATTR_EXTERNAL_ID, self._external_id), - 
(ATTR_PLACE, self._place), - (ATTR_MAGNITUDE, self._magnitude), - (ATTR_TIME, self._time), - (ATTR_UPDATED, self._updated), - (ATTR_STATUS, self._status), - (ATTR_TYPE, self._type), - (ATTR_ALERT, self._alert), - ): - if value or isinstance(value, bool): - attributes[key] = value - return attributes + return { + key: value + for key, value in ( + (ATTR_EXTERNAL_ID, self._external_id), + (ATTR_PLACE, self._place), + (ATTR_MAGNITUDE, self._magnitude), + (ATTR_TIME, self._time), + (ATTR_UPDATED, self._updated), + (ATTR_STATUS, self._status), + (ATTR_TYPE, self._type), + (ATTR_ALERT, self._alert), + ) + if value or isinstance(value, bool) + } diff --git a/homeassistant/components/uvc/camera.py b/homeassistant/components/uvc/camera.py index 3162fc67566..cd9594c7d31 100644 --- a/homeassistant/components/uvc/camera.py +++ b/homeassistant/components/uvc/camera.py @@ -10,7 +10,11 @@ import requests from uvcclient import camera as uvc_camera, nvr import voluptuous as vol -from homeassistant.components.camera import PLATFORM_SCHEMA, Camera, CameraEntityFeature +from homeassistant.components.camera import ( + PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, + Camera, + CameraEntityFeature, +) from homeassistant.const import CONF_PASSWORD, CONF_PORT, CONF_SSL from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady @@ -28,7 +32,7 @@ DEFAULT_PASSWORD = "ubnt" DEFAULT_PORT = 7080 DEFAULT_SSL = False -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = CAMERA_PLATFORM_SCHEMA.extend( { vol.Required(CONF_NVR): cv.string, vol.Required(CONF_KEY): cv.string, diff --git a/homeassistant/components/uvc/manifest.json b/homeassistant/components/uvc/manifest.json index 57e798c3fa6..c72b865b5ef 100644 --- a/homeassistant/components/uvc/manifest.json +++ b/homeassistant/components/uvc/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/uvc", "iot_class": "local_polling", "loggers": ["uvcclient"], - 
"requirements": ["uvcclient==0.11.0"] + "requirements": ["uvcclient==0.12.1"] } diff --git a/homeassistant/components/v2c/icons.json b/homeassistant/components/v2c/icons.json index 1b76b669956..6b0a41bf752 100644 --- a/homeassistant/components/v2c/icons.json +++ b/homeassistant/components/v2c/icons.json @@ -21,6 +21,15 @@ }, "battery_power": { "default": "mdi:home-battery" + }, + "ssid": { + "default": "mdi:wifi" + }, + "ip_address": { + "default": "mdi:ip" + }, + "signal_status": { + "default": "mdi:signal" } }, "switch": { diff --git a/homeassistant/components/v2c/manifest.json b/homeassistant/components/v2c/manifest.json index ffe4b52ee6e..3a6eab0f335 100644 --- a/homeassistant/components/v2c/manifest.json +++ b/homeassistant/components/v2c/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/v2c", "iot_class": "local_polling", - "requirements": ["pytrydan==0.7.0"] + "requirements": ["pytrydan==0.8.0"] } diff --git a/homeassistant/components/v2c/number.py b/homeassistant/components/v2c/number.py index 2ff70226132..1540b098cf1 100644 --- a/homeassistant/components/v2c/number.py +++ b/homeassistant/components/v2c/number.py @@ -13,6 +13,7 @@ from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) +from homeassistant.const import EntityCategory, UnitOfElectricCurrent from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -37,11 +38,34 @@ TRYDAN_NUMBER_SETTINGS = ( key="intensity", translation_key="intensity", device_class=NumberDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, native_min_value=MIN_INTENSITY, native_max_value=MAX_INTENSITY, value_fn=lambda evse_data: evse_data.intensity, update_fn=lambda evse, value: evse.intensity(value), ), + V2CSettingsNumberEntityDescription( + key="min_intensity", + translation_key="min_intensity", + device_class=NumberDeviceClass.CURRENT, + 
entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + native_min_value=MIN_INTENSITY, + native_max_value=MAX_INTENSITY, + value_fn=lambda evse_data: evse_data.min_intensity, + update_fn=lambda evse, value: evse.min_intensity(value), + ), + V2CSettingsNumberEntityDescription( + key="max_intensity", + translation_key="max_intensity", + device_class=NumberDeviceClass.CURRENT, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + native_min_value=MIN_INTENSITY, + native_max_value=MAX_INTENSITY, + value_fn=lambda evse_data: evse_data.max_intensity, + update_fn=lambda evse, value: evse.max_intensity(value), + ), ) diff --git a/homeassistant/components/v2c/sensor.py b/homeassistant/components/v2c/sensor.py index fc0cc0bfaa8..97853740e9d 100644 --- a/homeassistant/components/v2c/sensor.py +++ b/homeassistant/components/v2c/sensor.py @@ -15,7 +15,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfTime +from homeassistant.const import ( + EntityCategory, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfPower, + UnitOfTime, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -45,12 +51,20 @@ TRYDAN_SENSORS = ( V2CSensorEntityDescription( key="charge_power", translation_key="charge_power", - icon="mdi:ev-station", native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.POWER, value_fn=lambda evse_data: evse_data.charge_power, ), + V2CSensorEntityDescription( + key="voltage_installation", + translation_key="voltage_installation", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.VOLTAGE, + value_fn=lambda 
evse_data: evse_data.voltage_installation, + entity_registry_enabled_default=False, + ), V2CSensorEntityDescription( key="charge_energy", translation_key="charge_energy", @@ -86,6 +100,7 @@ TRYDAN_SENSORS = ( V2CSensorEntityDescription( key="meter_error", translation_key="meter_error", + entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda evse_data: get_meter_value(evse_data.slave_error), entity_registry_enabled_default=False, device_class=SensorDeviceClass.ENUM, @@ -100,6 +115,28 @@ TRYDAN_SENSORS = ( value_fn=lambda evse_data: evse_data.battery_power, entity_registry_enabled_default=False, ), + V2CSensorEntityDescription( + key="ssid", + translation_key="ssid", + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda evse_data: evse_data.SSID, + entity_registry_enabled_default=False, + ), + V2CSensorEntityDescription( + key="ip_address", + translation_key="ip_address", + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda evse_data: evse_data.IP, + entity_registry_enabled_default=False, + ), + V2CSensorEntityDescription( + key="signal_status", + translation_key="signal_status", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda evse_data: evse_data.signal_status, + entity_registry_enabled_default=False, + ), ) diff --git a/homeassistant/components/v2c/strings.json b/homeassistant/components/v2c/strings.json index 3342652cfb4..d52b8f066f9 100644 --- a/homeassistant/components/v2c/strings.json +++ b/homeassistant/components/v2c/strings.json @@ -33,12 +33,21 @@ "number": { "intensity": { "name": "Intensity" + }, + "max_intensity": { + "name": "Max intensity" + }, + "min_intensity": { + "name": "Min intensity" } }, "sensor": { "charge_power": { "name": "Charge power" }, + "voltage_installation": { + "name": "Installation voltage" + }, "charge_energy": { "name": "Charge energy" }, @@ -93,6 +102,15 @@ "empty_message": "Empty message", "undefined_error": "Undefined error" } + }, + "ssid": { + 
"name": "SSID" + }, + "ip_address": { + "name": "IP address" + }, + "signal_status": { + "name": "Signal status" } }, "switch": { diff --git a/homeassistant/components/v2c/switch.py b/homeassistant/components/v2c/switch.py index cd89e954275..cca7da70e48 100644 --- a/homeassistant/components/v2c/switch.py +++ b/homeassistant/components/v2c/switch.py @@ -111,12 +111,12 @@ class V2CSwitchEntity(V2CBaseEntity, SwitchEntity): """Return the state of the EVSE switch.""" return self.entity_description.value_fn(self.data) - async def async_turn_on(self): + async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the EVSE switch.""" await self.entity_description.turn_on_fn(self.coordinator.evse) await self.coordinator.async_request_refresh() - async def async_turn_off(self): + async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the EVSE switch.""" await self.entity_description.turn_off_fn(self.coordinator.evse) await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/vacuum/__init__.py b/homeassistant/components/vacuum/__init__.py index f68f9a4f082..867e25d4b2a 100644 --- a/homeassistant/components/vacuum/__init__.py +++ b/homeassistant/components/vacuum/__init__.py @@ -23,11 +23,6 @@ from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API ) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.config_validation import ( # noqa: F401 - PLATFORM_SCHEMA, - PLATFORM_SCHEMA_BASE, - make_entity_service_schema, -) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.icon import icon_for_battery_level @@ -39,6 +34,8 @@ from .const import DOMAIN, STATE_CLEANING, STATE_DOCKED, STATE_ERROR, STATE_RETU _LOGGER = logging.getLogger(__name__) ENTITY_ID_FORMAT = DOMAIN + ".{}" +PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA +PLATFORM_SCHEMA_BASE = 
cv.PLATFORM_SCHEMA_BASE SCAN_INTERVAL = timedelta(seconds=20) ATTR_BATTERY_ICON = "battery_icon" @@ -119,37 +116,37 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_START, - {}, + None, "async_start", [VacuumEntityFeature.START], ) component.async_register_entity_service( SERVICE_PAUSE, - {}, + None, "async_pause", [VacuumEntityFeature.PAUSE], ) component.async_register_entity_service( SERVICE_RETURN_TO_BASE, - {}, + None, "async_return_to_base", [VacuumEntityFeature.RETURN_HOME], ) component.async_register_entity_service( SERVICE_CLEAN_SPOT, - {}, + None, "async_clean_spot", [VacuumEntityFeature.CLEAN_SPOT], ) component.async_register_entity_service( SERVICE_LOCATE, - {}, + None, "async_locate", [VacuumEntityFeature.LOCATE], ) component.async_register_entity_service( SERVICE_STOP, - {}, + None, "async_stop", [VacuumEntityFeature.STOP], ) diff --git a/homeassistant/components/vallox/fan.py b/homeassistant/components/vallox/fan.py index a5bdf0983ae..4fe2cfd45d4 100644 --- a/homeassistant/components/vallox/fan.py +++ b/homeassistant/components/vallox/fan.py @@ -77,7 +77,13 @@ class ValloxFanEntity(ValloxEntity, FanEntity): """Representation of the fan.""" _attr_name = None - _attr_supported_features = FanEntityFeature.PRESET_MODE | FanEntityFeature.SET_SPEED + _attr_supported_features = ( + FanEntityFeature.PRESET_MODE + | FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/vallox/manifest.json b/homeassistant/components/vallox/manifest.json index 9a57358cd14..bbc806d8f38 100644 --- a/homeassistant/components/vallox/manifest.json +++ b/homeassistant/components/vallox/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vallox", "iot_class": "local_polling", "loggers": ["vallox_websocket_api"], - 
"requirements": ["vallox-websocket-api==5.1.1"] + "requirements": ["vallox-websocket-api==5.3.0"] } diff --git a/homeassistant/components/vallox/sensor.py b/homeassistant/components/vallox/sensor.py index 281bc002f68..0bb509a9c5a 100644 --- a/homeassistant/components/vallox/sensor.py +++ b/homeassistant/components/vallox/sensor.py @@ -18,6 +18,7 @@ from homeassistant.const import ( REVOLUTIONS_PER_MINUTE, EntityCategory, UnitOfTemperature, + UnitOfTime, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -127,6 +128,18 @@ class ValloxCellStateSensor(ValloxSensorEntity): return VALLOX_CELL_STATE_TO_STR.get(super_native_value) +class ValloxProfileDurationSensor(ValloxSensorEntity): + """Child class for profile duration reporting.""" + + @property + def native_value(self) -> StateType: + """Return the value reported by the sensor.""" + + return self.coordinator.data.get_remaining_profile_duration( + self.coordinator.data.profile + ) + + @dataclass(frozen=True) class ValloxSensorEntityDescription(SensorEntityDescription): """Describes Vallox sensor entity.""" @@ -253,6 +266,14 @@ SENSOR_ENTITIES: tuple[ValloxSensorEntityDescription, ...] 
= ( native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, entity_registry_enabled_default=False, ), + ValloxSensorEntityDescription( + key="profile_duration", + translation_key="profile_duration", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTime.MINUTES, + entity_type=ValloxProfileDurationSensor, + ), ) diff --git a/homeassistant/components/vallox/strings.json b/homeassistant/components/vallox/strings.json index 072b59b78e0..4df57b81bb5 100644 --- a/homeassistant/components/vallox/strings.json +++ b/homeassistant/components/vallox/strings.json @@ -87,6 +87,9 @@ }, "efficiency": { "name": "Efficiency" + }, + "profile_duration": { + "name": "Profile duration" } }, "switch": { diff --git a/homeassistant/components/valve/__init__.py b/homeassistant/components/valve/__init__.py index 0363ef55832..04ce12e8a8f 100644 --- a/homeassistant/components/valve/__init__.py +++ b/homeassistant/components/valve/__init__.py @@ -23,10 +23,7 @@ from homeassistant.const import ( STATE_OPENING, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.config_validation import ( # noqa: F401 - PLATFORM_SCHEMA, - PLATFORM_SCHEMA_BASE, -) +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType @@ -34,9 +31,10 @@ from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) DOMAIN = "valve" -SCAN_INTERVAL = timedelta(seconds=15) - ENTITY_ID_FORMAT = DOMAIN + ".{}" +PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA +PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE +SCAN_INTERVAL = timedelta(seconds=15) class ValveDeviceClass(StrEnum): @@ -73,11 +71,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) 
component.async_register_entity_service( - SERVICE_OPEN_VALVE, {}, "async_handle_open_valve", [ValveEntityFeature.OPEN] + SERVICE_OPEN_VALVE, None, "async_handle_open_valve", [ValveEntityFeature.OPEN] ) component.async_register_entity_service( - SERVICE_CLOSE_VALVE, {}, "async_handle_close_valve", [ValveEntityFeature.CLOSE] + SERVICE_CLOSE_VALVE, + None, + "async_handle_close_valve", + [ValveEntityFeature.CLOSE], ) component.async_register_entity_service( @@ -92,12 +93,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) component.async_register_entity_service( - SERVICE_STOP_VALVE, {}, "async_stop_valve", [ValveEntityFeature.STOP] + SERVICE_STOP_VALVE, None, "async_stop_valve", [ValveEntityFeature.STOP] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE], ) @@ -225,7 +226,8 @@ class ValveEntity(Entity): async def async_handle_open_valve(self) -> None: """Open the valve.""" if self.supported_features & ValveEntityFeature.SET_POSITION: - return await self.async_set_valve_position(100) + await self.async_set_valve_position(100) + return await self.async_open_valve() def close_valve(self) -> None: @@ -240,7 +242,8 @@ class ValveEntity(Entity): async def async_handle_close_valve(self) -> None: """Close the valve.""" if self.supported_features & ValveEntityFeature.SET_POSITION: - return await self.async_set_valve_position(0) + await self.async_set_valve_position(0) + return await self.async_close_valve() async def async_toggle(self) -> None: diff --git a/homeassistant/components/valve/icons.json b/homeassistant/components/valve/icons.json index 349196658d4..2c887ebf273 100644 --- a/homeassistant/components/valve/icons.json +++ b/homeassistant/components/valve/icons.json @@ -1,13 +1,19 @@ { "entity_component": { "_": { - "default": "mdi:pipe-valve" + "default": "mdi:valve-open", + "state": { + "closed": "mdi:valve-closed" + } }, "gas": { "default": 
"mdi:meter-gas" }, "water": { - "default": "mdi:pipe-valve" + "default": "mdi:valve-open", + "state": { + "closed": "mdi:valve-closed" + } } }, "services": { diff --git a/homeassistant/components/vasttrafik/sensor.py b/homeassistant/components/vasttrafik/sensor.py index 611f571336c..48f659103e1 100644 --- a/homeassistant/components/vasttrafik/sensor.py +++ b/homeassistant/components/vasttrafik/sensor.py @@ -8,7 +8,10 @@ import logging import vasttrafik import voluptuous as vol -from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) from homeassistant.const import CONF_DELAY, CONF_NAME from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv @@ -38,7 +41,7 @@ DEFAULT_DELAY = 0 MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=120) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_KEY): cv.string, vol.Required(CONF_SECRET): cv.string, diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index 479b7f02024..685f8b49500 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -89,9 +89,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True def check_entry_id(interface: str) -> str: - for entry in hass.config_entries.async_entries(DOMAIN): - if "port" in entry.data and entry.data["port"] == interface: - return entry.entry_id + for config_entry in hass.config_entries.async_entries(DOMAIN): + if "port" in config_entry.data and config_entry.data["port"] == interface: + return config_entry.entry_id raise vol.Invalid( "The interface provided is not defined as a port in a Velbus integration" ) @@ -119,7 +119,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def 
set_memo_text(call: ServiceCall) -> None: """Handle Memo Text service call.""" memo_text = call.data[CONF_MEMO_TEXT] - memo_text.hass = hass await ( hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] .get_module(call.data[CONF_ADDRESS]) diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 823d682d339..8b9d927f3d7 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -66,12 +66,16 @@ class VelbusCover(VelbusEntity, CoverEntity): @property def is_opening(self) -> bool: """Return if the cover is opening.""" - return self._channel.is_opening() + if opening := self._channel.is_opening(): + self._assumed_closed = False + return opening @property def is_closing(self) -> bool: """Return if the cover is closing.""" - return self._channel.is_closing() + if closing := self._channel.is_closing(): + self._assumed_closed = True + return closing @property def current_cover_position(self) -> int | None: @@ -89,13 +93,11 @@ class VelbusCover(VelbusEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self._channel.open() - self._assumed_closed = False @api_call async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self._channel.close() - self._assumed_closed = True @api_call async def async_stop_cover(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index f778533cad8..c1cf2951bbd 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.5.1"], + "requirements": ["velbus-aio==2024.7.6"], "usb": [ { "vid": "10CF", diff --git a/homeassistant/components/velux/__init__.py b/homeassistant/components/velux/__init__.py index 4b89fc66a84..614ed810429 100644 --- 
a/homeassistant/components/velux/__init__.py +++ b/homeassistant/components/velux/__init__.py @@ -1,48 +1,14 @@ """Support for VELUX KLF 200 devices.""" from pyvlx import Node, PyVLX, PyVLXException -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant, ServiceCall, callback -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, LOGGER, PLATFORMS -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - } - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the velux component.""" - if DOMAIN not in config: - return True - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config[DOMAIN], - ) - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up the velux component.""" @@ -108,10 +74,14 @@ class VeluxEntity(Entity): _attr_should_poll = False - def __init__(self, node: Node) -> None: + def __init__(self, node: Node, config_entry_id: str) -> None: """Initialize the Velux device.""" self.node = node - self._attr_unique_id = node.serial_number + self._attr_unique_id = ( + node.serial_number + if node.serial_number + else f"{config_entry_id}_{node.node_id}" + ) self._attr_name = node.name if node.name else f"#{node.node_id}" @callback diff --git a/homeassistant/components/velux/config_flow.py b/homeassistant/components/velux/config_flow.py index c0d4ec8035b..f4bfa13b4d5 100644 --- 
a/homeassistant/components/velux/config_flow.py +++ b/homeassistant/components/velux/config_flow.py @@ -1,15 +1,11 @@ """Config flow for Velux integration.""" -from typing import Any - from pyvlx import PyVLX, PyVLXException import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import DOMAIN, LOGGER @@ -24,59 +20,6 @@ DATA_SCHEMA = vol.Schema( class VeluxConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for velux.""" - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry.""" - - def create_repair(error: str | None = None) -> None: - if error: - async_create_issue( - self.hass, - DOMAIN, - f"deprecated_yaml_import_issue_{error}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{error}", - ) - else: - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Velux", - }, - ) - - for entry in self._async_current_entries(): - if entry.data[CONF_HOST] == config[CONF_HOST]: - create_repair() - return self.async_abort(reason="already_configured") - - pyvlx = PyVLX(host=config[CONF_HOST], password=config[CONF_PASSWORD]) - try: - await pyvlx.connect() - await pyvlx.disconnect() - except (PyVLXException, ConnectionError): - create_repair("cannot_connect") - return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 - 
create_repair("unknown") - return self.async_abort(reason="unknown") - - create_repair() - return self.async_create_entry( - title=config[CONF_HOST], - data=config, - ) - async def async_step_user( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/velux/cover.py b/homeassistant/components/velux/cover.py index c8688e4d186..cd7564eee81 100644 --- a/homeassistant/components/velux/cover.py +++ b/homeassistant/components/velux/cover.py @@ -29,7 +29,7 @@ async def async_setup_entry( """Set up cover(s) for Velux platform.""" module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxCover(node) + VeluxCover(node, config.entry_id) for node in module.pyvlx.nodes if isinstance(node, OpeningDevice) ) @@ -41,9 +41,9 @@ class VeluxCover(VeluxEntity, CoverEntity): _is_blind = False node: OpeningDevice - def __init__(self, node: OpeningDevice) -> None: + def __init__(self, node: OpeningDevice, config_entry_id: str) -> None: """Initialize VeluxCover.""" - super().__init__(node) + super().__init__(node, config_entry_id) self._attr_device_class = CoverDeviceClass.WINDOW if isinstance(node, Awning): self._attr_device_class = CoverDeviceClass.AWNING diff --git a/homeassistant/components/velux/light.py b/homeassistant/components/velux/light.py index bbe9822648e..e98632701f3 100644 --- a/homeassistant/components/velux/light.py +++ b/homeassistant/components/velux/light.py @@ -23,7 +23,7 @@ async def async_setup_entry( module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxLight(node) + VeluxLight(node, config.entry_id) for node in module.pyvlx.nodes if isinstance(node, LighteningDevice) ) diff --git a/homeassistant/components/velux/strings.json b/homeassistant/components/velux/strings.json index 3964c22efe2..5b7b459a3f7 100644 --- a/homeassistant/components/velux/strings.json +++ b/homeassistant/components/velux/strings.json @@ -17,16 +17,6 @@ "already_configured": 
"[%key:common::config_flow::abort::already_configured_device%]" } }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Velux YAML configuration import cannot connect to server", - "description": "Configuring Velux using YAML is being removed but there was an connection error importing your YAML configuration.\n\nMake sure your home assistant can reach the KLF 200." - }, - "deprecated_yaml_import_issue_unknown": { - "title": "The Velux YAML configuration import failed with unknown error raised by pyvlx", - "description": "Configuring Velux using YAML is being removed but there was an unknown error importing your YAML configuration.\n\nCheck your configuration or have a look at the documentation of the integration." - } - }, "services": { "reboot_gateway": { "name": "Reboot gateway", diff --git a/homeassistant/components/venstar/climate.py b/homeassistant/components/venstar/climate.py index f47cf59be9c..ea833dc3183 100644 --- a/homeassistant/components/venstar/climate.py +++ b/homeassistant/components/venstar/climate.py @@ -10,7 +10,7 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_LOW, FAN_AUTO, FAN_ON, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as CLIMATE_PLATFORM_SCHEMA, PRESET_AWAY, PRESET_NONE, ClimateEntity, @@ -48,7 +48,7 @@ from .const import ( ) from .coordinator import VenstarDataUpdateCoordinator -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = CLIMATE_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PASSWORD): cv.string, diff --git a/homeassistant/components/vera/config_flow.py b/homeassistant/components/vera/config_flow.py index fcb1e5f013e..181849f46a1 100644 --- a/homeassistant/components/vera/config_flow.py +++ b/homeassistant/components/vera/config_flow.py @@ -22,6 +22,7 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_EXCLUDE, CONF_LIGHTS, CONF_SOURCE from homeassistant.core import callback from homeassistant.helpers import 
entity_registry as er +from homeassistant.helpers.typing import VolDictType from .const import CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN @@ -49,9 +50,7 @@ def new_options(lights: list[int], exclude: list[int]) -> dict[str, list[int]]: return {CONF_LIGHTS: lights, CONF_EXCLUDE: exclude} -def options_schema( - options: Mapping[str, Any] | None = None, -) -> dict[vol.Optional, type[str]]: +def options_schema(options: Mapping[str, Any] | None = None) -> VolDictType: """Return options schema.""" options = options or {} return { diff --git a/homeassistant/components/vera/cover.py b/homeassistant/components/vera/cover.py index 542680925f2..25ffe987d5e 100644 --- a/homeassistant/components/vera/cover.py +++ b/homeassistant/components/vera/cover.py @@ -61,10 +61,11 @@ class VeraCover(VeraDevice[veraApi.VeraCurtain], CoverEntity): self.schedule_update_ha_state() @property - def is_closed(self) -> bool: + def is_closed(self) -> bool | None: """Return if the cover is closed.""" if self.current_cover_position is not None: return self.current_cover_position == 0 + return None def open_cover(self, **kwargs: Any) -> None: """Open the cover.""" diff --git a/homeassistant/components/verisure/__init__.py b/homeassistant/components/verisure/__init__.py index 9e5f0ca2703..0f8c8d936ef 100644 --- a/homeassistant/components/verisure/__init__.py +++ b/homeassistant/components/verisure/__init__.py @@ -12,7 +12,6 @@ from homeassistant.const import CONF_EMAIL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import entity_registry as er -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.storage import STORAGE_DIR from .const import CONF_LOCK_DEFAULT_CODE, DOMAIN, LOGGER @@ -27,8 +26,6 @@ PLATFORMS = [ Platform.SWITCH, ] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up 
Verisure from a config entry.""" diff --git a/homeassistant/components/verisure/camera.py b/homeassistant/components/verisure/camera.py index 72f5ab93c70..50606a49eab 100644 --- a/homeassistant/components/verisure/camera.py +++ b/homeassistant/components/verisure/camera.py @@ -33,7 +33,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_CAPTURE_SMARTCAM, - {}, + None, VerisureSmartcam.capture_smartcam.__name__, ) diff --git a/homeassistant/components/verisure/lock.py b/homeassistant/components/verisure/lock.py index da2bc2ced2b..5c56fc0df2c 100644 --- a/homeassistant/components/verisure/lock.py +++ b/homeassistant/components/verisure/lock.py @@ -41,12 +41,12 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_DISABLE_AUTOLOCK, - {}, + None, VerisureDoorlock.disable_autolock.__name__, ) platform.async_register_entity_service( SERVICE_ENABLE_AUTOLOCK, - {}, + None, VerisureDoorlock.enable_autolock.__name__, ) diff --git a/homeassistant/components/versasense/sensor.py b/homeassistant/components/versasense/sensor.py index 59d092ccdc1..4c861bf5787 100644 --- a/homeassistant/components/versasense/sensor.py +++ b/homeassistant/components/versasense/sensor.py @@ -30,7 +30,7 @@ async def async_setup_platform( ) -> None: """Set up the sensor platform.""" if discovery_info is None: - return None + return consumer = hass.data[DOMAIN][KEY_CONSUMER] diff --git a/homeassistant/components/versasense/switch.py b/homeassistant/components/versasense/switch.py index 195045882ff..10bca79e536 100644 --- a/homeassistant/components/versasense/switch.py +++ b/homeassistant/components/versasense/switch.py @@ -33,7 +33,7 @@ async def async_setup_platform( ) -> None: """Set up actuator platform.""" if discovery_info is None: - return None + return consumer = hass.data[DOMAIN][KEY_CONSUMER] diff --git a/homeassistant/components/version/__init__.py 
b/homeassistant/components/version/__init__.py index 4112cc51e46..cf13821dc8a 100644 --- a/homeassistant/components/version/__init__.py +++ b/homeassistant/components/version/__init__.py @@ -16,15 +16,16 @@ from .const import ( CONF_CHANNEL, CONF_IMAGE, CONF_SOURCE, - DOMAIN, PLATFORMS, ) from .coordinator import VersionDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) +type VersionConfigEntry = ConfigEntry[VersionDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: VersionConfigEntry) -> bool: """Set up the version integration from a config entry.""" board = entry.data[CONF_BOARD] @@ -50,14 +51,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: VersionConfigEntry) -> bool: """Unload the config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/version/binary_sensor.py b/homeassistant/components/version/binary_sensor.py index ff4f51e409f..827029e1d8c 100644 --- a/homeassistant/components/version/binary_sensor.py +++ b/homeassistant/components/version/binary_sensor.py @@ -9,13 +9,12 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_NAME, EntityCategory, __version__ as HA_VERSION from 
homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import CONF_SOURCE, DEFAULT_NAME, DOMAIN -from .coordinator import VersionDataUpdateCoordinator +from . import VersionConfigEntry +from .const import CONF_SOURCE, DEFAULT_NAME from .entity import VersionEntity HA_VERSION_OBJECT = AwesomeVersion(HA_VERSION) @@ -23,11 +22,11 @@ HA_VERSION_OBJECT = AwesomeVersion(HA_VERSION) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: VersionConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up version binary_sensors.""" - coordinator: VersionDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data if (source := config_entry.data[CONF_SOURCE]) == "local": return diff --git a/homeassistant/components/version/diagnostics.py b/homeassistant/components/version/diagnostics.py index 194027d6ef4..ca7318f468b 100644 --- a/homeassistant/components/version/diagnostics.py +++ b/homeassistant/components/version/diagnostics.py @@ -6,20 +6,18 @@ from typing import Any from attr import asdict -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from .const import DOMAIN -from .coordinator import VersionDataUpdateCoordinator +from . 
import VersionConfigEntry async def async_get_config_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: VersionConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: VersionDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data device_registry = dr.async_get(hass) entity_registry = er.async_get(hass) diff --git a/homeassistant/components/version/sensor.py b/homeassistant/components/version/sensor.py index 6b0565b8cb3..e1d552bcd36 100644 --- a/homeassistant/components/version/sensor.py +++ b/homeassistant/components/version/sensor.py @@ -5,24 +5,23 @@ from __future__ import annotations from typing import Any from homeassistant.components.sensor import SensorEntity, SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import CONF_SOURCE, DEFAULT_NAME, DOMAIN -from .coordinator import VersionDataUpdateCoordinator +from . 
import VersionConfigEntry +from .const import CONF_SOURCE, DEFAULT_NAME from .entity import VersionEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VersionConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up version sensors.""" - coordinator: VersionDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data if (entity_name := entry.data[CONF_NAME]) == DEFAULT_NAME: entity_name = entry.title diff --git a/homeassistant/components/vesync/__init__.py b/homeassistant/components/vesync/__init__.py index 7dceb1b3f8f..04547d33dea 100644 --- a/homeassistant/components/vesync/__init__.py +++ b/homeassistant/components/vesync/__init__.py @@ -7,7 +7,6 @@ from pyvesync import VeSync from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .common import async_process_devices @@ -26,8 +25,6 @@ PLATFORMS = [Platform.FAN, Platform.LIGHT, Platform.SENSOR, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Vesync as config entry.""" diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 6272c033b4f..4dce2762eef 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -84,8 +84,14 @@ def _setup_entities(devices, async_add_entities): class VeSyncFanHA(VeSyncDevice, FanEntity): """Representation of a VeSync fan.""" - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | 
FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) _attr_name = None + _enable_turn_on_off_backwards_compatibility = False def __init__(self, fan) -> None: """Initialize the VeSync fan device.""" diff --git a/homeassistant/components/vesync/manifest.json b/homeassistant/components/vesync/manifest.json index ff3f56dd184..c5926cc224a 100644 --- a/homeassistant/components/vesync/manifest.json +++ b/homeassistant/components/vesync/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vesync", "iot_class": "cloud_polling", "loggers": ["pyvesync"], - "requirements": ["pyvesync==2.1.10"] + "requirements": ["pyvesync==2.1.12"] } diff --git a/homeassistant/components/viaggiatreno/sensor.py b/homeassistant/components/viaggiatreno/sensor.py index 9c6c6bca422..1ea12ed6a41 100644 --- a/homeassistant/components/viaggiatreno/sensor.py +++ b/homeassistant/components/viaggiatreno/sensor.py @@ -10,7 +10,10 @@ import time import aiohttp import voluptuous as vol -from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) from homeassistant.const import UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -52,7 +55,7 @@ CANCELLED_STRING = "Cancelled" NOT_DEPARTED_STRING = "Not departed yet" NO_INFORMATION_STRING = "No information for this train now" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_TRAIN_ID): cv.string, vol.Required(CONF_STATION_ID): cv.string, diff --git a/homeassistant/components/vicare/const.py b/homeassistant/components/vicare/const.py index 24ab94778e3..8f8ae3c94e3 100644 --- a/homeassistant/components/vicare/const.py +++ b/homeassistant/components/vicare/const.py @@ -10,6 +10,7 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, + 
Platform.FAN, Platform.NUMBER, Platform.SENSOR, Platform.WATER_HEATER, @@ -18,6 +19,7 @@ PLATFORMS = [ UNSUPPORTED_DEVICES = [ "Heatbox1", "Heatbox2_SRC", + "E3_TCU41_x04", "E3_FloorHeatingCircuitChannel", "E3_FloorHeatingCircuitDistributorBox", "E3_RoomControl_One_522", diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py new file mode 100644 index 00000000000..088e54c7354 --- /dev/null +++ b/homeassistant/components/vicare/fan.py @@ -0,0 +1,124 @@ +"""Viessmann ViCare ventilation device.""" + +from __future__ import annotations + +from contextlib import suppress +import logging + +from PyViCare.PyViCareDevice import Device as PyViCareDevice +from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareUtils import ( + PyViCareInvalidDataError, + PyViCareNotSupportedFeatureError, + PyViCareRateLimitError, +) +from PyViCare.PyViCareVentilationDevice import ( + VentilationDevice as PyViCareVentilationDevice, +) +from requests.exceptions import ConnectionError as RequestConnectionError + +from homeassistant.components.fan import FanEntity, FanEntityFeature +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.percentage import ( + ordered_list_item_to_percentage, + percentage_to_ordered_list_item, +) + +from .const import DEVICE_LIST, DOMAIN +from .entity import ViCareEntity +from .types import VentilationMode, VentilationProgram + +_LOGGER = logging.getLogger(__name__) + +ORDERED_NAMED_FAN_SPEEDS = [ + VentilationProgram.LEVEL_ONE, + VentilationProgram.LEVEL_TWO, + VentilationProgram.LEVEL_THREE, + VentilationProgram.LEVEL_FOUR, +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the ViCare fan platform.""" + + device_list = 
hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] + + async_add_entities( + [ + ViCareFan(device.config, device.api) + for device in device_list + if isinstance(device.api, PyViCareVentilationDevice) + ] + ) + + +class ViCareFan(ViCareEntity, FanEntity): + """Representation of the ViCare ventilation device.""" + + _attr_preset_modes = list[str]( + [ + VentilationMode.PERMANENT, + VentilationMode.VENTILATION, + VentilationMode.SENSOR_DRIVEN, + VentilationMode.SENSOR_OVERRIDE, + ] + ) + _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) + _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + _attr_translation_key = "ventilation" + _enable_turn_on_off_backwards_compatibility = False + + def __init__( + self, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + ) -> None: + """Initialize the fan entity.""" + super().__init__(device_config, device, self._attr_translation_key) + + def update(self) -> None: + """Update state of fan.""" + try: + with suppress(PyViCareNotSupportedFeatureError): + self._attr_preset_mode = VentilationMode.from_vicare_mode( + self._api.getActiveMode() + ) + with suppress(PyViCareNotSupportedFeatureError): + self._attr_percentage = ordered_list_item_to_percentage( + ORDERED_NAMED_FAN_SPEEDS, self._api.getActiveProgram() + ) + except RequestConnectionError: + _LOGGER.error("Unable to retrieve data from ViCare server") + except ValueError: + _LOGGER.error("Unable to decode data from ViCare server") + except PyViCareRateLimitError as limit_exception: + _LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception) + except PyViCareInvalidDataError as invalid_data_exception: + _LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception) + + @property + def is_on(self) -> bool | None: + """Return true if the entity is on.""" + # Viessmann ventilation unit cannot be turned off + return True + + def set_percentage(self, percentage: int) -> None: + """Set the speed of the fan, as a 
percentage.""" + if self._attr_preset_mode != str(VentilationMode.PERMANENT): + self.set_preset_mode(VentilationMode.PERMANENT) + + level = percentage_to_ordered_list_item(ORDERED_NAMED_FAN_SPEEDS, percentage) + _LOGGER.debug("changing ventilation level to %s", level) + self._api.setPermanentLevel(level) + + def set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + target_mode = VentilationMode.to_vicare_mode(preset_mode) + _LOGGER.debug("changing ventilation mode to %s", target_mode) + self._api.setActiveMode(target_mode) diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 0e98729e40f..0271ffc9798 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -950,6 +950,8 @@ class ViCareSensor(ViCareEntity, SensorEntity): """Initialize the sensor.""" super().__init__(device_config, api, description.key) self.entity_description = description + # run update to have device_class set depending on unit_of_measurement + self.update() @property def available(self) -> bool: diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index de92d0ec271..0452a560cb8 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -65,6 +65,21 @@ "name": "Heating" } }, + "fan": { + "ventilation": { + "name": "Ventilation", + "state_attributes": { + "preset_mode": { + "state": { + "permanent": "permanent", + "ventilation": "schedule", + "sensor_driven": "sensor", + "sensor_override": "schedule with sensor-override" + } + } + } + } + }, "number": { "heating_curve_shift": { "name": "Heating curve shift" @@ -304,8 +319,8 @@ "ess_discharge_total": { "name": "Battery discharge total" }, - "pcc_current_power_exchange": { - "name": "Grid power exchange" + "pcc_transfer_power_exchange": { + "name": "Power exchange with grid" }, "pcc_energy_consumption": { "name": "Energy 
import from grid" diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 7e1ec7f8bee..596605fccdd 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -64,6 +64,55 @@ VICARE_TO_HA_PRESET_HEATING = { } +class VentilationMode(enum.StrEnum): + """ViCare ventilation modes.""" + + PERMANENT = "permanent" # on, speed controlled by program (levelOne-levelFour) + VENTILATION = "ventilation" # activated by schedule + SENSOR_DRIVEN = "sensor_driven" # activated by schedule, override by sensor + SENSOR_OVERRIDE = "sensor_override" # activated by sensor + + @staticmethod + def to_vicare_mode(mode: str | None) -> str | None: + """Return the mapped ViCare ventilation mode for the Home Assistant mode.""" + if mode: + try: + ventilation_mode = VentilationMode(mode) + except ValueError: + # ignore unsupported / unmapped modes + return None + return HA_TO_VICARE_MODE_VENTILATION.get(ventilation_mode) if mode else None + return None + + @staticmethod + def from_vicare_mode(vicare_mode: str | None) -> str | None: + """Return the mapped Home Assistant mode for the ViCare ventilation mode.""" + for mode in VentilationMode: + if HA_TO_VICARE_MODE_VENTILATION.get(VentilationMode(mode)) == vicare_mode: + return mode + return None + + +HA_TO_VICARE_MODE_VENTILATION = { + VentilationMode.PERMANENT: "permanent", + VentilationMode.VENTILATION: "ventilation", + VentilationMode.SENSOR_DRIVEN: "sensorDriven", + VentilationMode.SENSOR_OVERRIDE: "sensorOverride", +} + + +class VentilationProgram(enum.StrEnum): + """ViCare preset ventilation programs. 
+ + As listed in https://github.com/somm15/PyViCare/blob/6c5b023ca6c8bb2d38141dd1746dc1705ec84ce8/PyViCare/PyViCareVentilationDevice.py#L37 + """ + + LEVEL_ONE = "levelOne" + LEVEL_TWO = "levelTwo" + LEVEL_THREE = "levelThree" + LEVEL_FOUR = "levelFour" + + @dataclass(frozen=True) class ViCareDevice: """Dataclass holding the device api and config.""" diff --git a/homeassistant/components/vivotek/camera.py b/homeassistant/components/vivotek/camera.py index 8719d55ec29..a8bf652e963 100644 --- a/homeassistant/components/vivotek/camera.py +++ b/homeassistant/components/vivotek/camera.py @@ -5,7 +5,11 @@ from __future__ import annotations from libpyvivotek import VivotekCamera import voluptuous as vol -from homeassistant.components.camera import PLATFORM_SCHEMA, Camera, CameraEntityFeature +from homeassistant.components.camera import ( + PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, + Camera, + CameraEntityFeature, +) from homeassistant.const import ( CONF_AUTHENTICATION, CONF_IP_ADDRESS, @@ -32,7 +36,7 @@ DEFAULT_EVENT_0_KEY = "event_i0_enable" DEFAULT_SECURITY_LEVEL = "admin" DEFAULT_STREAM_SOURCE = "live.sdp" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = CAMERA_PLATFORM_SCHEMA.extend( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index fb5f74f4e09..d8b99595f54 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -188,7 +188,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize config flow.""" - self._user_schema = None + self._user_schema: vol.Schema | None = None self._must_show_form: bool | None = None self._ch_type: str | None = None self._pairing_token: str | None = None diff --git a/homeassistant/components/vizio/const.py b/homeassistant/components/vizio/const.py index 
03caa723771..4eb96256d2e 100644 --- a/homeassistant/components/vizio/const.py +++ b/homeassistant/components/vizio/const.py @@ -19,6 +19,7 @@ from homeassistant.const import ( CONF_NAME, ) import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.typing import VolDictType SERVICE_UPDATE_SETTING = "update_setting" @@ -26,7 +27,7 @@ ATTR_SETTING_TYPE = "setting_type" ATTR_SETTING_NAME = "setting_name" ATTR_NEW_VALUE = "new_value" -UPDATE_SETTING_SCHEMA = { +UPDATE_SETTING_SCHEMA: VolDictType = { vol.Required(ATTR_SETTING_TYPE): vol.All(cv.string, vol.Lower, cv.slugify), vol.Required(ATTR_SETTING_NAME): vol.All(cv.string, vol.Lower, cv.slugify), vol.Required(ATTR_NEW_VALUE): vol.Any(vol.Coerce(int), cv.string), diff --git a/homeassistant/components/vlc/media_player.py b/homeassistant/components/vlc/media_player.py index 53831fb8db0..cd05c919d58 100644 --- a/homeassistant/components/vlc/media_player.py +++ b/homeassistant/components/vlc/media_player.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, BrowseMedia, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -30,7 +30,7 @@ _LOGGER = logging.getLogger(__name__) CONF_ARGUMENTS = "arguments" DEFAULT_NAME = "Vlc" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_ARGUMENTS, default=""): cv.string, vol.Optional(CONF_NAME): cv.string, diff --git a/homeassistant/components/voicerss/tts.py b/homeassistant/components/voicerss/tts.py index 84bbcc19409..9f1615ffa01 100644 --- a/homeassistant/components/voicerss/tts.py +++ b/homeassistant/components/voicerss/tts.py @@ -7,7 +7,11 @@ import logging import aiohttp import voluptuous as vol -from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider +from homeassistant.components.tts import ( + CONF_LANG, + 
PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, + Provider, +) from homeassistant.const import CONF_API_KEY from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv @@ -145,7 +149,7 @@ DEFAULT_CODEC = "mp3" DEFAULT_FORMAT = "8khz_8bit_mono" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = TTS_PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES), diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 5770d9d2b4a..161e938a3b6 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -21,7 +21,7 @@ from voip_utils import ( VoipDatagramProtocol, ) -from homeassistant.components import stt, tts +from homeassistant.components import assist_pipeline, stt, tts from homeassistant.components.assist_pipeline import ( Pipeline, PipelineEvent, @@ -31,12 +31,14 @@ from homeassistant.components.assist_pipeline import ( async_pipeline_from_audio_stream, select as pipeline_select, ) +from homeassistant.components.assist_pipeline.audio_enhancer import ( + AudioEnhancer, + MicroVadEnhancer, +) from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, VadSensitivity, - VoiceActivityDetector, VoiceCommandSegmenter, - WebRtcVad, ) from homeassistant.const import __version__ from homeassistant.core import Context, HomeAssistant @@ -233,13 +235,13 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: # Wait for speech before starting pipeline segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - vad = WebRtcVad() + audio_enhancer = MicroVadEnhancer(0, 0, True) chunk_buffer: deque[bytes] = deque( maxlen=self.buffered_chunks_before_speech, ) speech_detected = await self._wait_for_speech( segmenter, - vad, + audio_enhancer, chunk_buffer, ) if not speech_detected: @@ -253,7 +255,7 @@ class 
PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: async for chunk in self._segment_audio( segmenter, - vad, + audio_enhancer, chunk_buffer, ): yield chunk @@ -317,7 +319,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async def _wait_for_speech( self, segmenter: VoiceCommandSegmenter, - vad: VoiceActivityDetector, + audio_enhancer: AudioEnhancer, chunk_buffer: MutableSequence[bytes], ): """Buffer audio chunks until speech is detected. @@ -329,13 +331,17 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert vad.samples_per_chunk is not None - vad_buffer = AudioBuffer(vad.samples_per_chunk * WIDTH) + vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) while chunk: chunk_buffer.append(chunk) - segmenter.process_with_vad(chunk, vad, vad_buffer) + segmenter.process_with_vad( + chunk, + assist_pipeline.SAMPLES_PER_CHUNK, + lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, + vad_buffer, + ) if segmenter.in_command: # Buffer until command starts if len(vad_buffer) > 0: @@ -351,7 +357,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async def _segment_audio( self, segmenter: VoiceCommandSegmenter, - vad: VoiceActivityDetector, + audio_enhancer: AudioEnhancer, chunk_buffer: Sequence[bytes], ) -> AsyncIterable[bytes]: """Yield audio chunks until voice command has finished.""" @@ -364,11 +370,15 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert vad.samples_per_chunk is not None - vad_buffer = AudioBuffer(vad.samples_per_chunk * WIDTH) + vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) while chunk: - if not segmenter.process_with_vad(chunk, vad, vad_buffer): + if not segmenter.process_with_vad( + chunk, + assist_pipeline.SAMPLES_PER_CHUNK, + lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is 
True, + vad_buffer, + ): # Voice command is finished break @@ -425,13 +435,13 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): sample_channels = wav_file.getnchannels() if ( - (sample_rate != 16000) - or (sample_width != 2) - or (sample_channels != 1) + (sample_rate != RATE) + or (sample_width != WIDTH) + or (sample_channels != CHANNELS) ): raise ValueError( - "Expected rate/width/channels as 16000/2/1," - " got {sample_rate}/{sample_width}/{sample_channels}}" + f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," + f" got {sample_rate}/{sample_width}/{sample_channels}" ) audio_bytes = wav_file.readframes(wav_file.getnframes()) diff --git a/homeassistant/components/volkszaehler/sensor.py b/homeassistant/components/volkszaehler/sensor.py index ce5691b1193..c4fa7b1088b 100644 --- a/homeassistant/components/volkszaehler/sensor.py +++ b/homeassistant/components/volkszaehler/sensor.py @@ -10,7 +10,7 @@ from volkszaehler.exceptions import VolkszaehlerApiConnectionError import voluptuous as vol from homeassistant.components.sensor import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -73,7 +73,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( SENSOR_KEYS: list[str] = [desc.key for desc in SENSOR_TYPES] -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_UUID): cv.string, vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, diff --git a/homeassistant/components/vultr/binary_sensor.py b/homeassistant/components/vultr/binary_sensor.py index 5c0db81e843..6a697eebe11 100644 --- a/homeassistant/components/vultr/binary_sensor.py +++ b/homeassistant/components/vultr/binary_sensor.py @@ -7,7 +7,7 @@ import logging import voluptuous as vol from homeassistant.components.binary_sensor import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as BINARY_SENSOR_PLATFORM_SCHEMA, BinarySensorDeviceClass, BinarySensorEntity, ) @@ -38,7 +38,7 @@ from . import ( _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "Vultr {}" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_SUBSCRIPTION): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, diff --git a/homeassistant/components/vultr/sensor.py b/homeassistant/components/vultr/sensor.py index 816a55736be..843aa416297 100644 --- a/homeassistant/components/vultr/sensor.py +++ b/homeassistant/components/vultr/sensor.py @@ -7,7 +7,7 @@ import logging import voluptuous as vol from homeassistant.components.sensor import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -45,7 +45,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( ) SENSOR_KEYS: list[str] = [desc.key for desc in SENSOR_TYPES] -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_SUBSCRIPTION): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, diff --git a/homeassistant/components/vultr/switch.py b/homeassistant/components/vultr/switch.py index 6758748b9f3..b03d613895a 100644 --- a/homeassistant/components/vultr/switch.py +++ b/homeassistant/components/vultr/switch.py @@ -7,7 +7,10 @@ from typing import Any import voluptuous as vol -from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity +from homeassistant.components.switch import ( + PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA, + SwitchEntity, +) from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv @@ -35,7 +38,7 @@ from . import ( _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "Vultr {}" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { vol.Required(CONF_SUBSCRIPTION): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, diff --git a/homeassistant/components/w800rf32/binary_sensor.py b/homeassistant/components/w800rf32/binary_sensor.py index 49eec35cb1e..06e9e0dfdac 100644 --- a/homeassistant/components/w800rf32/binary_sensor.py +++ b/homeassistant/components/w800rf32/binary_sensor.py @@ -9,7 +9,7 @@ import W800rf32 as w800 from homeassistant.components.binary_sensor import ( DEVICE_CLASSES_SCHEMA, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as BINARY_SENSOR_PLATFORM_SCHEMA, BinarySensorEntity, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_DEVICES, CONF_NAME @@ -25,7 +25,7 @@ _LOGGER = logging.getLogger(__name__) CONF_OFF_DELAY = "off_delay" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICES): { cv.string: vol.Schema( diff --git 
a/homeassistant/components/wake_on_lan/__init__.py b/homeassistant/components/wake_on_lan/__init__.py index 37837da683a..efd72c4564c 100644 --- a/homeassistant/components/wake_on_lan/__init__.py +++ b/homeassistant/components/wake_on_lan/__init__.py @@ -6,12 +6,13 @@ import logging import voluptuous as vol import wakeonlan +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN +from .const import DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) @@ -43,7 +44,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if broadcast_port is not None: service_kwargs["port"] = broadcast_port - _LOGGER.info( + _LOGGER.debug( "Send magic packet to mac %s (broadcast: %s, port: %s)", mac_address, broadcast_address, @@ -62,3 +63,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up a Wake on LAN component entry.""" + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/wake_on_lan/button.py b/homeassistant/components/wake_on_lan/button.py new file mode 100644 index 00000000000..87135a61380 --- /dev/null +++ b/homeassistant/components/wake_on_lan/button.py @@ 
-0,0 +1,84 @@ +"""Support for button entity in wake on lan.""" + +from __future__ import annotations + +from functools import partial +import logging +from typing import Any + +import wakeonlan + +from homeassistant.components.button import ButtonEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Wake on LAN button entry.""" + broadcast_address: str | None = entry.options.get(CONF_BROADCAST_ADDRESS) + broadcast_port: int | None = entry.options.get(CONF_BROADCAST_PORT) + mac_address: str = entry.options[CONF_MAC] + name: str = entry.title + + async_add_entities( + [ + WolButton( + name, + mac_address, + broadcast_address, + broadcast_port, + ) + ] + ) + + +class WolButton(ButtonEntity): + """Representation of a wake on lan button.""" + + _attr_name = None + + def __init__( + self, + name: str, + mac_address: str, + broadcast_address: str | None, + broadcast_port: int | None, + ) -> None: + """Initialize the WOL button.""" + self._mac_address = mac_address + self._broadcast_address = broadcast_address + self._broadcast_port = broadcast_port + self._attr_unique_id = dr.format_mac(mac_address) + self._attr_device_info = dr.DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, self._attr_unique_id)}, + default_manufacturer="Wake on LAN", + default_name=name, + ) + + async def async_press(self) -> None: + """Press the button.""" + service_kwargs: dict[str, Any] = {} + if self._broadcast_address is not None: + service_kwargs["ip_address"] = self._broadcast_address + if self._broadcast_port is not None: + 
service_kwargs["port"] = self._broadcast_port + + _LOGGER.debug( + "Send magic packet to mac %s (broadcast: %s, port: %s)", + self._mac_address, + self._broadcast_address, + self._broadcast_port, + ) + + await self.hass.async_add_executor_job( + partial(wakeonlan.send_magic_packet, self._mac_address, **service_kwargs) + ) diff --git a/homeassistant/components/wake_on_lan/config_flow.py b/homeassistant/components/wake_on_lan/config_flow.py new file mode 100644 index 00000000000..fb54dd146e5 --- /dev/null +++ b/homeassistant/components/wake_on_lan/config_flow.py @@ -0,0 +1,80 @@ +"""Config flow for Wake on lan integration.""" + +from collections.abc import Mapping +from typing import Any + +import voluptuous as vol + +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + TextSelector, +) + +from .const import DEFAULT_NAME, DOMAIN + + +async def validate( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate input setup.""" + user_input = await validate_options(handler, user_input) + + user_input[CONF_MAC] = dr.format_mac(user_input[CONF_MAC]) + + # Mac address needs to be unique + handler.parent_handler._async_abort_entries_match({CONF_MAC: user_input[CONF_MAC]}) # noqa: SLF001 + + return user_input + + +async def validate_options( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate input options.""" + if CONF_BROADCAST_PORT in user_input: + # Convert float to int for broadcast port + user_input[CONF_BROADCAST_PORT] = int(user_input[CONF_BROADCAST_PORT]) + return user_input + + +DATA_SCHEMA = {vol.Required(CONF_MAC): TextSelector()} 
+OPTIONS_SCHEMA = { + vol.Optional(CONF_BROADCAST_ADDRESS): TextSelector(), + vol.Optional(CONF_BROADCAST_PORT): NumberSelector( + NumberSelectorConfig(min=0, max=65535, step=1, mode=NumberSelectorMode.BOX) + ), +} + + +CONFIG_FLOW = { + "user": SchemaFlowFormStep( + schema=vol.Schema(DATA_SCHEMA).extend(OPTIONS_SCHEMA), + validate_user_input=validate, + ) +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep( + vol.Schema(OPTIONS_SCHEMA), validate_user_input=validate_options + ), +} + + +class WakeonLanConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Wake on Lan.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + mac: str = options[CONF_MAC] + return f"{DEFAULT_NAME} {mac}" diff --git a/homeassistant/components/wake_on_lan/const.py b/homeassistant/components/wake_on_lan/const.py index 2560ef40382..20b9573cfde 100644 --- a/homeassistant/components/wake_on_lan/const.py +++ b/homeassistant/components/wake_on_lan/const.py @@ -1,3 +1,11 @@ """Constants for the Wake-On-LAN component.""" +from homeassistant.const import Platform + DOMAIN = "wake_on_lan" +PLATFORMS = [Platform.BUTTON] + +CONF_OFF_ACTION = "turn_off" + +DEFAULT_NAME = "Wake on LAN" +DEFAULT_PING_TIMEOUT = 1 diff --git a/homeassistant/components/wake_on_lan/manifest.json b/homeassistant/components/wake_on_lan/manifest.json index a62980b3010..c716a851ae4 100644 --- a/homeassistant/components/wake_on_lan/manifest.json +++ b/homeassistant/components/wake_on_lan/manifest.json @@ -2,6 +2,7 @@ "domain": "wake_on_lan", "name": "Wake on LAN", "codeowners": ["@ntilley905"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/wake_on_lan", "iot_class": "local_push", "requirements": ["wakeonlan==2.1.0"] diff --git a/homeassistant/components/wake_on_lan/strings.json b/homeassistant/components/wake_on_lan/strings.json index 
8395bc7503a..89bc30e405a 100644 --- a/homeassistant/components/wake_on_lan/strings.json +++ b/homeassistant/components/wake_on_lan/strings.json @@ -1,20 +1,56 @@ { + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "step": { + "user": { + "data": { + "mac": "MAC address", + "broadcast_address": "Broadcast address", + "broadcast_port": "Broadcast port" + }, + "data_description": { + "mac": "MAC address of the device to wake up.", + "broadcast_address": "The IP address of the host to send the magic packet to. Defaults to `255.255.255.255` and is normally not changed.", + "broadcast_port": "The port to send the magic packet to. Defaults to `9` and is normally not changed." + } + } + } + }, + "options": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "step": { + "init": { + "data": { + "broadcast_address": "[%key:component::wake_on_lan::config::step::user::data::broadcast_address%]", + "broadcast_port": "[%key:component::wake_on_lan::config::step::user::data::broadcast_port%]" + }, + "data_description": { + "broadcast_address": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_address%]", + "broadcast_port": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_port%]" + } + } + } + }, "services": { "send_magic_packet": { "name": "Send magic packet", "description": "Sends a 'magic packet' to wake up a device with 'Wake-On-LAN' capabilities.", "fields": { "mac": { - "name": "MAC address", - "description": "MAC address of the device to wake up." + "name": "[%key:component::wake_on_lan::config::step::user::data::mac%]", + "description": "[%key:component::wake_on_lan::config::step::user::data_description::mac%]" }, "broadcast_address": { - "name": "Broadcast address", - "description": "Broadcast IP where to send the magic packet." 
+ "name": "[%key:component::wake_on_lan::config::step::user::data::broadcast_address%]", + "description": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_address%]" }, "broadcast_port": { - "name": "Broadcast port", - "description": "Port where to send the magic packet." + "name": "[%key:component::wake_on_lan::config::step::user::data::broadcast_port%]", + "description": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_port%]" } } } diff --git a/homeassistant/components/wake_on_lan/switch.py b/homeassistant/components/wake_on_lan/switch.py index e5c3a055310..f4949ec6901 100644 --- a/homeassistant/components/wake_on_lan/switch.py +++ b/homeassistant/components/wake_on_lan/switch.py @@ -10,7 +10,7 @@ import voluptuous as vol import wakeonlan from homeassistant.components.switch import ( - PLATFORM_SCHEMA as PARENT_PLATFORM_SCHEMA, + PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA, SwitchEntity, ) from homeassistant.const import ( @@ -27,16 +27,11 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import DOMAIN +from .const import CONF_OFF_ACTION, DEFAULT_NAME, DEFAULT_PING_TIMEOUT, DOMAIN _LOGGER = logging.getLogger(__name__) -CONF_OFF_ACTION = "turn_off" - -DEFAULT_NAME = "Wake on LAN" -DEFAULT_PING_TIMEOUT = 1 - -PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { vol.Required(CONF_MAC): cv.string, vol.Optional(CONF_BROADCAST_ADDRESS): cv.string, @@ -48,10 +43,10 @@ PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend( ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up a wake on lan switch.""" @@ -62,7 +57,7 @@ def 
setup_platform( name: str = config[CONF_NAME] off_action: list[Any] | None = config.get(CONF_OFF_ACTION) - add_entities( + async_add_entities( [ WolSwitch( hass, diff --git a/homeassistant/components/wallbox/manifest.json b/homeassistant/components/wallbox/manifest.json index ce9008ef8bb..63102646508 100644 --- a/homeassistant/components/wallbox/manifest.json +++ b/homeassistant/components/wallbox/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wallbox", "iot_class": "cloud_polling", "loggers": ["wallbox"], - "requirements": ["wallbox==0.6.0"] + "requirements": ["wallbox==0.7.0"] } diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 1623b391e53..2e749735b0c 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -25,11 +25,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ServiceValidationError -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.config_validation import ( # noqa: F401 - PLATFORM_SCHEMA, - PLATFORM_SCHEMA_BASE, -) +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.deprecation import ( DeprecatedConstantEnum, all_with_deprecated_constants, @@ -39,17 +35,19 @@ from homeassistant.helpers.deprecation import ( from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.temperature import display_temp as show_temp -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.util.unit_conversion import TemperatureConverter from .const import DOMAIN +ENTITY_ID_FORMAT = DOMAIN + ".{}" +PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA +PLATFORM_SCHEMA_BASE = 
cv.PLATFORM_SCHEMA_BASE +SCAN_INTERVAL = timedelta(seconds=60) + DEFAULT_MIN_TEMP = 110 DEFAULT_MAX_TEMP = 140 -ENTITY_ID_FORMAT = DOMAIN + ".{}" -SCAN_INTERVAL = timedelta(seconds=60) - SERVICE_SET_AWAY_MODE = "set_away_mode" SERVICE_SET_TEMPERATURE = "set_temperature" SERVICE_SET_OPERATION_MODE = "set_operation_mode" @@ -96,29 +94,17 @@ CONVERTIBLE_ATTRIBUTE = [ATTR_TEMPERATURE] _LOGGER = logging.getLogger(__name__) -ON_OFF_SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids}) - -SET_AWAY_MODE_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_AWAY_MODE): cv.boolean, - } -) -SET_TEMPERATURE_SCHEMA = vol.Schema( - vol.All( - { - vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Optional(ATTR_OPERATION_MODE): cv.string, - } - ) -) -SET_OPERATION_MODE_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_OPERATION_MODE): cv.string, - } -) +SET_AWAY_MODE_SCHEMA: VolDictType = { + vol.Required(ATTR_AWAY_MODE): cv.boolean, +} +SET_TEMPERATURE_SCHEMA: VolDictType = { + vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), + vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, + vol.Optional(ATTR_OPERATION_MODE): cv.string, +} +SET_OPERATION_MODE_SCHEMA: VolDictType = { + vol.Required(ATTR_OPERATION_MODE): cv.string, +} # mypy: disallow-any-generics @@ -131,10 +117,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) component.async_register_entity_service( - SERVICE_TURN_ON, {}, "async_turn_on", [WaterHeaterEntityFeature.ON_OFF] + SERVICE_TURN_ON, None, "async_turn_on", [WaterHeaterEntityFeature.ON_OFF] ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF] ) 
component.async_register_entity_service( SERVICE_SET_AWAY_MODE, SET_AWAY_MODE_SCHEMA, async_service_away_mode @@ -147,12 +133,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: SET_OPERATION_MODE_SCHEMA, "async_handle_set_operation_mode", ) - component.async_register_entity_service( - SERVICE_TURN_OFF, ON_OFF_SERVICE_SCHEMA, "async_turn_off" - ) - component.async_register_entity_service( - SERVICE_TURN_ON, ON_OFF_SERVICE_SCHEMA, "async_turn_on" - ) return True diff --git a/homeassistant/components/watson_tts/tts.py b/homeassistant/components/watson_tts/tts.py index 3cf1582e008..373d17438c9 100644 --- a/homeassistant/components/watson_tts/tts.py +++ b/homeassistant/components/watson_tts/tts.py @@ -6,7 +6,10 @@ from ibm_cloud_sdk_core.authenticators import IAMAuthenticator from ibm_watson import TextToSpeechV1 import voluptuous as vol -from homeassistant.components.tts import PLATFORM_SCHEMA, Provider +from homeassistant.components.tts import ( + PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, + Provider, +) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) @@ -114,7 +117,7 @@ CONTENT_TYPE_EXTENSIONS = { DEFAULT_VOICE = "en-US_AllisonV3Voice" DEFAULT_OUTPUT_FORMAT = "audio/mp3" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = TTS_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_URL, default=DEFAULT_URL): cv.string, vol.Required(CONF_APIKEY): cv.string, diff --git a/homeassistant/components/waze_travel_time/manifest.json b/homeassistant/components/waze_travel_time/manifest.json index ce7c9105781..9d615431c7d 100644 --- a/homeassistant/components/waze_travel_time/manifest.json +++ b/homeassistant/components/waze_travel_time/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/waze_travel_time", "iot_class": "cloud_polling", "loggers": ["pywaze", "homeassistant.helpers.location"], - "requirements": ["pywaze==1.0.1"] + "requirements": ["pywaze==1.0.2"] } diff --git 
a/homeassistant/components/weather/__init__.py b/homeassistant/components/weather/__init__.py index b3ce52510d2..dab3394426e 100644 --- a/homeassistant/components/weather/__init__.py +++ b/homeassistant/components/weather/__init__.py @@ -31,10 +31,7 @@ from homeassistant.core import ( callback, ) from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.config_validation import ( # noqa: F401 - PLATFORM_SCHEMA, - PLATFORM_SCHEMA_BASE, -) +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import ABCCachedProperties, Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType @@ -74,6 +71,11 @@ from .websocket_api import async_setup as async_setup_ws_api _LOGGER = logging.getLogger(__name__) +ENTITY_ID_FORMAT = DOMAIN + ".{}" +PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA +PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE +SCAN_INTERVAL = timedelta(seconds=30) + ATTR_CONDITION_CLASS = "condition_class" ATTR_CONDITION_CLEAR_NIGHT = "clear-night" ATTR_CONDITION_CLOUDY = "cloudy" @@ -115,10 +117,6 @@ ATTR_FORECAST_DEW_POINT: Final = "dew_point" ATTR_FORECAST_CLOUD_COVERAGE: Final = "cloud_coverage" ATTR_FORECAST_UV_INDEX: Final = "uv_index" -ENTITY_ID_FORMAT = DOMAIN + ".{}" - -SCAN_INTERVAL = timedelta(seconds=30) - ROUNDING_PRECISION = 2 SERVICE_GET_FORECASTS: Final = "get_forecasts" @@ -924,7 +922,6 @@ class WeatherEntity(Entity, PostInit, cached_properties=CACHED_PROPERTIES_WITH_A forecast_type: Literal["daily", "hourly", "twice_daily"], ) -> None: """Start subscription to forecast_type.""" - return None @callback def _async_subscription_ended( @@ -932,7 +929,6 @@ class WeatherEntity(Entity, PostInit, cached_properties=CACHED_PROPERTIES_WITH_A forecast_type: Literal["daily", "hourly", "twice_daily"], ) -> None: """End subscription to forecast_type.""" - return None @final @callback diff --git 
a/homeassistant/components/weatherflow_cloud/__init__.py b/homeassistant/components/weatherflow_cloud/__init__.py index a40386100e7..8dc26f9b9c6 100644 --- a/homeassistant/components/weatherflow_cloud/__init__.py +++ b/homeassistant/components/weatherflow_cloud/__init__.py @@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant from .const import DOMAIN from .coordinator import WeatherFlowCloudDataUpdateCoordinator -PLATFORMS: list[Platform] = [Platform.WEATHER] +PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WEATHER] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/weatherflow_cloud/coordinator.py b/homeassistant/components/weatherflow_cloud/coordinator.py index 78b4f3be223..8b8a916262f 100644 --- a/homeassistant/components/weatherflow_cloud/coordinator.py +++ b/homeassistant/components/weatherflow_cloud/coordinator.py @@ -21,12 +21,11 @@ class WeatherFlowCloudDataUpdateCoordinator( def __init__(self, hass: HomeAssistant, api_token: str) -> None: """Initialize global WeatherFlow forecast data updater.""" self.weather_api = WeatherFlowRestAPI(api_token=api_token) - super().__init__( hass, LOGGER, name=DOMAIN, - update_interval=timedelta(minutes=15), + update_interval=timedelta(seconds=60), ) async def _async_update_data(self) -> dict[int, WeatherFlowDataREST]: diff --git a/homeassistant/components/weatherflow_cloud/entity.py b/homeassistant/components/weatherflow_cloud/entity.py new file mode 100644 index 00000000000..46077ab0870 --- /dev/null +++ b/homeassistant/components/weatherflow_cloud/entity.py @@ -0,0 +1,38 @@ +"""Base entity class for WeatherFlow Cloud integration.""" + +from weatherflow4py.models.rest.unified import WeatherFlowDataREST + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import ATTR_ATTRIBUTION, DOMAIN, MANUFACTURER +from .coordinator import 
WeatherFlowCloudDataUpdateCoordinator + + +class WeatherFlowCloudEntity(CoordinatorEntity[WeatherFlowCloudDataUpdateCoordinator]): + """Base entity class to use for everything.""" + + _attr_attribution = ATTR_ATTRIBUTION + _attr_has_entity_name = True + + def __init__( + self, + coordinator: WeatherFlowCloudDataUpdateCoordinator, + station_id: int, + ) -> None: + """Class initializer.""" + super().__init__(coordinator) + self.station_id = station_id + + self._attr_device_info = DeviceInfo( + name=self.station.station.name, + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, str(station_id))}, + manufacturer=MANUFACTURER, + configuration_url=f"https://tempestwx.com/station/{station_id}/grid", + ) + + @property + def station(self) -> WeatherFlowDataREST: + """Individual Station data.""" + return self.coordinator.data[self.station_id] diff --git a/homeassistant/components/weatherflow_cloud/icons.json b/homeassistant/components/weatherflow_cloud/icons.json new file mode 100644 index 00000000000..19e6ac56821 --- /dev/null +++ b/homeassistant/components/weatherflow_cloud/icons.json @@ -0,0 +1,42 @@ +{ + "entity": { + "sensor": { + "air_temperature": { + "default": "mdi:thermometer" + }, + "air_density": { + "default": "mdi:format-line-weight" + }, + "feels_like": { + "default": "mdi:thermometer" + }, + "heat_index": { + "default": "mdi:sun-thermometer" + }, + "wet_bulb_temperature": { + "default": "mdi:thermometer-water" + }, + "wet_bulb_globe_temperature": { + "default": "mdi:thermometer-water" + }, + "lightning_strike_count": { + "default": "mdi:lightning-bolt" + }, + "lightning_strike_count_last_1hr": { + "default": "mdi:lightning-bolt" + }, + "lightning_strike_count_last_3hr": { + "default": "mdi:lightning-bolt" + }, + "lightning_strike_last_distance": { + "default": "mdi:lightning-bolt" + }, + "lightning_strike_last_epoch": { + "default": "mdi:lightning-bolt" + }, + "wind_chill": { + "default": "mdi:snowflake-thermometer" + } + } + } +} diff --git 
a/homeassistant/components/weatherflow_cloud/manifest.json b/homeassistant/components/weatherflow_cloud/manifest.json index 93df04d833c..354b9642c06 100644 --- a/homeassistant/components/weatherflow_cloud/manifest.json +++ b/homeassistant/components/weatherflow_cloud/manifest.json @@ -5,5 +5,6 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud", "iot_class": "cloud_polling", + "loggers": ["weatherflow4py"], "requirements": ["weatherflow4py==0.2.21"] } diff --git a/homeassistant/components/weatherflow_cloud/sensor.py b/homeassistant/components/weatherflow_cloud/sensor.py new file mode 100644 index 00000000000..9314c77a65c --- /dev/null +++ b/homeassistant/components/weatherflow_cloud/sensor.py @@ -0,0 +1,208 @@ +"""Sensors for cloud based weatherflow.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import UTC, datetime + +from weatherflow4py.models.rest.observation import Observation + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import UnitOfLength, UnitOfPressure, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from .const import DOMAIN +from .coordinator import WeatherFlowCloudDataUpdateCoordinator +from .entity import WeatherFlowCloudEntity + + +@dataclass(frozen=True, kw_only=True) +class WeatherFlowCloudSensorEntityDescription( + SensorEntityDescription, +): + """Describes a weatherflow sensor.""" + + value_fn: Callable[[Observation], StateType | datetime] + + +WF_SENSORS: tuple[WeatherFlowCloudSensorEntityDescription, ...] 
= ( + # Air Sensors + WeatherFlowCloudSensorEntityDescription( + key="air_density", + translation_key="air_density", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=5, + value_fn=lambda data: data.air_density, + native_unit_of_measurement="kg/m³", + ), + # Temp Sensors + WeatherFlowCloudSensorEntityDescription( + key="air_temperature", + translation_key="air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.air_temperature, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="dew_point", + translation_key="dew_point", + value_fn=lambda data: data.dew_point, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + ), + WeatherFlowCloudSensorEntityDescription( + key="feels_like", + translation_key="feels_like", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.feels_like, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="heat_index", + translation_key="heat_index", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.heat_index, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="wind_chill", + translation_key="wind_chill", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.wind_chill, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="wet_bulb_temperature", + 
translation_key="wet_bulb_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.wet_bulb_temperature, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="wet_bulb_globe_temperature", + translation_key="wet_bulb_globe_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.wet_bulb_globe_temperature, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + # Pressure Sensors + WeatherFlowCloudSensorEntityDescription( + key="barometric_pressure", + translation_key="barometric_pressure", + value_fn=lambda data: data.barometric_pressure, + native_unit_of_measurement=UnitOfPressure.MBAR, + device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=3, + ), + WeatherFlowCloudSensorEntityDescription( + key="sea_level_pressure", + translation_key="sea_level_pressure", + value_fn=lambda data: data.sea_level_pressure, + native_unit_of_measurement=UnitOfPressure.MBAR, + device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=3, + ), + # Lightning Sensors + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_count", + translation_key="lightning_strike_count", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.lightning_strike_count, + ), + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_count_last_1hr", + translation_key="lightning_strike_count_last_1hr", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.lightning_strike_count_last_1hr, + ), + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_count_last_3hr", + translation_key="lightning_strike_count_last_3hr", + 
state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.lightning_strike_count_last_3hr, + ), + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_last_distance", + translation_key="lightning_strike_last_distance", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.DISTANCE, + native_unit_of_measurement=UnitOfLength.KILOMETERS, + value_fn=lambda data: data.lightning_strike_last_distance, + ), + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_last_epoch", + translation_key="lightning_strike_last_epoch", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: datetime.fromtimestamp( + data.lightning_strike_last_epoch, tz=UTC + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up WeatherFlow sensors based on a config entry.""" + + coordinator: WeatherFlowCloudDataUpdateCoordinator = hass.data[DOMAIN][ + entry.entry_id + ] + + stations = coordinator.data.keys() + + async_add_entities( + WeatherFlowCloudSensor(coordinator, sensor_description, station_id) + for station_id in stations + for sensor_description in WF_SENSORS + ) + + +class WeatherFlowCloudSensor(WeatherFlowCloudEntity, SensorEntity): + """Implementation of a WeatherFlow sensor.""" + + entity_description: WeatherFlowCloudSensorEntityDescription + + def __init__( + self, + coordinator: WeatherFlowCloudDataUpdateCoordinator, + description: WeatherFlowCloudSensorEntityDescription, + station_id: int, + ) -> None: + """Initialize the sensor.""" + # Initialize the Entity Class + super().__init__(coordinator, station_id) + self.entity_description = description + self._attr_unique_id = f"{station_id}_{description.key}" + + @property + def native_value(self) -> StateType | datetime: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.station.observation.obs[0]) diff --git 
a/homeassistant/components/weatherflow_cloud/strings.json b/homeassistant/components/weatherflow_cloud/strings.json index 782b0dcf960..df561c8b753 100644 --- a/homeassistant/components/weatherflow_cloud/strings.json +++ b/homeassistant/components/weatherflow_cloud/strings.json @@ -23,5 +23,65 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } + }, + "entity": { + "sensor": { + "air_density": { + "name": "Air density" + }, + "barometric_pressure": { + "name": "Pressure barometric" + }, + "sea_level_pressure": { + "name": "Pressure sea level" + }, + + "dew_point": { + "name": "Dew point" + }, + "lightning_strike_count": { + "name": "Lightning count" + }, + "lightning_strike_count_last_1hr": { + "name": "Lightning count last 1 hr" + }, + "lightning_strike_count_last_3hr": { + "name": "Lightning count last 3 hr" + }, + "lightning_strike_last_distance": { + "name": "Lightning last distance" + }, + "lightning_strike_last_epoch": { + "name": "Lightning last strike" + }, + + "wind_chill": { + "name": "Wind chill" + }, + "wind_direction": { + "name": "Wind direction" + }, + "wind_direction_cardinal": { + "name": "Wind direction (cardinal)" + }, + "wind_gust": { + "name": "Wind gust" + }, + "wind_lull": { + "name": "Wind lull" + }, + "feels_like": { + "name": "Feels like" + }, + "heat_index": { + "name": "Heat index" + }, + "wet_bulb_temperature": { + "name": "Wet bulb temperature" + }, + "wet_bulb_globe_temperature": { + "name": "Wet bulb globe temperature" + } + } } } diff --git a/homeassistant/components/weatherflow_cloud/weather.py b/homeassistant/components/weatherflow_cloud/weather.py index 47e2b6a28df..c475f2974a9 100644 --- a/homeassistant/components/weatherflow_cloud/weather.py +++ b/homeassistant/components/weatherflow_cloud/weather.py @@ -17,11 +17,11 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import 
HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import ATTR_ATTRIBUTION, DOMAIN, MANUFACTURER, STATE_MAP +from .const import DOMAIN, STATE_MAP from .coordinator import WeatherFlowCloudDataUpdateCoordinator +from .entity import WeatherFlowCloudEntity async def async_setup_entry( @@ -43,13 +43,11 @@ async def async_setup_entry( class WeatherFlowWeather( - SingleCoordinatorWeatherEntity[WeatherFlowCloudDataUpdateCoordinator] + WeatherFlowCloudEntity, + SingleCoordinatorWeatherEntity[WeatherFlowCloudDataUpdateCoordinator], ): """Implementation of a WeatherFlow weather condition.""" - _attr_attribution = ATTR_ATTRIBUTION - _attr_has_entity_name = True - _attr_native_temperature_unit = UnitOfTemperature.CELSIUS _attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS _attr_native_pressure_unit = UnitOfPressure.MBAR @@ -65,19 +63,9 @@ class WeatherFlowWeather( station_id: int, ) -> None: """Initialise the platform with a data instance and station.""" - super().__init__(coordinator) - - self.station_id = station_id + super().__init__(coordinator, station_id) self._attr_unique_id = f"weatherflow_forecast_{station_id}" - self._attr_device_info = DeviceInfo( - name=self.local_data.station.name, - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, f"{station_id}")}, - manufacturer=MANUFACTURER, - configuration_url=f"https://tempestwx.com/station/{station_id}/grid", - ) - @property def local_data(self) -> WeatherFlowDataREST: """Return the local weather data object for this station.""" @@ -98,7 +86,6 @@ class WeatherFlowWeather( """Return the Air Pressure @ Station.""" return self.local_data.weather.current_conditions.station_pressure - # @property def humidity(self) -> float | None: """Return the humidity.""" diff --git a/homeassistant/components/webhook/manifest.json b/homeassistant/components/webhook/manifest.json 
index c2795e8ac17..43f5321d9f6 100644 --- a/homeassistant/components/webhook/manifest.json +++ b/homeassistant/components/webhook/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/webhook", + "integration_type": "system", "quality_scale": "internal" } diff --git a/homeassistant/components/webmin/config_flow.py b/homeassistant/components/webmin/config_flow.py index 5fa3aefb048..3f55bbd9110 100644 --- a/homeassistant/components/webmin/config_flow.py +++ b/homeassistant/components/webmin/config_flow.py @@ -53,9 +53,10 @@ async def validate_user_input( except Exception as err: raise SchemaFlowError("unknown") from err - await cast(SchemaConfigFlowHandler, handler.parent_handler).async_set_unique_id( - get_sorted_mac_addresses(data)[0] - ) + if len(mac_addresses := get_sorted_mac_addresses(data)) > 0: + await cast(SchemaConfigFlowHandler, handler.parent_handler).async_set_unique_id( + mac_addresses[0] + ) return user_input diff --git a/homeassistant/components/webmin/coordinator.py b/homeassistant/components/webmin/coordinator.py index dab5e495c1a..45261787e75 100644 --- a/homeassistant/components/webmin/coordinator.py +++ b/homeassistant/components/webmin/coordinator.py @@ -23,6 +23,7 @@ class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """The Webmin data update coordinator.""" mac_address: str + unique_id: str def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize the Webmin data update coordinator.""" @@ -41,14 +42,19 @@ class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): async def async_setup(self) -> None: """Provide needed data to the device info.""" mac_addresses = get_sorted_mac_addresses(self.data) - self.mac_address = mac_addresses[0] - self.device_info[ATTR_CONNECTIONS] = { - (CONNECTION_NETWORK_MAC, format_mac(mac_address)) - for mac_address in mac_addresses - } - 
self.device_info[ATTR_IDENTIFIERS] = { - (DOMAIN, format_mac(mac_address)) for mac_address in mac_addresses - } + if len(mac_addresses) > 0: + self.mac_address = mac_addresses[0] + self.unique_id = self.mac_address + self.device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, format_mac(mac_address)) + for mac_address in mac_addresses + } + self.device_info[ATTR_IDENTIFIERS] = { + (DOMAIN, format_mac(mac_address)) for mac_address in mac_addresses + } + else: + assert self.config_entry + self.unique_id = self.config_entry.entry_id async def _async_update_data(self) -> dict[str, Any]: data = await self.instance.update() diff --git a/homeassistant/components/webmin/sensor.py b/homeassistant/components/webmin/sensor.py index cf1a9845c02..785140393a2 100644 --- a/homeassistant/components/webmin/sensor.py +++ b/homeassistant/components/webmin/sensor.py @@ -235,7 +235,7 @@ class WebminSensor(CoordinatorEntity[WebminUpdateCoordinator], SensorEntity): super().__init__(coordinator) self.entity_description = description self._attr_device_info = coordinator.device_info - self._attr_unique_id = f"{coordinator.mac_address}_{description.key}" + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" @property def native_value(self) -> int | float: diff --git a/homeassistant/components/webostv/__init__.py b/homeassistant/components/webostv/__init__.py index 479407c3199..36950b0e02a 100644 --- a/homeassistant/components/webostv/__init__.py +++ b/homeassistant/components/webostv/__init__.py @@ -4,6 +4,7 @@ from __future__ import annotations from contextlib import suppress import logging +from typing import NamedTuple from aiowebostv import WebOsClient, WebOsTvPairError import voluptuous as vol @@ -43,6 +44,14 @@ CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) CALL_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.comp_entity_ids}) + +class ServiceMethodDetails(NamedTuple): + """Details for SERVICE_TO_METHOD mapping.""" + + method: str + schema: 
vol.Schema + + BUTTON_SCHEMA = CALL_SCHEMA.extend({vol.Required(ATTR_BUTTON): cv.string}) COMMAND_SCHEMA = CALL_SCHEMA.extend( @@ -52,12 +61,14 @@ COMMAND_SCHEMA = CALL_SCHEMA.extend( SOUND_OUTPUT_SCHEMA = CALL_SCHEMA.extend({vol.Required(ATTR_SOUND_OUTPUT): cv.string}) SERVICE_TO_METHOD = { - SERVICE_BUTTON: {"method": "async_button", "schema": BUTTON_SCHEMA}, - SERVICE_COMMAND: {"method": "async_command", "schema": COMMAND_SCHEMA}, - SERVICE_SELECT_SOUND_OUTPUT: { - "method": "async_select_sound_output", - "schema": SOUND_OUTPUT_SCHEMA, - }, + SERVICE_BUTTON: ServiceMethodDetails(method="async_button", schema=BUTTON_SCHEMA), + SERVICE_COMMAND: ServiceMethodDetails( + method="async_command", schema=COMMAND_SCHEMA + ), + SERVICE_SELECT_SOUND_OUTPUT: ServiceMethodDetails( + method="async_select_sound_output", + schema=SOUND_OUTPUT_SCHEMA, + ), } _LOGGER = logging.getLogger(__name__) @@ -92,13 +103,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_service_handler(service: ServiceCall) -> None: method = SERVICE_TO_METHOD[service.service] data = service.data.copy() - data["method"] = method["method"] + data["method"] = method.method async_dispatcher_send(hass, DOMAIN, data) for service, method in SERVICE_TO_METHOD.items(): - schema = method["schema"] hass.services.async_register( - DOMAIN, service, async_service_handler, schema=schema + DOMAIN, service, async_service_handler, schema=method.schema ) hass.data[DOMAIN][DATA_CONFIG_ENTRY][entry.entry_id] = client diff --git a/homeassistant/components/webostv/manifest.json b/homeassistant/components/webostv/manifest.json index ed8e1a6cc6e..679bad9b9f5 100644 --- a/homeassistant/components/webostv/manifest.json +++ b/homeassistant/components/webostv/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_push", "loggers": ["aiowebostv"], "quality_scale": "platinum", - "requirements": ["aiowebostv==0.4.0"], + "requirements": ["aiowebostv==0.4.2"], "ssdp": [ { "st": 
"urn:lge-com:service:webos-second-screen:1" diff --git a/homeassistant/components/webostv/media_player.py b/homeassistant/components/webostv/media_player.py index 6aef47515db..099b5a73784 100644 --- a/homeassistant/components/webostv/media_player.py +++ b/homeassistant/components/webostv/media_player.py @@ -239,7 +239,8 @@ class LgWebOSMediaPlayerEntity(RestoreEntity, MediaPlayerEntity): self._attr_assumed_state = True if ( - self._client.media_state is not None + self._client.is_on + and self._client.media_state is not None and self._client.media_state.get("foregroundAppInfo") is not None ): self._attr_assumed_state = False diff --git a/homeassistant/components/websocket_api/__init__.py b/homeassistant/components/websocket_api/__init__.py index d8427bff10e..f9bc4396e01 100644 --- a/homeassistant/components/websocket_api/__init__.py +++ b/homeassistant/components/websocket_api/__init__.py @@ -4,11 +4,9 @@ from __future__ import annotations from typing import Final, cast -import voluptuous as vol - from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.loader import bind_hass from . 
import commands, connection, const, decorators, http, messages # noqa: F401 @@ -55,7 +53,7 @@ def async_register_command( hass: HomeAssistant, command_or_handler: str | const.WebSocketCommandHandler, handler: const.WebSocketCommandHandler | None = None, - schema: vol.Schema | None = None, + schema: VolSchemaType | None = None, ) -> None: """Register a websocket command.""" if handler is None: diff --git a/homeassistant/components/websocket_api/connection.py b/homeassistant/components/websocket_api/connection.py index ef70df4a123..6c0c6f0c587 100644 --- a/homeassistant/components/websocket_api/connection.py +++ b/homeassistant/components/websocket_api/connection.py @@ -223,7 +223,7 @@ class ActiveConnection: try: if schema is False: if len(msg) > 2: - raise vol.Invalid("extra keys not allowed") + raise vol.Invalid("extra keys not allowed") # noqa: TRY301 handler(self.hass, self, msg) else: handler(self.hass, self, schema(msg)) diff --git a/homeassistant/components/websocket_api/decorators.py b/homeassistant/components/websocket_api/decorators.py index 5131d02b4d3..2c8a6cc02f1 100644 --- a/homeassistant/components/websocket_api/decorators.py +++ b/homeassistant/components/websocket_api/decorators.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.const import HASSIO_USER_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import Unauthorized +from homeassistant.helpers.typing import VolDictType from . import const, messages from .connection import ActiveConnection @@ -130,7 +131,7 @@ def ws_require_user( def websocket_command( - schema: dict[vol.Marker, Any] | vol.All, + schema: VolDictType | vol.All, ) -> Callable[[const.WebSocketCommandHandler], const.WebSocketCommandHandler]: """Tag a function as a websocket command. 
@@ -144,7 +145,7 @@ def websocket_command( def decorate(func: const.WebSocketCommandHandler) -> const.WebSocketCommandHandler: """Decorate ws command function.""" - if is_dict and len(schema) == 1: # type only empty schema + if is_dict and len(schema) == 1: # type: ignore[arg-type] # type only empty schema func._ws_schema = False # type: ignore[attr-defined] # noqa: SLF001 elif is_dict: func._ws_schema = messages.BASE_COMMAND_MESSAGE_SCHEMA.extend(schema) # type: ignore[attr-defined] # noqa: SLF001 diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index c65c4c65988..8ed3469d7ed 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -339,11 +339,11 @@ class WebSocketHandler: raise Disconnect from err if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): - raise Disconnect + raise Disconnect # noqa: TRY301 if msg.type != WSMsgType.TEXT: disconnect_warn = "Received non-Text message." 
- raise Disconnect + raise Disconnect # noqa: TRY301 try: auth_msg_data = json_loads(msg.data) diff --git a/homeassistant/components/websocket_api/manifest.json b/homeassistant/components/websocket_api/manifest.json index 116bd0ccee8..315411ea4cf 100644 --- a/homeassistant/components/websocket_api/manifest.json +++ b/homeassistant/components/websocket_api/manifest.json @@ -1,7 +1,6 @@ { "domain": "websocket_api", "name": "Home Assistant WebSocket API", - "after_dependencies": ["recorder"], "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/websocket_api", diff --git a/homeassistant/components/websocket_api/strings.json b/homeassistant/components/websocket_api/strings.json index 10b95637b6b..afef732b8f5 100644 --- a/homeassistant/components/websocket_api/strings.json +++ b/homeassistant/components/websocket_api/strings.json @@ -1,7 +1,7 @@ { "exceptions": { "child_service_not_found": { - "message": "Service {domain}.{service} called service {child_domain}.{child_service} which was not found." + "message": "Action {domain}.{service} uses action {child_domain}.{child_service} which was not found." 
} } } diff --git a/homeassistant/components/wemo/entity.py b/homeassistant/components/wemo/entity.py index db64aa3137e..16ab3ae1173 100644 --- a/homeassistant/components/wemo/entity.py +++ b/homeassistant/components/wemo/entity.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator import contextlib import logging from pywemo.exceptions import ActionException -from typing_extensions import Generator from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity diff --git a/homeassistant/components/wemo/fan.py b/homeassistant/components/wemo/fan.py index 3ef8aa67a3d..f9d3270aaa0 100644 --- a/homeassistant/components/wemo/fan.py +++ b/homeassistant/components/wemo/fan.py @@ -14,6 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import VolDictType from homeassistant.util.percentage import ( percentage_to_ranged_value, ranged_value_to_percentage, @@ -37,7 +38,7 @@ ATTR_WATER_LEVEL = "water_level" SPEED_RANGE = (FanMode.Minimum, FanMode.Maximum) # off is not included -SET_HUMIDITY_SCHEMA = { +SET_HUMIDITY_SCHEMA: VolDictType = { vol.Required(ATTR_TARGET_HUMIDITY): vol.All( vol.Coerce(float), vol.Range(min=0, max=100) ), @@ -66,16 +67,21 @@ async def async_setup_entry( # This will call WemoHumidifier.reset_filter_life() platform.async_register_entity_service( - SERVICE_RESET_FILTER_LIFE, {}, WemoHumidifier.reset_filter_life.__name__ + SERVICE_RESET_FILTER_LIFE, None, WemoHumidifier.reset_filter_life.__name__ ) class WemoHumidifier(WemoBinaryStateEntity, FanEntity): """Representation of a WeMo humidifier.""" - _attr_supported_features = FanEntityFeature.SET_SPEED + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | 
FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) wemo: Humidifier _last_fan_on_mode: FanMode + _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: DeviceCoordinator) -> None: """Initialize the WeMo switch.""" diff --git a/homeassistant/components/wiffi/binary_sensor.py b/homeassistant/components/wiffi/binary_sensor.py index 23aebd122f2..80088f373b4 100644 --- a/homeassistant/components/wiffi/binary_sensor.py +++ b/homeassistant/components/wiffi/binary_sensor.py @@ -17,7 +17,7 @@ async def async_setup_entry( ) -> None: """Set up platform for a new integration. - Called by the HA framework after async_forward_entry_setup has been called + Called by the HA framework after async_forward_entry_setups has been called during initialization of a new integration (= wiffi). """ diff --git a/homeassistant/components/wiffi/sensor.py b/homeassistant/components/wiffi/sensor.py index 7b64628085a..cf8cf8719c3 100644 --- a/homeassistant/components/wiffi/sensor.py +++ b/homeassistant/components/wiffi/sensor.py @@ -45,7 +45,7 @@ async def async_setup_entry( ) -> None: """Set up platform for a new integration. - Called by the HA framework after async_forward_entry_setup has been called + Called by the HA framework after async_forward_entry_setups has been called during initialization of a new integration (= wiffi). 
""" diff --git a/homeassistant/components/wilight/fan.py b/homeassistant/components/wilight/fan.py index 5c05575c4f8..71559658c35 100644 --- a/homeassistant/components/wilight/fan.py +++ b/homeassistant/components/wilight/fan.py @@ -57,7 +57,13 @@ class WiLightFan(WiLightDevice, FanEntity): _attr_name = None _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.DIRECTION + | FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + ) + _enable_turn_on_off_backwards_compatibility = False def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: """Initialize the device.""" diff --git a/homeassistant/components/wirelesstag/binary_sensor.py b/homeassistant/components/wirelesstag/binary_sensor.py index 85efab16e70..052f6547dd2 100644 --- a/homeassistant/components/wirelesstag/binary_sensor.py +++ b/homeassistant/components/wirelesstag/binary_sensor.py @@ -4,7 +4,10 @@ from __future__ import annotations import voluptuous as vol -from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity +from homeassistant.components.binary_sensor import ( + PLATFORM_SCHEMA as BINARY_SENSOR_PLATFORM_SCHEMA, + BinarySensorEntity, +) from homeassistant.const import CONF_MONITORED_CONDITIONS, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv @@ -65,7 +68,7 @@ SENSOR_TYPES = { } -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] diff --git a/homeassistant/components/wirelesstag/sensor.py b/homeassistant/components/wirelesstag/sensor.py index 0e88272a41c..87906bdc2ae 100644 --- a/homeassistant/components/wirelesstag/sensor.py +++ 
b/homeassistant/components/wirelesstag/sensor.py @@ -7,7 +7,7 @@ import logging import voluptuous as vol from homeassistant.components.sensor import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -65,7 +65,7 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = { SENSOR_KEYS: list[str] = list(SENSOR_TYPES) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All( cv.ensure_list, [vol.In(SENSOR_KEYS)] diff --git a/homeassistant/components/wirelesstag/switch.py b/homeassistant/components/wirelesstag/switch.py index 0eafea0699b..239461df4ea 100644 --- a/homeassistant/components/wirelesstag/switch.py +++ b/homeassistant/components/wirelesstag/switch.py @@ -7,7 +7,7 @@ from typing import Any import voluptuous as vol from homeassistant.components.switch import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA, SwitchEntity, SwitchEntityDescription, ) @@ -48,7 +48,7 @@ SWITCH_TYPES: tuple[SwitchEntityDescription, ...] 
= ( SWITCH_KEYS: list[str] = [desc.key for desc in SWITCH_TYPES] -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All( cv.ensure_list, [vol.In(SWITCH_KEYS)] diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 4c97f43fd80..090f8c4588e 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_push", "loggers": ["aiowithings"], "quality_scale": "platinum", - "requirements": ["aiowithings==3.0.1"] + "requirements": ["aiowithings==3.0.2"] } diff --git a/homeassistant/components/wiz/__init__.py b/homeassistant/components/wiz/__init__.py index 79c317f178b..1bf3188e9e9 100644 --- a/homeassistant/components/wiz/__init__.py +++ b/homeassistant/components/wiz/__init__.py @@ -31,6 +31,8 @@ from .const import ( from .discovery import async_discover_devices, async_trigger_discovery from .models import WizData +type WizConfigEntry = ConfigEntry[WizData] + _LOGGER = logging.getLogger(__name__) PLATFORMS = [ @@ -135,9 +137,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await bulb.start_push(_async_push_update) bulb.set_discovery_callback(lambda bulb: async_trigger_discovery(hass, [bulb])) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = WizData( - coordinator=coordinator, bulb=bulb, scenes=scenes - ) + entry.runtime_data = WizData(coordinator=coordinator, bulb=bulb, scenes=scenes) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) @@ -147,6 +147,5 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS): - data: WizData = hass.data[DOMAIN].pop(entry.entry_id) - await data.bulb.async_close() + await entry.runtime_data.bulb.async_close() return unload_ok diff --git a/homeassistant/components/wiz/binary_sensor.py b/homeassistant/components/wiz/binary_sensor.py index b58e120a9dd..3411ee200b9 100644 --- a/homeassistant/components/wiz/binary_sensor.py +++ b/homeassistant/components/wiz/binary_sensor.py @@ -10,13 +10,13 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import WizConfigEntry from .const import DOMAIN, SIGNAL_WIZ_PIR from .entity import WizEntity from .models import WizData @@ -26,17 +26,16 @@ OCCUPANCY_UNIQUE_ID = "{}_occupancy" async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ binary sensor platform.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] - mac = wiz_data.bulb.mac + mac = entry.runtime_data.bulb.mac if er.async_get(hass).async_get_entity_id( Platform.BINARY_SENSOR, DOMAIN, OCCUPANCY_UNIQUE_ID.format(mac) ): - async_add_entities([WizOccupancyEntity(wiz_data, entry.title)]) + async_add_entities([WizOccupancyEntity(entry.runtime_data, entry.title)]) return cancel_dispatcher: Callable[[], None] | None = None @@ -47,7 +46,7 @@ async def async_setup_entry( assert cancel_dispatcher is not None cancel_dispatcher() cancel_dispatcher = None - async_add_entities([WizOccupancyEntity(wiz_data, entry.title)]) + async_add_entities([WizOccupancyEntity(entry.runtime_data, 
entry.title)]) cancel_dispatcher = async_dispatcher_connect( hass, SIGNAL_WIZ_PIR.format(mac), _async_add_occupancy_sensor diff --git a/homeassistant/components/wiz/diagnostics.py b/homeassistant/components/wiz/diagnostics.py index 5f617ebafe9..c58751c7fc0 100644 --- a/homeassistant/components/wiz/diagnostics.py +++ b/homeassistant/components/wiz/diagnostics.py @@ -5,24 +5,21 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .models import WizData +from . import WizConfigEntry TO_REDACT = {"roomId", "homeId"} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: WizConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] return { "entry": { "title": entry.title, "data": dict(entry.data), }, - "data": async_redact_data(wiz_data.bulb.diagnostics, TO_REDACT), + "data": async_redact_data(entry.runtime_data.bulb.diagnostics, TO_REDACT), } diff --git a/homeassistant/components/wiz/light.py b/homeassistant/components/wiz/light.py index aece184720d..a3f36d580d2 100644 --- a/homeassistant/components/wiz/light.py +++ b/homeassistant/components/wiz/light.py @@ -19,7 +19,6 @@ from homeassistant.components.light import ( LightEntityFeature, filter_supported_color_modes, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.color import ( @@ -27,7 +26,7 @@ from homeassistant.util.color import ( color_temperature_mired_to_kelvin, ) -from .const import DOMAIN +from . 
import WizConfigEntry from .entity import WizToggleEntity from .models import WizData @@ -61,13 +60,12 @@ def _async_pilot_builder(**kwargs: Any) -> PilotBuilder: async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ Platform from config_flow.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] - if wiz_data.bulb.bulbtype.bulb_type != BulbClass.SOCKET: - async_add_entities([WizBulbEntity(wiz_data, entry.title)]) + if entry.runtime_data.bulb.bulbtype.bulb_type != BulbClass.SOCKET: + async_add_entities([WizBulbEntity(entry.runtime_data, entry.title)]) class WizBulbEntity(WizToggleEntity, LightEntity): diff --git a/homeassistant/components/wiz/number.py b/homeassistant/components/wiz/number.py index 46708ac001e..0591e854d7d 100644 --- a/homeassistant/components/wiz/number.py +++ b/homeassistant/components/wiz/number.py @@ -13,12 +13,11 @@ from homeassistant.components.number import ( NumberEntityDescription, NumberMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import WizConfigEntry from .entity import WizEntity from .models import WizData @@ -68,15 +67,16 @@ NUMBERS: tuple[WizNumberEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the wiz speed number.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] async_add_entities( - WizSpeedNumber(wiz_data, entry.title, description) + WizSpeedNumber(entry.runtime_data, entry.title, description) for description in NUMBERS - if getattr(wiz_data.bulb.bulbtype.features, description.required_feature) + if getattr( + entry.runtime_data.bulb.bulbtype.features, description.required_feature + ) ) diff --git a/homeassistant/components/wiz/sensor.py b/homeassistant/components/wiz/sensor.py index aae443e60d0..eb77686a5cf 100644 --- a/homeassistant/components/wiz/sensor.py +++ b/homeassistant/components/wiz/sensor.py @@ -8,7 +8,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -17,7 +16,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import WizConfigEntry from .entity import WizEntity from .models import WizData @@ -45,18 +44,18 @@ POWER_SENSORS: tuple[SensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the wiz sensor.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] entities = [ - WizSensor(wiz_data, entry.title, description) for description in SENSORS + WizSensor(entry.runtime_data, entry.title, description) + for description in SENSORS ] - if wiz_data.coordinator.data is not None: + if entry.runtime_data.coordinator.data is not None: entities.extend( [ - WizPowerSensor(wiz_data, entry.title, description) + WizPowerSensor(entry.runtime_data, entry.title, description) for description in POWER_SENSORS ] ) diff --git a/homeassistant/components/wiz/switch.py b/homeassistant/components/wiz/switch.py index d94bf12da9f..4c089d2d6d2 100644 --- a/homeassistant/components/wiz/switch.py +++ b/homeassistant/components/wiz/switch.py @@ -8,24 +8,22 @@ from pywizlight import PilotBuilder from pywizlight.bulblibrary import BulbClass from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import WizConfigEntry from .entity import WizToggleEntity from .models import WizData async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ switch platform.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] - if wiz_data.bulb.bulbtype.bulb_type == BulbClass.SOCKET: - async_add_entities([WizSocketEntity(wiz_data, entry.title)]) + if entry.runtime_data.bulb.bulbtype.bulb_type == BulbClass.SOCKET: + async_add_entities([WizSocketEntity(entry.runtime_data, entry.title)]) class WizSocketEntity(WizToggleEntity, SwitchEntity): diff --git a/homeassistant/components/wled/__init__.py b/homeassistant/components/wled/__init__.py index ba87fb58122..b4834347694 100644 --- a/homeassistant/components/wled/__init__.py +++ b/homeassistant/components/wled/__init__.py @@ -5,9 +5,12 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey -from .const import LOGGER -from .coordinator import WLEDDataUpdateCoordinator +from .const import DOMAIN +from .coordinator import WLEDDataUpdateCoordinator, WLEDReleasesDataUpdateCoordinator PLATFORMS = ( Platform.BUTTON, @@ -21,23 +24,26 @@ PLATFORMS = ( type WLEDConfigEntry = ConfigEntry[WLEDDataUpdateCoordinator] +WLED_KEY: HassKey[WLEDReleasesDataUpdateCoordinator] = HassKey(DOMAIN) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the WLED integration. + + We set up a single coordinator for fetching WLED releases, which + is used across all WLED devices (and config entries) to avoid + fetching the same data multiple times for each. 
+ """ + hass.data[WLED_KEY] = WLEDReleasesDataUpdateCoordinator(hass) + await hass.data[WLED_KEY].async_request_refresh() + return True + async def async_setup_entry(hass: HomeAssistant, entry: WLEDConfigEntry) -> bool: """Set up WLED from a config entry.""" - coordinator = WLEDDataUpdateCoordinator(hass, entry=entry) - await coordinator.async_config_entry_first_refresh() - - if coordinator.data.info.leds.cct: - LOGGER.error( - ( - "WLED device '%s' has a CCT channel, which is not supported by " - "this integration" - ), - entry.title, - ) - return False - - entry.runtime_data = coordinator + entry.runtime_data = WLEDDataUpdateCoordinator(hass, entry=entry) + await entry.runtime_data.async_config_entry_first_refresh() # Set up all platforms for this device/entry. await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/wled/config_flow.py b/homeassistant/components/wled/config_flow.py index c40753b686a..7853ad2101e 100644 --- a/homeassistant/components/wled/config_flow.py +++ b/homeassistant/components/wled/config_flow.py @@ -46,8 +46,6 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): except WLEDConnectionError: errors["base"] = "cannot_connect" else: - if device.info.leds.cct: - return self.async_abort(reason="cct_unsupported") await self.async_set_unique_id(device.info.mac_address) self._abort_if_unique_id_configured( updates={CONF_HOST: user_input[CONF_HOST]} @@ -84,9 +82,6 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): except WLEDConnectionError: return self.async_abort(reason="cannot_connect") - if self.discovered_device.info.leds.cct: - return self.async_abort(reason="cct_unsupported") - await self.async_set_unique_id(self.discovered_device.info.mac_address) self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host}) diff --git a/homeassistant/components/wled/const.py b/homeassistant/components/wled/const.py index f698347537c..69ff6ccb1fa 100644 --- 
a/homeassistant/components/wled/const.py +++ b/homeassistant/components/wled/const.py @@ -3,17 +3,23 @@ from datetime import timedelta import logging +from wled import LightCapability + +from homeassistant.components.light import ColorMode + # Integration domain DOMAIN = "wled" LOGGER = logging.getLogger(__package__) SCAN_INTERVAL = timedelta(seconds=10) +RELEASES_SCAN_INTERVAL = timedelta(hours=3) # Options CONF_KEEP_MAIN_LIGHT = "keep_master_light" DEFAULT_KEEP_MAIN_LIGHT = False # Attributes +ATTR_CCT = "cct" ATTR_COLOR_PRIMARY = "color_primary" ATTR_DURATION = "duration" ATTR_FADE = "fade" @@ -24,3 +30,76 @@ ATTR_SOFTWARE_VERSION = "sw_version" ATTR_SPEED = "speed" ATTR_TARGET_BRIGHTNESS = "target_brightness" ATTR_UDP_PORT = "udp_port" + +# Static values +COLOR_TEMP_K_MIN = 2000 +COLOR_TEMP_K_MAX = 6535 + + +LIGHT_CAPABILITIES_COLOR_MODE_MAPPING: dict[LightCapability, list[ColorMode]] = { + LightCapability.NONE: [ + ColorMode.ONOFF, + ], + LightCapability.RGB_COLOR: [ + ColorMode.RGB, + ], + LightCapability.WHITE_CHANNEL: [ + ColorMode.BRIGHTNESS, + ], + LightCapability.RGB_COLOR | LightCapability.WHITE_CHANNEL: [ + ColorMode.RGBW, + ], + LightCapability.COLOR_TEMPERATURE: [ + ColorMode.COLOR_TEMP, + ], + LightCapability.RGB_COLOR | LightCapability.COLOR_TEMPERATURE: [ + ColorMode.RGBWW, + ], + LightCapability.WHITE_CHANNEL | LightCapability.COLOR_TEMPERATURE: [ + ColorMode.COLOR_TEMP, + ], + LightCapability.RGB_COLOR + | LightCapability.WHITE_CHANNEL + | LightCapability.COLOR_TEMPERATURE: [ + ColorMode.COLOR_TEMP, + ColorMode.RGBW, + ], + LightCapability.MANUAL_WHITE: [ + ColorMode.BRIGHTNESS, + ], + LightCapability.RGB_COLOR | LightCapability.MANUAL_WHITE: [ + ColorMode.RGBW, + ], + LightCapability.WHITE_CHANNEL | LightCapability.MANUAL_WHITE: [ + ColorMode.BRIGHTNESS, + ], + LightCapability.RGB_COLOR + | LightCapability.WHITE_CHANNEL + | LightCapability.MANUAL_WHITE: [ + ColorMode.RGBW, + ColorMode.WHITE, + ], + LightCapability.COLOR_TEMPERATURE | 
LightCapability.MANUAL_WHITE: [ + ColorMode.COLOR_TEMP, + ColorMode.WHITE, + ], + LightCapability.RGB_COLOR + | LightCapability.COLOR_TEMPERATURE + | LightCapability.MANUAL_WHITE: [ + ColorMode.RGBW, + ColorMode.COLOR_TEMP, + ], + LightCapability.WHITE_CHANNEL + | LightCapability.COLOR_TEMPERATURE + | LightCapability.MANUAL_WHITE: [ + ColorMode.COLOR_TEMP, + ColorMode.WHITE, + ], + LightCapability.RGB_COLOR + | LightCapability.WHITE_CHANNEL + | LightCapability.COLOR_TEMPERATURE + | LightCapability.MANUAL_WHITE: [ + ColorMode.RGBW, + ColorMode.COLOR_TEMP, + ], +} diff --git a/homeassistant/components/wled/coordinator.py b/homeassistant/components/wled/coordinator.py index f6219c63cb8..cb39fde5e5a 100644 --- a/homeassistant/components/wled/coordinator.py +++ b/homeassistant/components/wled/coordinator.py @@ -2,7 +2,14 @@ from __future__ import annotations -from wled import WLED, Device as WLEDDevice, WLEDConnectionClosedError, WLEDError +from wled import ( + WLED, + Device as WLEDDevice, + Releases, + WLEDConnectionClosedError, + WLEDError, + WLEDReleases, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP @@ -15,6 +22,7 @@ from .const import ( DEFAULT_KEEP_MAIN_LIGHT, DOMAIN, LOGGER, + RELEASES_SCAN_INTERVAL, SCAN_INTERVAL, ) @@ -101,17 +109,37 @@ class WLEDDataUpdateCoordinator(DataUpdateCoordinator[WLEDDevice]): async def _async_update_data(self) -> WLEDDevice: """Fetch data from WLED.""" try: - device = await self.wled.update(full_update=not self.last_update_success) + device = await self.wled.update() except WLEDError as error: raise UpdateFailed(f"Invalid response from API: {error}") from error # If the device supports a WebSocket, try activating it. 
if ( device.info.websocket is not None - and device.info.leds.cct is not True and not self.wled.connected and not self.unsub ): self._use_websocket() return device + + +class WLEDReleasesDataUpdateCoordinator(DataUpdateCoordinator[Releases]): + """Class to manage fetching WLED releases.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize global WLED releases updater.""" + self.wled = WLEDReleases(session=async_get_clientsession(hass)) + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=RELEASES_SCAN_INTERVAL, + ) + + async def _async_update_data(self) -> Releases: + """Fetch release data from WLED.""" + try: + return await self.wled.releases() + except WLEDError as error: + raise UpdateFailed(f"Invalid response from GitHub API: {error}") from error diff --git a/homeassistant/components/wled/diagnostics.py b/homeassistant/components/wled/diagnostics.py index e81760e0f72..732cd3602a0 100644 --- a/homeassistant/components/wled/diagnostics.py +++ b/homeassistant/components/wled/diagnostics.py @@ -17,31 +17,23 @@ async def async_get_config_entry_diagnostics( coordinator = entry.runtime_data data: dict[str, Any] = { - "info": async_redact_data(coordinator.data.info.__dict__, "wifi"), - "state": coordinator.data.state.__dict__, + "info": async_redact_data(coordinator.data.info.to_dict(), "wifi"), + "state": coordinator.data.state.to_dict(), "effects": { - effect.effect_id: effect.name for effect in coordinator.data.effects + effect.effect_id: effect.name + for effect in coordinator.data.effects.values() }, "palettes": { - palette.palette_id: palette.name for palette in coordinator.data.palettes + palette.palette_id: palette.name + for palette in coordinator.data.palettes.values() }, "playlists": { - playlist.playlist_id: { - "name": playlist.name, - "repeat": playlist.repeat, - "shuffle": playlist.shuffle, - "end": playlist.end.preset_id if playlist.end else None, - } - for playlist in coordinator.data.playlists + 
playlist.playlist_id: playlist.name + for playlist in coordinator.data.playlists.values() }, "presets": { - preset.preset_id: { - "name": preset.name, - "quick_label": preset.quick_label, - "on": preset.on, - "transition": preset.transition, - } - for preset in coordinator.data.presets + preset.preset_id: preset.name + for preset in coordinator.data.presets.values() }, } return data diff --git a/homeassistant/components/wled/helpers.py b/homeassistant/components/wled/helpers.py index 0dd29fdc2a3..216dba67c94 100644 --- a/homeassistant/components/wled/helpers.py +++ b/homeassistant/components/wled/helpers.py @@ -35,3 +35,13 @@ def wled_exception_handler[_WLEDEntityT: WLEDEntity, **_P]( raise HomeAssistantError("Invalid response from WLED API") from error return handler + + +def kelvin_to_255(k: int, min_k: int, max_k: int) -> int: + """Map color temperature in K from minK-maxK to 0-255.""" + return int((k - min_k) / (max_k - min_k) * 255) + + +def kelvin_to_255_reverse(v: int, min_k: int, max_k: int) -> int: + """Map color temperature from 0-255 to minK-maxK K.""" + return int(v / 255 * (max_k - min_k) + min_k) diff --git a/homeassistant/components/wled/light.py b/homeassistant/components/wled/light.py index 36ebd024de3..b4edf10dc58 100644 --- a/homeassistant/components/wled/light.py +++ b/homeassistant/components/wled/light.py @@ -7,6 +7,7 @@ from typing import Any, cast from homeassistant.components.light import ( ATTR_BRIGHTNESS, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -19,10 +20,18 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import WLEDConfigEntry -from .const import ATTR_COLOR_PRIMARY, ATTR_ON, ATTR_SEGMENT_ID +from .const import ( + ATTR_CCT, + ATTR_COLOR_PRIMARY, + ATTR_ON, + ATTR_SEGMENT_ID, + COLOR_TEMP_K_MAX, + COLOR_TEMP_K_MIN, + LIGHT_CAPABILITIES_COLOR_MODE_MAPPING, +) from .coordinator import WLEDDataUpdateCoordinator from .entity import WLEDEntity -from .helpers import wled_exception_handler +from .helpers import kelvin_to_255, kelvin_to_255_reverse, wled_exception_handler PARALLEL_UPDATES = 1 @@ -104,6 +113,8 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): _attr_supported_features = LightEntityFeature.EFFECT | LightEntityFeature.TRANSITION _attr_translation_key = "segment" + _attr_min_color_temp_kelvin = COLOR_TEMP_K_MIN + _attr_max_color_temp_kelvin = COLOR_TEMP_K_MAX def __init__( self, @@ -112,8 +123,6 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): ) -> None: """Initialize WLED segment light.""" super().__init__(coordinator=coordinator) - self._rgbw = coordinator.data.info.leds.rgbw - self._wv = coordinator.data.info.leds.wv self._segment = segment # Segment 0 uses a simpler name, which is more natural for when using @@ -127,18 +136,24 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): f"{self.coordinator.data.info.mac_address}_{self._segment}" ) - self._attr_color_mode = ColorMode.RGB - self._attr_supported_color_modes = {ColorMode.RGB} - if self._rgbw and self._wv: - self._attr_color_mode = ColorMode.RGBW - self._attr_supported_color_modes = {ColorMode.RGBW} + if ( + coordinator.data.info.leds.segment_light_capabilities is not None + and ( + color_modes := LIGHT_CAPABILITIES_COLOR_MODE_MAPPING.get( + coordinator.data.info.leds.segment_light_capabilities[segment] + ) + ) + is not None + ): + self._attr_color_mode = color_modes[0] + self._attr_supported_color_modes = set(color_modes) @property def available(self) -> bool: """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except IndexError: + except KeyError: 
return False return super().available @@ -146,20 +161,29 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): @property def rgb_color(self) -> tuple[int, int, int] | None: """Return the color value.""" - return self.coordinator.data.state.segments[self._segment].color_primary[:3] + if not (color := self.coordinator.data.state.segments[self._segment].color): + return None + return color.primary[:3] @property def rgbw_color(self) -> tuple[int, int, int, int] | None: """Return the color value.""" - return cast( - tuple[int, int, int, int], - self.coordinator.data.state.segments[self._segment].color_primary, - ) + if not (color := self.coordinator.data.state.segments[self._segment].color): + return None + return cast(tuple[int, int, int, int], color.primary) + + @property + def color_temp_kelvin(self) -> int | None: + """Return the CT color value in K.""" + cct = self.coordinator.data.state.segments[self._segment].cct + return kelvin_to_255_reverse(cct, COLOR_TEMP_K_MIN, COLOR_TEMP_K_MAX) @property def effect(self) -> str | None: """Return the current effect of the light.""" - return self.coordinator.data.state.segments[self._segment].effect.name + return self.coordinator.data.effects[ + int(self.coordinator.data.state.segments[self._segment].effect_id) + ].name @property def brightness(self) -> int | None: @@ -178,7 +202,7 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): @property def effect_list(self) -> list[str]: """Return the list of supported effects.""" - return [effect.name for effect in self.coordinator.data.effects] + return [effect.name for effect in self.coordinator.data.effects.values()] @property def is_on(self) -> bool: @@ -223,6 +247,11 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): if ATTR_RGBW_COLOR in kwargs: data[ATTR_COLOR_PRIMARY] = kwargs[ATTR_RGBW_COLOR] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + data[ATTR_CCT] = kelvin_to_255( + kwargs[ATTR_COLOR_TEMP_KELVIN], COLOR_TEMP_K_MIN, COLOR_TEMP_K_MAX + ) + if ATTR_TRANSITION in kwargs: # WLED 
uses 100ms per unit, so 10 = 1 second. data[ATTR_TRANSITION] = round(kwargs[ATTR_TRANSITION] * 10) @@ -258,7 +287,11 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = {light.segment_id for light in coordinator.data.state.segments} + segment_ids = { + light.segment_id + for light in coordinator.data.state.segments.values() + if light.segment_id is not None + } new_entities: list[WLEDMainLight | WLEDSegmentLight] = [] # More than 1 segment now? No main? Add main controls diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json index a01bbcabdd6..71939127356 100644 --- a/homeassistant/components/wled/manifest.json +++ b/homeassistant/components/wled/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "quality_scale": "platinum", - "requirements": ["wled==0.18.0"], + "requirements": ["wled==0.20.2"], "zeroconf": ["_wled._tcp.local."] } diff --git a/homeassistant/components/wled/number.py b/homeassistant/components/wled/number.py index 5af466360bb..225d783bfdb 100644 --- a/homeassistant/components/wled/number.py +++ b/homeassistant/components/wled/number.py @@ -44,7 +44,7 @@ async def async_setup_entry( class WLEDNumberEntityDescription(NumberEntityDescription): """Class describing WLED number entities.""" - value_fn: Callable[[Segment], float | None] + value_fn: Callable[[Segment], int | None] NUMBERS = [ @@ -64,7 +64,7 @@ NUMBERS = [ native_step=1, native_min_value=0, native_max_value=255, - value_fn=lambda segment: segment.intensity, + value_fn=lambda segment: int(segment.intensity), ), ] @@ -100,7 +100,7 @@ class WLEDNumber(WLEDEntity, NumberEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except IndexError: + except KeyError: return False return super().available @@ -133,7 +133,11 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> 
None: """Update segments.""" - segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} + segment_ids = { + segment.segment_id + for segment in coordinator.data.state.segments.values() + if segment.segment_id is not None + } new_entities: list[WLEDNumber] = [] diff --git a/homeassistant/components/wled/select.py b/homeassistant/components/wled/select.py index 20b14531ac7..a645b04573c 100644 --- a/homeassistant/components/wled/select.py +++ b/homeassistant/components/wled/select.py @@ -4,7 +4,7 @@ from __future__ import annotations from functools import partial -from wled import Live, Playlist, Preset +from wled import LiveDataOverride from homeassistant.components.select import SelectEntity from homeassistant.const import EntityCategory @@ -56,17 +56,17 @@ class WLEDLiveOverrideSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_live_override" - self._attr_options = [str(live.value) for live in Live] + self._attr_options = [str(live.value) for live in LiveDataOverride] @property def current_option(self) -> str: """Return the current selected live override.""" - return str(self.coordinator.data.state.lor.value) + return str(self.coordinator.data.state.live_data_override.value) @wled_exception_handler async def async_select_option(self, option: str) -> None: """Set WLED state to the selected live override state.""" - await self.coordinator.wled.live(live=Live(int(option))) + await self.coordinator.wled.live(live=LiveDataOverride(int(option))) class WLEDPresetSelect(WLEDEntity, SelectEntity): @@ -79,7 +79,9 @@ class WLEDPresetSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_preset" - self._attr_options = [preset.name for preset in self.coordinator.data.presets] + self._attr_options = [ + preset.name for preset in self.coordinator.data.presets.values() + ] @property def 
available(self) -> bool: @@ -89,9 +91,13 @@ class WLEDPresetSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the current selected preset.""" - if not isinstance(self.coordinator.data.state.preset, Preset): + if not self.coordinator.data.state.preset_id: return None - return self.coordinator.data.state.preset.name + if preset := self.coordinator.data.presets.get( + self.coordinator.data.state.preset_id + ): + return preset.name + return None @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -110,7 +116,7 @@ class WLEDPlaylistSelect(WLEDEntity, SelectEntity): self._attr_unique_id = f"{coordinator.data.info.mac_address}_playlist" self._attr_options = [ - playlist.name for playlist in self.coordinator.data.playlists + playlist.name for playlist in self.coordinator.data.playlists.values() ] @property @@ -121,9 +127,13 @@ class WLEDPlaylistSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the currently selected playlist.""" - if not isinstance(self.coordinator.data.state.playlist, Playlist): + if not self.coordinator.data.state.playlist_id: return None - return self.coordinator.data.state.playlist.name + if playlist := self.coordinator.data.playlists.get( + self.coordinator.data.state.playlist_id + ): + return playlist.name + return None @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -150,7 +160,7 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): self._attr_unique_id = f"{coordinator.data.info.mac_address}_palette_{segment}" self._attr_options = [ - palette.name for palette in self.coordinator.data.palettes + palette.name for palette in self.coordinator.data.palettes.values() ] self._segment = segment @@ -159,7 +169,7 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except IndexError: + except KeyError: 
return False return super().available @@ -167,7 +177,9 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the current selected color palette.""" - return self.coordinator.data.state.segments[self._segment].palette.name + return self.coordinator.data.palettes[ + int(self.coordinator.data.state.segments[self._segment].palette_id) + ].name @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -182,7 +194,11 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} + segment_ids = { + segment.segment_id + for segment in coordinator.data.state.segments.values() + if segment.segment_id is not None + } new_entities: list[WLEDPaletteSelect] = [] diff --git a/homeassistant/components/wled/sensor.py b/homeassistant/components/wled/sensor.py index 7d18665a085..4f97c367612 100644 --- a/homeassistant/components/wled/sensor.py +++ b/homeassistant/components/wled/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import datetime from wled import Device as WLEDDevice @@ -71,7 +71,7 @@ SENSORS: tuple[WLEDSensorEntityDescription, ...] 
= ( device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, - value_fn=lambda device: (utcnow() - timedelta(seconds=device.info.uptime)), + value_fn=lambda device: (utcnow() - device.info.uptime), ), WLEDSensorEntityDescription( key="free_heap", diff --git a/homeassistant/components/wled/strings.json b/homeassistant/components/wled/strings.json index 9581641f545..50dc0129369 100644 --- a/homeassistant/components/wled/strings.json +++ b/homeassistant/components/wled/strings.json @@ -21,8 +21,7 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "cct_unsupported": "This WLED device uses CCT channels, which is not supported by this integration" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "options": { diff --git a/homeassistant/components/wled/switch.py b/homeassistant/components/wled/switch.py index 7ec75b956c0..643834dcdec 100644 --- a/homeassistant/components/wled/switch.py +++ b/homeassistant/components/wled/switch.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import WLEDConfigEntry -from .const import ATTR_DURATION, ATTR_FADE, ATTR_TARGET_BRIGHTNESS, ATTR_UDP_PORT +from .const import ATTR_DURATION, ATTR_TARGET_BRIGHTNESS, ATTR_UDP_PORT from .coordinator import WLEDDataUpdateCoordinator from .entity import WLEDEntity from .helpers import wled_exception_handler @@ -62,7 +62,6 @@ class WLEDNightlightSwitch(WLEDEntity, SwitchEntity): state = self.coordinator.data.state return { ATTR_DURATION: state.nightlight.duration, - ATTR_FADE: state.nightlight.fade, ATTR_TARGET_BRIGHTNESS: state.nightlight.target_brightness, } @@ -171,7 +170,7 @@ class WLEDReverseSwitch(WLEDEntity, SwitchEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except IndexError: + except KeyError: return False return super().available @@ -199,7 +198,11 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} + segment_ids = { + segment.segment_id + for segment in coordinator.data.state.segments.values() + if segment.segment_id is not None + } new_entities: list[WLEDReverseSwitch] = [] diff --git a/homeassistant/components/wled/update.py b/homeassistant/components/wled/update.py index 05df5fcf54f..384b394ac50 100644 --- a/homeassistant/components/wled/update.py +++ b/homeassistant/components/wled/update.py @@ -12,8 +12,8 @@ from homeassistant.components.update import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WLEDConfigEntry -from .coordinator import WLEDDataUpdateCoordinator +from . 
import WLED_KEY, WLEDConfigEntry +from .coordinator import WLEDDataUpdateCoordinator, WLEDReleasesDataUpdateCoordinator from .entity import WLEDEntity from .helpers import wled_exception_handler @@ -24,7 +24,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up WLED update based on a config entry.""" - async_add_entities([WLEDUpdateEntity(entry.runtime_data)]) + async_add_entities([WLEDUpdateEntity(entry.runtime_data, hass.data[WLED_KEY])]) class WLEDUpdateEntity(WLEDEntity, UpdateEntity): @@ -36,11 +36,33 @@ class WLEDUpdateEntity(WLEDEntity, UpdateEntity): ) _attr_title = "WLED" - def __init__(self, coordinator: WLEDDataUpdateCoordinator) -> None: + def __init__( + self, + coordinator: WLEDDataUpdateCoordinator, + releases_coordinator: WLEDReleasesDataUpdateCoordinator, + ) -> None: """Initialize the update entity.""" super().__init__(coordinator=coordinator) + self.releases_coordinator = releases_coordinator self._attr_unique_id = coordinator.data.info.mac_address + async def async_added_to_hass(self) -> None: + """When entity is added to hass. + + Register extra update listener for the releases coordinator. + """ + await super().async_added_to_hass() + self.async_on_remove( + self.releases_coordinator.async_add_listener( + self._handle_coordinator_update + ) + ) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.releases_coordinator.last_update_success + @property def installed_version(self) -> str | None: """Version currently installed and in use.""" @@ -54,17 +76,17 @@ class WLEDUpdateEntity(WLEDEntity, UpdateEntity): # If we already run a pre-release, we consider being on the beta channel. 
# Offer beta version upgrade, unless stable is newer if ( - (beta := self.coordinator.data.info.version_latest_beta) is not None + (beta := self.releases_coordinator.data.beta) is not None and (current := self.coordinator.data.info.version) is not None and (current.alpha or current.beta or current.release_candidate) and ( - (stable := self.coordinator.data.info.version_latest_stable) is None - or (stable is not None and stable < beta) + (stable := self.releases_coordinator.data.stable) is None + or (stable is not None and stable < beta and current > stable) ) ): return str(beta) - if (stable := self.coordinator.data.info.version_latest_stable) is not None: + if (stable := self.releases_coordinator.data.stable) is not None: return str(stable) return None diff --git a/homeassistant/components/wolflink/manifest.json b/homeassistant/components/wolflink/manifest.json index e406217a0c8..6a98dcd6ca4 100644 --- a/homeassistant/components/wolflink/manifest.json +++ b/homeassistant/components/wolflink/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wolflink", "iot_class": "cloud_polling", "loggers": ["wolf_comm"], - "requirements": ["wolf-comm==0.0.8"] + "requirements": ["wolf-comm==0.0.9"] } diff --git a/homeassistant/components/workday/binary_sensor.py b/homeassistant/components/workday/binary_sensor.py index 5df8e6c3d75..4635b2209a6 100644 --- a/homeassistant/components/workday/binary_sensor.py +++ b/homeassistant/components/workday/binary_sensor.py @@ -6,6 +6,7 @@ from datetime import date, datetime, timedelta from typing import Final from holidays import ( + PUBLIC, HolidayBase, __version__ as python_holidays_version, country_holidays, @@ -35,6 +36,7 @@ from homeassistant.util import dt as dt_util, slugify from .const import ( ALLOWED_DAYS, CONF_ADD_HOLIDAYS, + CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_PROVINCE, @@ -69,17 +71,28 @@ def validate_dates(holiday_list: list[str]) -> list[str]: def _get_obj_holidays( - country: 
str | None, province: str | None, year: int, language: str | None + country: str | None, + province: str | None, + year: int, + language: str | None, + categories: list[str] | None, ) -> HolidayBase: """Get the object for the requested country and year.""" if not country: return HolidayBase() + set_categories = None + if categories: + category_list = [PUBLIC] + category_list.extend(categories) + set_categories = tuple(category_list) + obj_holidays: HolidayBase = country_holidays( country, subdiv=province, years=year, language=language, + categories=set_categories, # type: ignore[arg-type] ) if (supported_languages := obj_holidays.supported_languages) and language == "en": for lang in supported_languages: @@ -89,6 +102,7 @@ def _get_obj_holidays( subdiv=province, years=year, language=lang, + categories=set_categories, # type: ignore[arg-type] ) LOGGER.debug("Changing language from %s to %s", language, lang) return obj_holidays @@ -107,10 +121,11 @@ async def async_setup_entry( sensor_name: str = entry.options[CONF_NAME] workdays: list[str] = entry.options[CONF_WORKDAYS] language: str | None = entry.options.get(CONF_LANGUAGE) + categories: list[str] | None = entry.options.get(CONF_CATEGORY) year: int = (dt_util.now() + timedelta(days=days_offset)).year obj_holidays: HolidayBase = await hass.async_add_executor_job( - _get_obj_holidays, country, province, year, language + _get_obj_holidays, country, province, year, language, categories ) calc_add_holidays: list[str] = validate_dates(add_holidays) calc_remove_holidays: list[str] = validate_dates(remove_holidays) diff --git a/homeassistant/components/workday/config_flow.py b/homeassistant/components/workday/config_flow.py index a66a9c51588..ebbc8fb0b99 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from functools import partial from typing import Any -from holidays import HolidayBase, country_holidays, 
list_supported_countries +from holidays import PUBLIC, HolidayBase, country_holidays, list_supported_countries import voluptuous as vol from homeassistant.config_entries import ( @@ -36,6 +36,7 @@ from homeassistant.util import dt as dt_util from .const import ( ALLOWED_DAYS, CONF_ADD_HOLIDAYS, + CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_PROVINCE, @@ -86,7 +87,29 @@ def add_province_and_language_to_schema( ), } - return vol.Schema({**DATA_SCHEMA_OPT.schema, **language_schema, **province_schema}) + category_schema = {} + # PUBLIC will always be included and can therefore not be set/removed + _categories = [x for x in _country.supported_categories if x != PUBLIC] + if _categories: + category_schema = { + vol.Optional(CONF_CATEGORY): SelectSelector( + SelectSelectorConfig( + options=_categories, + mode=SelectSelectorMode.DROPDOWN, + multiple=True, + translation_key=CONF_CATEGORY, + ) + ), + } + + return vol.Schema( + { + **DATA_SCHEMA_OPT.schema, + **language_schema, + **province_schema, + **category_schema, + } + ) def _is_valid_date_range(check_date: str, error: type[HomeAssistantError]) -> bool: @@ -256,6 +279,8 @@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], CONF_PROVINCE: combined_input.get(CONF_PROVINCE), } + if CONF_CATEGORY in combined_input: + abort_match[CONF_CATEGORY] = combined_input[CONF_CATEGORY] LOGGER.debug("abort_check in options with %s", combined_input) self._async_abort_entries_match(abort_match) @@ -314,18 +339,19 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): errors["remove_holidays"] = "remove_holiday_range_error" else: LOGGER.debug("abort_check in options with %s", combined_input) + abort_match = { + CONF_COUNTRY: self._config_entry.options.get(CONF_COUNTRY), + CONF_EXCLUDES: combined_input[CONF_EXCLUDES], + CONF_OFFSET: combined_input[CONF_OFFSET], + CONF_WORKDAYS: combined_input[CONF_WORKDAYS], + CONF_ADD_HOLIDAYS: combined_input[CONF_ADD_HOLIDAYS], + 
CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], + CONF_PROVINCE: combined_input.get(CONF_PROVINCE), + } + if CONF_CATEGORY in combined_input: + abort_match[CONF_CATEGORY] = combined_input[CONF_CATEGORY] try: - self._async_abort_entries_match( - { - CONF_COUNTRY: self._config_entry.options.get(CONF_COUNTRY), - CONF_EXCLUDES: combined_input[CONF_EXCLUDES], - CONF_OFFSET: combined_input[CONF_OFFSET], - CONF_WORKDAYS: combined_input[CONF_WORKDAYS], - CONF_ADD_HOLIDAYS: combined_input[CONF_ADD_HOLIDAYS], - CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], - CONF_PROVINCE: combined_input.get(CONF_PROVINCE), - } - ) + self._async_abort_entries_match(abort_match) except AbortFlow as err: errors = {"base": err.reason} else: diff --git a/homeassistant/components/workday/const.py b/homeassistant/components/workday/const.py index 6a46f1e824b..76580ae642f 100644 --- a/homeassistant/components/workday/const.py +++ b/homeassistant/components/workday/const.py @@ -19,6 +19,7 @@ CONF_EXCLUDES = "excludes" CONF_OFFSET = "days_offset" CONF_ADD_HOLIDAYS = "add_holidays" CONF_REMOVE_HOLIDAYS = "remove_holidays" +CONF_CATEGORY = "category" # By default, Monday - Friday are workdays DEFAULT_WORKDAYS = ["mon", "tue", "wed", "thu", "fri"] diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 1148f46e2d1..69df8080fa5 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.51"] + "requirements": ["holidays==0.53"] } diff --git a/homeassistant/components/workday/strings.json b/homeassistant/components/workday/strings.json index 0e618beaf82..f3b966e28ea 100644 --- a/homeassistant/components/workday/strings.json +++ b/homeassistant/components/workday/strings.json @@ -20,7 +20,8 @@ "add_holidays": "Add holidays", 
"remove_holidays": "Remove Holidays", "province": "Subdivision of country", - "language": "Language for named holidays" + "language": "Language for named holidays", + "category": "Additional category as holiday" }, "data_description": { "excludes": "List of workdays to exclude, notice the keyword `holiday` and read the documentation on how to use it correctly", @@ -29,7 +30,8 @@ "add_holidays": "Add custom holidays as YYYY-MM-DD or as range using `,` as separator", "remove_holidays": "Remove holidays as YYYY-MM-DD, as range using `,` as separator or by using partial of name", "province": "State, territory, province or region of country", - "language": "Language to use when configuring named holiday exclusions" + "language": "Language to use when configuring named holiday exclusions", + "category": "Select additional categories to include as holidays" } } }, @@ -51,7 +53,8 @@ "add_holidays": "[%key:component::workday::config::step::options::data::add_holidays%]", "remove_holidays": "[%key:component::workday::config::step::options::data::remove_holidays%]", "province": "[%key:component::workday::config::step::options::data::province%]", - "language": "[%key:component::workday::config::step::options::data::language%]" + "language": "[%key:component::workday::config::step::options::data::language%]", + "category": "[%key:component::workday::config::step::options::data::category%]" }, "data_description": { "excludes": "[%key:component::workday::config::step::options::data_description::excludes%]", @@ -60,7 +63,8 @@ "add_holidays": "[%key:component::workday::config::step::options::data_description::add_holidays%]", "remove_holidays": "[%key:component::workday::config::step::options::data_description::remove_holidays%]", "province": "[%key:component::workday::config::step::options::data_description::province%]", - "language": "[%key:component::workday::config::step::options::data_description::language%]" + "language": 
"[%key:component::workday::config::step::options::data_description::language%]", + "category": "[%key:component::workday::config::step::options::data_description::category%]" } } }, @@ -78,6 +82,24 @@ "none": "No subdivision" } }, + "category": { + "options": { + "armed_forces": "Armed forces", + "bank": "Bank", + "government": "Government", + "half_day": "Half day", + "optional": "Optional", + "public": "Public", + "school": "School", + "unofficial": "Unofficial", + "workday": "Workday", + "chinese": "Chinese", + "christian": "Christian", + "hebrew": "Hebrew", + "hindu": "Hindu", + "islamic": "Islamic" + } + }, "days": { "options": { "mon": "[%key:common::time::monday%]", diff --git a/homeassistant/components/worldclock/__init__.py b/homeassistant/components/worldclock/__init__.py index 978eaac8968..ad01c45917a 100644 --- a/homeassistant/components/worldclock/__init__.py +++ b/homeassistant/components/worldclock/__init__.py @@ -1 +1,25 @@ """The worldclock component.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from .const import PLATFORMS + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Worldclock from a config entry.""" + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload World clock config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/worldclock/config_flow.py b/homeassistant/components/worldclock/config_flow.py new file mode 100644 index 00000000000..a9598c049aa --- /dev/null +++ 
b/homeassistant/components/worldclock/config_flow.py @@ -0,0 +1,107 @@ +"""Config flow for World clock.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, cast +import zoneinfo + +import voluptuous as vol + +from homeassistant.const import CONF_NAME, CONF_TIME_ZONE +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, + TextSelector, +) + +from .const import CONF_TIME_FORMAT, DEFAULT_NAME, DEFAULT_TIME_STR_FORMAT, DOMAIN + +TIME_STR_OPTIONS = [ + SelectOptionDict( + value=DEFAULT_TIME_STR_FORMAT, label=f"14:05 ({DEFAULT_TIME_STR_FORMAT})" + ), + SelectOptionDict(value="%I:%M %p", label="11:05 am (%I:%M %p)"), + SelectOptionDict(value="%Y-%m-%d %H:%M", label="2024-01-01 14:05 (%Y-%m-%d %H:%M)"), + SelectOptionDict( + value="%a, %b %d, %Y %I:%M %p", + label="Monday, Jan 01, 2024 11:05 am (%a, %b %d, %Y %I:%M %p)", + ), +] + + +async def validate_duplicate( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate already existing entry.""" + handler.parent_handler._async_abort_entries_match({**handler.options, **user_input}) # noqa: SLF001 + + return user_input + + +async def get_schema(handler: SchemaCommonFlowHandler) -> vol.Schema: + """Get available timezones.""" + get_timezones: list[str] = list( + await handler.parent_handler.hass.async_add_executor_job( + zoneinfo.available_timezones + ) + ) + return vol.Schema( + { + vol.Required(CONF_NAME, default=DEFAULT_NAME): TextSelector(), + vol.Required(CONF_TIME_ZONE): SelectSelector( + SelectSelectorConfig( + options=get_timezones, mode=SelectSelectorMode.DROPDOWN, sort=True + ) + ), + } + ).extend(DATA_SCHEMA_OPTIONS.schema) + + +DATA_SCHEMA_OPTIONS = vol.Schema( + { + vol.Optional(CONF_TIME_FORMAT, 
default=DEFAULT_TIME_STR_FORMAT): SelectSelector( + SelectSelectorConfig( + options=TIME_STR_OPTIONS, + custom_value=True, + mode=SelectSelectorMode.DROPDOWN, + ) + ) + } +) + + +CONFIG_FLOW = { + "user": SchemaFlowFormStep( + schema=get_schema, + validate_user_input=validate_duplicate, + ), + "import": SchemaFlowFormStep( + schema=get_schema, + validate_user_input=validate_duplicate, + ), +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep( + DATA_SCHEMA_OPTIONS, + validate_user_input=validate_duplicate, + ) +} + + +class WorldclockConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Worldclock.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + return cast(str, options[CONF_NAME]) diff --git a/homeassistant/components/worldclock/const.py b/homeassistant/components/worldclock/const.py new file mode 100644 index 00000000000..fafa3dbc52f --- /dev/null +++ b/homeassistant/components/worldclock/const.py @@ -0,0 +1,11 @@ +"""Constants for world clock component.""" + +from homeassistant.const import Platform + +DOMAIN = "worldclock" +PLATFORMS = [Platform.SENSOR] + +CONF_TIME_FORMAT = "time_format" + +DEFAULT_NAME = "Worldclock Sensor" +DEFAULT_TIME_STR_FORMAT = "%H:%M" diff --git a/homeassistant/components/worldclock/manifest.json b/homeassistant/components/worldclock/manifest.json index 61600e4f924..bc7ee3cd939 100644 --- a/homeassistant/components/worldclock/manifest.json +++ b/homeassistant/components/worldclock/manifest.json @@ -2,6 +2,7 @@ "domain": "worldclock", "name": "Worldclock", "codeowners": ["@fabaff"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/worldclock", "iot_class": "local_push", "quality_scale": "internal" diff --git a/homeassistant/components/worldclock/sensor.py b/homeassistant/components/worldclock/sensor.py index 9b2cb600ac1..f4879ca08c4 100644 --- 
a/homeassistant/components/worldclock/sensor.py +++ b/homeassistant/components/worldclock/sensor.py @@ -6,20 +6,23 @@ from datetime import tzinfo import voluptuous as vol -from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_NAME, CONF_TIME_ZONE -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -CONF_TIME_FORMAT = "time_format" +from .const import CONF_TIME_FORMAT, DEFAULT_NAME, DEFAULT_TIME_STR_FORMAT, DOMAIN -DEFAULT_NAME = "Worldclock Sensor" -DEFAULT_TIME_STR_FORMAT = "%H:%M" - -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_TIME_ZONE): cv.time_zone, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ -35,13 +38,44 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the World clock sensor.""" - time_zone = dt_util.get_time_zone(config[CONF_TIME_ZONE]) + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config, + ) + ) + + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.2.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + 
translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Worldclock", + }, + ) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the World clock sensor entry.""" + time_zone = await dt_util.async_get_time_zone(entry.options[CONF_TIME_ZONE]) async_add_entities( [ WorldClockSensor( time_zone, - config[CONF_NAME], - config[CONF_TIME_FORMAT], + entry.options[CONF_NAME], + entry.options[CONF_TIME_FORMAT], + entry.entry_id, ) ], True, @@ -52,12 +86,22 @@ class WorldClockSensor(SensorEntity): """Representation of a World clock sensor.""" _attr_icon = "mdi:clock" + _attr_has_entity_name = True + _attr_name = None - def __init__(self, time_zone: tzinfo | None, name: str, time_format: str) -> None: + def __init__( + self, time_zone: tzinfo | None, name: str, time_format: str, unique_id: str + ) -> None: """Initialize the sensor.""" - self._attr_name = name self._time_zone = time_zone self._time_format = time_format + self._attr_unique_id = unique_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + name=name, + entry_type=DeviceEntryType.SERVICE, + manufacturer="Worldclock", + ) async def async_update(self) -> None: """Get the time and updates the states.""" diff --git a/homeassistant/components/worldclock/strings.json b/homeassistant/components/worldclock/strings.json new file mode 100644 index 00000000000..2f6b8d67a7c --- /dev/null +++ b/homeassistant/components/worldclock/strings.json @@ -0,0 +1,35 @@ +{ + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "step": { + "user": { + "data": { + "name": "[%key:common::config_flow::data::name%]", + "time_zone": "Timezone", + "time_format": "Time format" + }, + "data_description": { + "time_zone": "Select timezone from list", + "time_format": "Select a pre-defined format from the list or define your own format." 
+ } + } + } + }, + "options": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "step": { + "init": { + "data": { + "time_format": "[%key:component::worldclock::config::step::user::data::time_format%]" + }, + "data_description": { + "time_format": "[%key:component::worldclock::config::step::user::data_description::time_format%]" + } + } + } + } +} diff --git a/homeassistant/components/worldtidesinfo/sensor.py b/homeassistant/components/worldtidesinfo/sensor.py index a4d663cc184..45f39894abb 100644 --- a/homeassistant/components/worldtidesinfo/sensor.py +++ b/homeassistant/components/worldtidesinfo/sensor.py @@ -9,7 +9,10 @@ import time import requests import voluptuous as vol -from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv @@ -24,7 +27,7 @@ DEFAULT_NAME = "WorldTidesInfo" SCAN_INTERVAL = timedelta(seconds=3600) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LATITUDE): cv.latitude, diff --git a/homeassistant/components/worxlandroid/sensor.py b/homeassistant/components/worxlandroid/sensor.py index 10f40bea685..50700b78f35 100644 --- a/homeassistant/components/worxlandroid/sensor.py +++ b/homeassistant/components/worxlandroid/sensor.py @@ -8,7 +8,10 @@ import logging import aiohttp import voluptuous as vol -from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) from homeassistant.const import CONF_HOST, CONF_PIN, CONF_TIMEOUT, PERCENTAGE from homeassistant.core 
import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -22,7 +25,7 @@ CONF_ALLOW_UNREACHABLE = "allow_unreachable" DEFAULT_TIMEOUT = 5 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PIN): vol.All(vol.Coerce(str), vol.Match(r"\d{4}")), diff --git a/homeassistant/components/wsdot/sensor.py b/homeassistant/components/wsdot/sensor.py index 14e21f79282..73714b75c95 100644 --- a/homeassistant/components/wsdot/sensor.py +++ b/homeassistant/components/wsdot/sensor.py @@ -6,11 +6,15 @@ from datetime import datetime, timedelta, timezone from http import HTTPStatus import logging import re +from typing import Any import requests import voluptuous as vol -from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) from homeassistant.const import ATTR_NAME, CONF_API_KEY, CONF_ID, CONF_NAME, UnitOfTime from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv @@ -39,7 +43,7 @@ RESOURCE = ( SCAN_INTERVAL = timedelta(minutes=3) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_TRAVEL_TIMES): [ @@ -122,7 +126,7 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor): self._state = self._data.get(ATTR_CURRENT_TIME) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return other details about the sensor state.""" if self._data is not None: attrs = {} @@ -137,6 +141,7 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor): self._data.get(ATTR_TIME_UPDATED) ) return attrs + return None def _parse_wsdot_timestamp(timestamp): diff --git a/homeassistant/components/wyoming/data.py 
b/homeassistant/components/wyoming/data.py index e333a740741..1ee0f24f805 100644 --- a/homeassistant/components/wyoming/data.py +++ b/homeassistant/components/wyoming/data.py @@ -100,7 +100,7 @@ async def load_wyoming_info( while True: event = await client.read_event() if event is None: - raise WyomingError( + raise WyomingError( # noqa: TRY301 "Connection closed unexpectedly", ) diff --git a/homeassistant/components/wyoming/devices.py b/homeassistant/components/wyoming/devices.py index 2ca66f3b21a..2e00b31fd34 100644 --- a/homeassistant/components/wyoming/devices.py +++ b/homeassistant/components/wyoming/devices.py @@ -5,6 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er @@ -23,6 +24,7 @@ class SatelliteDevice: noise_suppression_level: int = 0 auto_gain: int = 0 volume_multiplier: float = 1.0 + vad_sensitivity: VadSensitivity = VadSensitivity.DEFAULT _is_active_listener: Callable[[], None] | None = None _is_muted_listener: Callable[[], None] | None = None @@ -77,6 +79,14 @@ class SatelliteDevice: if self._audio_settings_listener is not None: self._audio_settings_listener() + @callback + def set_vad_sensitivity(self, vad_sensitivity: VadSensitivity) -> None: + """Set VAD sensitivity.""" + if vad_sensitivity != self.vad_sensitivity: + self.vad_sensitivity = vad_sensitivity + if self._audio_settings_listener is not None: + self._audio_settings_listener() + @callback def set_is_active_listener(self, is_active_listener: Callable[[], None]) -> None: """Listen for updates to is_active.""" @@ -140,3 +150,10 @@ class SatelliteDevice: return ent_reg.async_get_entity_id( "number", DOMAIN, f"{self.satellite_id}-volume_multiplier" ) + + def get_vad_sensitivity_entity_id(self, hass: HomeAssistant) -> str | None: + """Return entity id 
for VAD sensitivity.""" + ent_reg = er.async_get(hass) + return ent_reg.async_get_entity_id( + "select", DOMAIN, f"{self.satellite_id}-vad_sensitivity" + ) diff --git a/homeassistant/components/wyoming/satellite.py b/homeassistant/components/wyoming/satellite.py index 5af0c54abad..781f0706c68 100644 --- a/homeassistant/components/wyoming/satellite.py +++ b/homeassistant/components/wyoming/satellite.py @@ -1,6 +1,7 @@ """Support for Wyoming satellite services.""" import asyncio +from collections.abc import AsyncGenerator import io import logging import time @@ -8,7 +9,6 @@ from typing import Final from uuid import uuid4 import wave -from typing_extensions import AsyncGenerator from wyoming.asr import Transcribe, Transcript from wyoming.audio import AudioChunk, AudioChunkConverter, AudioStart, AudioStop from wyoming.client import AsyncTcpClient @@ -25,6 +25,7 @@ from wyoming.wake import Detect, Detection from homeassistant.components import assist_pipeline, intent, stt, tts from homeassistant.components.assist_pipeline import select as pipeline_select +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.config_entries import ConfigEntry from homeassistant.core import Context, HomeAssistant, callback @@ -409,6 +410,9 @@ class WyomingSatellite: noise_suppression_level=self.device.noise_suppression_level, auto_gain_dbfs=self.device.auto_gain, volume_multiplier=self.device.volume_multiplier, + silence_seconds=VadSensitivity.to_seconds( + self.device.vad_sensitivity + ), ), device_id=self.device.device_id, wake_word_phrase=wake_word_phrase, diff --git a/homeassistant/components/wyoming/select.py b/homeassistant/components/wyoming/select.py index 99f26c3e440..f852b4d0434 100644 --- a/homeassistant/components/wyoming/select.py +++ b/homeassistant/components/wyoming/select.py @@ -4,7 +4,11 @@ from __future__ import annotations from typing import TYPE_CHECKING, Final -from homeassistant.components.assist_pipeline.select import 
AssistPipelineSelect +from homeassistant.components.assist_pipeline.select import ( + AssistPipelineSelect, + VadSensitivitySelect, +) +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory @@ -45,6 +49,7 @@ async def async_setup_entry( [ WyomingSatellitePipelineSelect(hass, device), WyomingSatelliteNoiseSuppressionLevelSelect(device), + WyomingSatelliteVadSensitivitySelect(hass, device), ] ) @@ -92,3 +97,21 @@ class WyomingSatelliteNoiseSuppressionLevelSelect( self._attr_current_option = option self.async_write_ha_state() self._device.set_noise_suppression_level(_NOISE_SUPPRESSION_LEVEL[option]) + + +class WyomingSatelliteVadSensitivitySelect( + WyomingSatelliteEntity, VadSensitivitySelect +): + """VAD sensitivity selector for Wyoming satellites.""" + + def __init__(self, hass: HomeAssistant, device: SatelliteDevice) -> None: + """Initialize a VAD sensitivity selector.""" + self.device = device + + WyomingSatelliteEntity.__init__(self, device) + VadSensitivitySelect.__init__(self, hass, device.satellite_id) + + async def async_select_option(self, option: str) -> None: + """Select an option.""" + await super().async_select_option(option) + self.device.set_vad_sensitivity(VadSensitivity(option)) diff --git a/homeassistant/components/wyoming/strings.json b/homeassistant/components/wyoming/strings.json index f2768e45eb8..4a1a4c3a246 100644 --- a/homeassistant/components/wyoming/strings.json +++ b/homeassistant/components/wyoming/strings.json @@ -46,6 +46,14 @@ "high": "High", "max": "Max" } + }, + "vad_sensitivity": { + "name": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::name%]", + "state": { + "default": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::default%]", + "aggressive": 
"[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::aggressive%]", + "relaxed": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::relaxed%]" + } } }, "switch": { diff --git a/homeassistant/components/wyoming/wake_word.py b/homeassistant/components/wyoming/wake_word.py index 6eba0f7ca6d..64dfd60c068 100644 --- a/homeassistant/components/wyoming/wake_word.py +++ b/homeassistant/components/wyoming/wake_word.py @@ -89,6 +89,7 @@ class WyomingWakeWordProvider(wake_word.WakeWordDetectionEntity): """Get the next chunk from audio stream.""" async for chunk_bytes in stream: return chunk_bytes + return None try: async with AsyncTcpClient(self.service.host, self.service.port) as client: diff --git a/homeassistant/components/x10/light.py b/homeassistant/components/x10/light.py index 8f105d9c695..29c15f66993 100644 --- a/homeassistant/components/x10/light.py +++ b/homeassistant/components/x10/light.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, ColorMode, LightEntity, ) @@ -22,7 +22,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend( { vol.Required(CONF_DEVICES): vol.All( cv.ensure_list, diff --git a/homeassistant/components/xeoma/camera.py b/homeassistant/components/xeoma/camera.py index 7d6abde8535..0c19e126fa7 100644 --- a/homeassistant/components/xeoma/camera.py +++ b/homeassistant/components/xeoma/camera.py @@ -7,7 +7,10 @@ import logging from pyxeoma.xeoma import Xeoma, XeomaError import voluptuous as vol -from homeassistant.components.camera import PLATFORM_SCHEMA, Camera +from homeassistant.components.camera import ( + PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, + Camera, +) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, 
CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv @@ -32,7 +35,7 @@ CAMERAS_SCHEMA = vol.Schema( required=False, ) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = CAMERA_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_CAMERAS): vol.Schema( diff --git a/homeassistant/components/xiaomi/camera.py b/homeassistant/components/xiaomi/camera.py index f3e850a7839..8ab15f85147 100644 --- a/homeassistant/components/xiaomi/camera.py +++ b/homeassistant/components/xiaomi/camera.py @@ -9,7 +9,10 @@ from haffmpeg.camera import CameraMjpeg import voluptuous as vol from homeassistant.components import ffmpeg -from homeassistant.components.camera import PLATFORM_SCHEMA, Camera +from homeassistant.components.camera import ( + PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, + Camera, +) from homeassistant.components.ffmpeg import get_ffmpeg_manager from homeassistant.const import ( CONF_HOST, @@ -40,7 +43,7 @@ CONF_FFMPEG_ARGUMENTS = "ffmpeg_arguments" MODEL_YI = "yi" MODEL_XIAOFANG = "xiaofang" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = CAMERA_PLATFORM_SCHEMA.extend( { vol.Required(CONF_NAME): cv.string, vol.Required(CONF_HOST): cv.template, @@ -77,7 +80,6 @@ class XiaomiCamera(Camera): self._manager = get_ffmpeg_manager(hass) self._name = config[CONF_NAME] self.host = config[CONF_HOST] - self.host.hass = hass self._model = config[CONF_MODEL] self.port = config[CONF_PORT] self.path = config[CONF_PATH] diff --git a/homeassistant/components/xiaomi/device_tracker.py b/homeassistant/components/xiaomi/device_tracker.py index 869a7a1cf1f..b14ec073938 100644 --- a/homeassistant/components/xiaomi/device_tracker.py +++ b/homeassistant/components/xiaomi/device_tracker.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, - PLATFORM_SCHEMA as PARENT_PLATFORM_SCHEMA, + PLATFORM_SCHEMA as 
DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME @@ -20,7 +20,7 @@ from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_USERNAME, default="admin"): cv.string, @@ -172,7 +172,6 @@ def _get_token(host, username, password): ) _LOGGER.exception(error_message, url, data, result) return None - else: - _LOGGER.error( - "Invalid response: [%s] at url: [%s] with data [%s]", res, url, data - ) + + _LOGGER.error("Invalid response: [%s] at url: [%s] with data [%s]", res, url, data) + return None diff --git a/homeassistant/components/xiaomi_aqara/binary_sensor.py b/homeassistant/components/xiaomi_aqara/binary_sensor.py index cee2980fe07..75208b142dd 100644 --- a/homeassistant/components/xiaomi_aqara/binary_sensor.py +++ b/homeassistant/components/xiaomi_aqara/binary_sensor.py @@ -202,6 +202,8 @@ class XiaomiNatgasSensor(XiaomiBinarySensor): return True return False + return False + class XiaomiMotionSensor(XiaomiBinarySensor): """Representation of a XiaomiMotionSensor.""" @@ -298,6 +300,8 @@ class XiaomiMotionSensor(XiaomiBinarySensor): self._state = True return True + return False + class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity): """Representation of a XiaomiDoorSensor.""" @@ -357,6 +361,8 @@ class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity): return True return False + return False + class XiaomiWaterLeakSensor(XiaomiBinarySensor): """Representation of a XiaomiWaterLeakSensor.""" @@ -401,6 +407,8 @@ class XiaomiWaterLeakSensor(XiaomiBinarySensor): return True return False + return False + class XiaomiSmokeSensor(XiaomiBinarySensor): """Representation of a XiaomiSmokeSensor.""" @@ -443,6 +451,8 @@ class XiaomiSmokeSensor(XiaomiBinarySensor): return True return False + return False + class 
XiaomiVibration(XiaomiBinarySensor): """Representation of a Xiaomi Vibration Sensor.""" diff --git a/homeassistant/components/xiaomi_ble/__init__.py b/homeassistant/components/xiaomi_ble/__init__.py index 4a9753bfe85..fae5e4d0c91 100644 --- a/homeassistant/components/xiaomi_ble/__init__.py +++ b/homeassistant/components/xiaomi_ble/__init__.py @@ -2,12 +2,12 @@ from __future__ import annotations +from functools import partial import logging from typing import cast from xiaomi_ble import EncryptionScheme, SensorUpdate, XiaomiBluetoothDeviceData -from homeassistant import config_entries from homeassistant.components.bluetooth import ( DOMAIN as BLUETOOTH_DOMAIN, BluetoothScanningMode, @@ -29,6 +29,7 @@ from .const import ( XiaomiBleEvent, ) from .coordinator import XiaomiActiveBluetoothProcessorCoordinator +from .types import XiaomiBLEConfigEntry PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.EVENT, Platform.SENSOR] @@ -37,16 +38,14 @@ _LOGGER = logging.getLogger(__name__) def process_service_info( hass: HomeAssistant, - entry: config_entries.ConfigEntry, - data: XiaomiBluetoothDeviceData, - service_info: BluetoothServiceInfoBleak, + entry: XiaomiBLEConfigEntry, device_registry: DeviceRegistry, + service_info: BluetoothServiceInfoBleak, ) -> SensorUpdate: """Process a BluetoothServiceInfoBleak, running side effects and returning sensor data.""" + coordinator = entry.runtime_data + data = coordinator.device_data update = data.update(service_info) - coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] discovered_event_classes = coordinator.discovered_event_classes if entry.data.get(CONF_SLEEPY_DEVICE, False) != data.sleepy_device: hass.config_entries.async_update_entry( @@ -165,38 +164,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await data.async_poll(connectable_device) device_registry = dr.async_get(hass) - coordinator = hass.data.setdefault(DOMAIN, 
{})[entry.entry_id] = ( - XiaomiActiveBluetoothProcessorCoordinator( - hass, - _LOGGER, - address=address, - mode=BluetoothScanningMode.PASSIVE, - update_method=lambda service_info: process_service_info( - hass, entry, data, service_info, device_registry - ), - needs_poll_method=_needs_poll, - device_data=data, - discovered_event_classes=set( - entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, []) - ), - poll_method=_async_poll, - # We will take advertisements from non-connectable devices - # since we will trade the BLEDevice for a connectable one - # if we need to poll it - connectable=False, - entry=entry, - ) + coordinator = XiaomiActiveBluetoothProcessorCoordinator( + hass, + _LOGGER, + address=address, + mode=BluetoothScanningMode.PASSIVE, + update_method=partial(process_service_info, hass, entry, device_registry), + needs_poll_method=_needs_poll, + device_data=data, + discovered_event_classes=set(entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, [])), + poll_method=_async_poll, + # We will take advertisements from non-connectable devices + # since we will trade the BLEDevice for a connectable one + # if we need to poll it + connectable=False, + entry=entry, ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload( - coordinator.async_start() - ) # only start after all platforms have had a chance to subscribe + # only start after all platforms have had a chance to subscribe + entry.async_on_unload(coordinator.async_start()) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: XiaomiBLEConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git 
a/homeassistant/components/xiaomi_ble/binary_sensor.py b/homeassistant/components/xiaomi_ble/binary_sensor.py index 8734f45c405..5336c4d8f7f 100644 --- a/homeassistant/components/xiaomi_ble/binary_sensor.py +++ b/homeassistant/components/xiaomi_ble/binary_sensor.py @@ -8,7 +8,6 @@ from xiaomi_ble.parser import ( SensorUpdate, ) -from homeassistant import config_entries from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, @@ -22,12 +21,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .const import DOMAIN -from .coordinator import ( - XiaomiActiveBluetoothProcessorCoordinator, - XiaomiPassiveBluetoothDataProcessor, -) +from .coordinator import XiaomiPassiveBluetoothDataProcessor from .device import device_key_to_bluetooth_entity_key +from .types import XiaomiBLEConfigEntry BINARY_SENSOR_DESCRIPTIONS = { XiaomiBinarySensorDeviceClass.BATTERY: BinarySensorEntityDescription( @@ -134,13 +130,11 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: XiaomiBLEConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Xiaomi BLE sensors.""" - coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data processor = XiaomiPassiveBluetoothDataProcessor( sensor_update_to_bluetooth_data_update ) diff --git a/homeassistant/components/xiaomi_ble/coordinator.py b/homeassistant/components/xiaomi_ble/coordinator.py index 1cd49e851ea..69fc427013a 100644 --- a/homeassistant/components/xiaomi_ble/coordinator.py +++ b/homeassistant/components/xiaomi_ble/coordinator.py @@ -16,11 +16,11 @@ from homeassistant.components.bluetooth.active_update_processor import ( from 
homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataProcessor, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.debounce import Debouncer from .const import CONF_SLEEPY_DEVICE +from .types import XiaomiBLEConfigEntry class XiaomiActiveBluetoothProcessorCoordinator( @@ -45,7 +45,7 @@ class XiaomiActiveBluetoothProcessorCoordinator( ] | None = None, poll_debouncer: Debouncer[Coroutine[Any, Any, None]] | None = None, - entry: ConfigEntry, + entry: XiaomiBLEConfigEntry, connectable: bool = True, ) -> None: """Initialize the Xiaomi Bluetooth Active Update Processor Coordinator.""" diff --git a/homeassistant/components/xiaomi_ble/event.py b/homeassistant/components/xiaomi_ble/event.py index e39a4adb3c7..7265bcd112c 100644 --- a/homeassistant/components/xiaomi_ble/event.py +++ b/homeassistant/components/xiaomi_ble/event.py @@ -9,7 +9,6 @@ from homeassistant.components.event import ( EventEntity, EventEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -29,7 +28,7 @@ from .const import ( EVENT_TYPE, XiaomiBleEvent, ) -from .coordinator import XiaomiActiveBluetoothProcessorCoordinator +from .types import XiaomiBLEConfigEntry DESCRIPTIONS_BY_EVENT_CLASS = { EVENT_CLASS_BUTTON: EventEntityDescription( @@ -183,13 +182,11 @@ class XiaomiEventEntity(EventEntity): async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: XiaomiBLEConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Xiaomi event.""" - coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data address = coordinator.address ent_reg = er.async_get(hass) 
async_add_entities( diff --git a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index 1e0a09015ee..21e9bc45bb8 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": "local_push", - "requirements": ["xiaomi-ble==0.30.0"] + "requirements": ["xiaomi-ble==0.30.2"] } diff --git a/homeassistant/components/xiaomi_ble/sensor.py b/homeassistant/components/xiaomi_ble/sensor.py index 65b33c3c559..3108c285dbe 100644 --- a/homeassistant/components/xiaomi_ble/sensor.py +++ b/homeassistant/components/xiaomi_ble/sensor.py @@ -7,7 +7,6 @@ from typing import cast from xiaomi_ble import DeviceClass, SensorUpdate, Units from xiaomi_ble.parser import ExtendedSensorDeviceClass -from homeassistant import config_entries from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataUpdate, PassiveBluetoothProcessorEntity, @@ -35,12 +34,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .const import DOMAIN -from .coordinator import ( - XiaomiActiveBluetoothProcessorCoordinator, - XiaomiPassiveBluetoothDataProcessor, -) +from .coordinator import XiaomiPassiveBluetoothDataProcessor from .device import device_key_to_bluetooth_entity_key +from .types import XiaomiBLEConfigEntry SENSOR_DESCRIPTIONS = { (DeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription( @@ -193,13 +189,11 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: XiaomiBLEConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Xiaomi BLE sensors.""" - coordinator: 
XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data processor = XiaomiPassiveBluetoothDataProcessor( sensor_update_to_bluetooth_data_update ) diff --git a/homeassistant/components/xiaomi_ble/types.py b/homeassistant/components/xiaomi_ble/types.py new file mode 100644 index 00000000000..f0de8af9d06 --- /dev/null +++ b/homeassistant/components/xiaomi_ble/types.py @@ -0,0 +1,10 @@ +"""Support for xiaomi ble.""" + +from typing import TYPE_CHECKING + +from homeassistant.config_entries import ConfigEntry + +if TYPE_CHECKING: + from .coordinator import XiaomiActiveBluetoothProcessorCoordinator + +type XiaomiBLEConfigEntry = ConfigEntry[XiaomiActiveBluetoothProcessorCoordinator] diff --git a/homeassistant/components/xiaomi_miio/binary_sensor.py b/homeassistant/components/xiaomi_miio/binary_sensor.py index 7729ce27d29..6d1a81007dc 100644 --- a/homeassistant/components/xiaomi_miio/binary_sensor.py +++ b/homeassistant/components/xiaomi_miio/binary_sensor.py @@ -190,7 +190,8 @@ async def async_setup_entry( elif model in MODELS_HUMIDIFIER_MJJSQ: sensors = HUMIDIFIER_MJJSQ_BINARY_SENSORS elif model in MODELS_VACUUM: - return _setup_vacuum_sensors(hass, config_entry, async_add_entities) + _setup_vacuum_sensors(hass, config_entry, async_add_entities) + return for description in BINARY_SENSOR_TYPES: if description.key not in sensors: diff --git a/homeassistant/components/xiaomi_miio/const.py b/homeassistant/components/xiaomi_miio/const.py index 24b494f3d08..a8b1f8d4ba5 100644 --- a/homeassistant/components/xiaomi_miio/const.py +++ b/homeassistant/components/xiaomi_miio/const.py @@ -61,6 +61,7 @@ MODEL_AIRPURIFIER_2S = "zhimi.airpurifier.mc1" MODEL_AIRPURIFIER_3 = "zhimi.airpurifier.ma4" MODEL_AIRPURIFIER_3C = "zhimi.airpurifier.mb4" MODEL_AIRPURIFIER_3H = "zhimi.airpurifier.mb3" +MODEL_AIRPURIFIER_COMPACT = "xiaomi.airp.cpa4" MODEL_AIRPURIFIER_M1 = "zhimi.airpurifier.m1" MODEL_AIRPURIFIER_M2 = "zhimi.airpurifier.m2" 
MODEL_AIRPURIFIER_MA1 = "zhimi.airpurifier.ma1" @@ -83,6 +84,7 @@ MODEL_AIRHUMIDIFIER_CA4 = "zhimi.humidifier.ca4" MODEL_AIRHUMIDIFIER_CB1 = "zhimi.humidifier.cb1" MODEL_AIRHUMIDIFIER_JSQ = "deerma.humidifier.jsq" MODEL_AIRHUMIDIFIER_JSQ1 = "deerma.humidifier.jsq1" +MODEL_AIRHUMIDIFIER_JSQ2W = "deerma.humidifier.jsq2w" MODEL_AIRHUMIDIFIER_MJJSQ = "deerma.humidifier.mjjsq" MODEL_AIRFRESH_A1 = "dmaker.airfresh.a1" @@ -148,6 +150,7 @@ MODELS_PURIFIER_MIIO = [ MODEL_AIRPURIFIER_SA2, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_2H, + MODEL_AIRPURIFIER_COMPACT, MODEL_AIRFRESH_A1, MODEL_AIRFRESH_VA2, MODEL_AIRFRESH_VA4, @@ -162,6 +165,7 @@ MODELS_HUMIDIFIER_MIOT = [MODEL_AIRHUMIDIFIER_CA4] MODELS_HUMIDIFIER_MJJSQ = [ MODEL_AIRHUMIDIFIER_JSQ, MODEL_AIRHUMIDIFIER_JSQ1, + MODEL_AIRHUMIDIFIER_JSQ2W, MODEL_AIRHUMIDIFIER_MJJSQ, ] diff --git a/homeassistant/components/xiaomi_miio/device_tracker.py b/homeassistant/components/xiaomi_miio/device_tracker.py index ba73ccc57f0..4a7e447b8a5 100644 --- a/homeassistant/components/xiaomi_miio/device_tracker.py +++ b/homeassistant/components/xiaomi_miio/device_tracker.py @@ -9,7 +9,7 @@ import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, - PLATFORM_SCHEMA as BASE_PLATFORM_SCHEMA, + PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_TOKEN @@ -19,7 +19,7 @@ from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = BASE_PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)), @@ -71,7 +71,7 @@ class XiaomiMiioDeviceScanner(DeviceScanner): return [device["mac"] for device in station_info.associated_stations] - async def async_get_device_name(self, device): + async def async_get_device_name(self, device: str) -> str | None: """Return None. 
The repeater doesn't provide the name of the associated device. diff --git a/homeassistant/components/xiaomi_miio/fan.py b/homeassistant/components/xiaomi_miio/fan.py index 75533513b5e..f075ff8816f 100644 --- a/homeassistant/components/xiaomi_miio/fan.py +++ b/homeassistant/components/xiaomi_miio/fan.py @@ -92,13 +92,14 @@ from .const import ( SERVICE_SET_EXTRA_FEATURES, ) from .device import XiaomiCoordinatedMiioEntity +from .typing import ServiceMethodDetails _LOGGER = logging.getLogger(__name__) DATA_KEY = "fan.xiaomi_miio" -ATTR_MODE_NATURE = "Nature" -ATTR_MODE_NORMAL = "Normal" +ATTR_MODE_NATURE = "nature" +ATTR_MODE_NORMAL = "normal" # Air Purifier ATTR_BRIGHTNESS = "brightness" @@ -182,11 +183,11 @@ SERVICE_SCHEMA_EXTRA_FEATURES = AIRPURIFIER_SERVICE_SCHEMA.extend( ) SERVICE_TO_METHOD = { - SERVICE_RESET_FILTER: {"method": "async_reset_filter"}, - SERVICE_SET_EXTRA_FEATURES: { - "method": "async_set_extra_features", - "schema": SERVICE_SCHEMA_EXTRA_FEATURES, - }, + SERVICE_RESET_FILTER: ServiceMethodDetails(method="async_reset_filter"), + SERVICE_SET_EXTRA_FEATURES: ServiceMethodDetails( + method="async_set_extra_features", + schema=SERVICE_SCHEMA_EXTRA_FEATURES, + ), } FAN_DIRECTIONS_MAP = { @@ -271,7 +272,7 @@ async def async_setup_entry( update_tasks = [] for entity in filtered_entities: - entity_method = getattr(entity, method["method"], None) + entity_method = getattr(entity, method.method, None) if not entity_method: continue await entity_method(**params) @@ -281,7 +282,7 @@ async def async_setup_entry( await asyncio.wait(update_tasks) for air_purifier_service, method in SERVICE_TO_METHOD.items(): - schema = method.get("schema", AIRPURIFIER_SERVICE_SCHEMA) + schema = method.schema or AIRPURIFIER_SERVICE_SCHEMA hass.services.async_register( DOMAIN, air_purifier_service, async_service_handler, schema=schema ) @@ -293,6 +294,7 @@ class XiaomiGenericDevice(XiaomiCoordinatedMiioEntity, FanEntity): """Representation of a generic Xiaomi device.""" _attr_name 
= None + _enable_turn_on_off_backwards_compatibility = False def __init__(self, device, entry, unique_id, coordinator): """Initialize the generic Xiaomi device.""" @@ -478,6 +480,9 @@ class XiaomiAirPurifier(XiaomiGenericAirPurifier): self._preset_modes = PRESET_MODES_AIRPURIFIER self._attr_supported_features = FanEntityFeature.PRESET_MODE self._speed_count = 1 + self._attr_supported_features |= ( + FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON + ) self._state = self.coordinator.data.is_on self._state_attrs.update( @@ -608,7 +613,11 @@ class XiaomiAirPurifierMB4(XiaomiGenericAirPurifier): self._device_features = FEATURE_FLAGS_AIRPURIFIER_3C self._preset_modes = PRESET_MODES_AIRPURIFIER_3C - self._attr_supported_features = FanEntityFeature.PRESET_MODE + self._attr_supported_features = ( + FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) self._state = self.coordinator.data.is_on self._mode = self.coordinator.data.mode.value @@ -662,7 +671,10 @@ class XiaomiAirFresh(XiaomiGenericAirPurifier): self._speed_count = 4 self._preset_modes = PRESET_MODES_AIRFRESH self._attr_supported_features = ( - FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) self._state = self.coordinator.data.is_on @@ -755,7 +767,10 @@ class XiaomiAirFreshA1(XiaomiGenericAirPurifier): self._device_features = FEATURE_FLAGS_AIRFRESH_A1 self._preset_modes = PRESET_MODES_AIRFRESH_A1 self._attr_supported_features = ( - FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) self._state = self.coordinator.data.is_on @@ -830,6 +845,8 @@ class XiaomiAirFreshT2017(XiaomiAirFreshA1): class XiaomiGenericFan(XiaomiGenericDevice): """Representation of a generic Xiaomi Fan.""" + _attr_translation_key = 
"generic_fan" + def __init__(self, device, entry, unique_id, coordinator): """Initialize the fan.""" super().__init__(device, entry, unique_id, coordinator) @@ -850,6 +867,8 @@ class XiaomiGenericFan(XiaomiGenericDevice): FanEntityFeature.SET_SPEED | FanEntityFeature.OSCILLATE | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) if self._model != MODEL_FAN_1C: self._attr_supported_features |= FanEntityFeature.DIRECTION diff --git a/homeassistant/components/xiaomi_miio/icons.json b/homeassistant/components/xiaomi_miio/icons.json index bbd3f6607d7..2e5084a1f6c 100644 --- a/homeassistant/components/xiaomi_miio/icons.json +++ b/homeassistant/components/xiaomi_miio/icons.json @@ -1,4 +1,18 @@ { + "entity": { + "fan": { + "generic_fan": { + "state_attributes": { + "preset_mode": { + "state": { + "nature": "mdi:leaf", + "normal": "mdi:weather-windy" + } + } + } + } + } + }, "services": { "fan_reset_filter": "mdi:refresh", "fan_set_extra_features": "mdi:cog", diff --git a/homeassistant/components/xiaomi_miio/light.py b/homeassistant/components/xiaomi_miio/light.py index 96f9595e0e8..35537e82b2e 100644 --- a/homeassistant/components/xiaomi_miio/light.py +++ b/homeassistant/components/xiaomi_miio/light.py @@ -68,6 +68,7 @@ from .const import ( ) from .device import XiaomiMiioEntity from .gateway import XiaomiGatewayDevice +from .typing import ServiceMethodDetails _LOGGER = logging.getLogger(__name__) @@ -108,20 +109,24 @@ SERVICE_SCHEMA_SET_DELAYED_TURN_OFF = XIAOMI_MIIO_SERVICE_SCHEMA.extend( ) SERVICE_TO_METHOD = { - SERVICE_SET_DELAYED_TURN_OFF: { - "method": "async_set_delayed_turn_off", - "schema": SERVICE_SCHEMA_SET_DELAYED_TURN_OFF, - }, - SERVICE_SET_SCENE: { - "method": "async_set_scene", - "schema": SERVICE_SCHEMA_SET_SCENE, - }, - SERVICE_REMINDER_ON: {"method": "async_reminder_on"}, - SERVICE_REMINDER_OFF: {"method": "async_reminder_off"}, - SERVICE_NIGHT_LIGHT_MODE_ON: {"method": "async_night_light_mode_on"}, - 
SERVICE_NIGHT_LIGHT_MODE_OFF: {"method": "async_night_light_mode_off"}, - SERVICE_EYECARE_MODE_ON: {"method": "async_eyecare_mode_on"}, - SERVICE_EYECARE_MODE_OFF: {"method": "async_eyecare_mode_off"}, + SERVICE_SET_DELAYED_TURN_OFF: ServiceMethodDetails( + method="async_set_delayed_turn_off", + schema=SERVICE_SCHEMA_SET_DELAYED_TURN_OFF, + ), + SERVICE_SET_SCENE: ServiceMethodDetails( + method="async_set_scene", + schema=SERVICE_SCHEMA_SET_SCENE, + ), + SERVICE_REMINDER_ON: ServiceMethodDetails(method="async_reminder_on"), + SERVICE_REMINDER_OFF: ServiceMethodDetails(method="async_reminder_off"), + SERVICE_NIGHT_LIGHT_MODE_ON: ServiceMethodDetails( + method="async_night_light_mode_on" + ), + SERVICE_NIGHT_LIGHT_MODE_OFF: ServiceMethodDetails( + method="async_night_light_mode_off" + ), + SERVICE_EYECARE_MODE_ON: ServiceMethodDetails(method="async_eyecare_mode_on"), + SERVICE_EYECARE_MODE_OFF: ServiceMethodDetails(method="async_eyecare_mode_off"), } @@ -232,9 +237,9 @@ async def async_setup_entry( update_tasks = [] for target_device in target_devices: - if not hasattr(target_device, method["method"]): + if not hasattr(target_device, method.method): continue - await getattr(target_device, method["method"])(**params) + await getattr(target_device, method.method)(**params) update_tasks.append( asyncio.create_task(target_device.async_update_ha_state(True)) ) @@ -243,7 +248,7 @@ async def async_setup_entry( await asyncio.wait(update_tasks) for xiaomi_miio_service, method in SERVICE_TO_METHOD.items(): - schema = method.get("schema", XIAOMI_MIIO_SERVICE_SCHEMA) + schema = method.schema or XIAOMI_MIIO_SERVICE_SCHEMA hass.services.async_register( DOMAIN, xiaomi_miio_service, async_service_handler, schema=schema ) diff --git a/homeassistant/components/xiaomi_miio/remote.py b/homeassistant/components/xiaomi_miio/remote.py index 5baaf614b01..72707109ad6 100644 --- a/homeassistant/components/xiaomi_miio/remote.py +++ b/homeassistant/components/xiaomi_miio/remote.py @@ -16,7 
+16,7 @@ from homeassistant.components.remote import ( ATTR_DELAY_SECS, ATTR_NUM_REPEATS, DEFAULT_DELAY_SECS, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as REMOTE_PLATFORM_SCHEMA, RemoteEntity, ) from homeassistant.const import ( @@ -49,7 +49,7 @@ COMMAND_SCHEMA = vol.Schema( {vol.Required(CONF_COMMAND): vol.All(cv.ensure_list, [cv.string])} ) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = REMOTE_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_HOST): cv.string, @@ -170,12 +170,12 @@ async def async_setup_platform( ) platform.async_register_entity_service( SERVICE_SET_REMOTE_LED_ON, - {}, + None, async_service_led_on_handler, ) platform.async_register_entity_service( SERVICE_SET_REMOTE_LED_OFF, - {}, + None, async_service_led_off_handler, ) diff --git a/homeassistant/components/xiaomi_miio/select.py b/homeassistant/components/xiaomi_miio/select.py index b785adef15a..a8e936aaf8f 100644 --- a/homeassistant/components/xiaomi_miio/select.py +++ b/homeassistant/components/xiaomi_miio/select.py @@ -50,6 +50,7 @@ from .const import ( MODEL_AIRPURIFIER_3H, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_PRO, + MODEL_AIRPURIFIER_COMPACT, MODEL_AIRPURIFIER_M1, MODEL_AIRPURIFIER_M2, MODEL_AIRPURIFIER_MA2, @@ -129,6 +130,9 @@ MODEL_TO_ATTR_MAP: dict[str, list] = { MODEL_AIRPURIFIER_4_PRO: [ AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierMiotLedBrightness) ], + MODEL_AIRPURIFIER_COMPACT: [ + AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierMiotLedBrightness) + ], MODEL_AIRPURIFIER_M1: [ AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierLedBrightness) ], diff --git a/homeassistant/components/xiaomi_miio/strings.json b/homeassistant/components/xiaomi_miio/strings.json index a9588855818..bbdc3f5737d 100644 --- a/homeassistant/components/xiaomi_miio/strings.json +++ b/homeassistant/components/xiaomi_miio/strings.json @@ -93,6 +93,18 @@ "high": "High" } } + }, + "fan": { + "generic_fan": { + "state_attributes": { + "preset_mode": { + 
"state": { + "nature": "Nature", + "normal": "Normal" + } + } + } + } } }, "services": { @@ -210,7 +222,7 @@ }, "remote_learn_command": { "name": "Remote learn command", - "description": "Learns an IR command, press \"Call Service\", point the remote at the IR device, and the learned command will be shown as a notification in Overview.", + "description": "Learns an IR command, press \"Perform action\", point the remote at the IR device, and the learned command will be shown as a notification in Overview.", "fields": { "slot": { "name": "Slot", diff --git a/homeassistant/components/xiaomi_miio/switch.py b/homeassistant/components/xiaomi_miio/switch.py index 34ebb9addf5..797a98d9fa1 100644 --- a/homeassistant/components/xiaomi_miio/switch.py +++ b/homeassistant/components/xiaomi_miio/switch.py @@ -115,6 +115,7 @@ from .const import ( ) from .device import XiaomiCoordinatedMiioEntity, XiaomiMiioEntity from .gateway import XiaomiGatewayDevice +from .typing import ServiceMethodDetails _LOGGER = logging.getLogger(__name__) @@ -176,16 +177,16 @@ SERVICE_SCHEMA_POWER_PRICE = SERVICE_SCHEMA.extend( ) SERVICE_TO_METHOD = { - SERVICE_SET_WIFI_LED_ON: {"method": "async_set_wifi_led_on"}, - SERVICE_SET_WIFI_LED_OFF: {"method": "async_set_wifi_led_off"}, - SERVICE_SET_POWER_MODE: { - "method": "async_set_power_mode", - "schema": SERVICE_SCHEMA_POWER_MODE, - }, - SERVICE_SET_POWER_PRICE: { - "method": "async_set_power_price", - "schema": SERVICE_SCHEMA_POWER_PRICE, - }, + SERVICE_SET_WIFI_LED_ON: ServiceMethodDetails(method="async_set_wifi_led_on"), + SERVICE_SET_WIFI_LED_OFF: ServiceMethodDetails(method="async_set_wifi_led_off"), + SERVICE_SET_POWER_MODE: ServiceMethodDetails( + method="async_set_power_mode", + schema=SERVICE_SCHEMA_POWER_MODE, + ), + SERVICE_SET_POWER_PRICE: ServiceMethodDetails( + method="async_set_power_price", + schema=SERVICE_SCHEMA_POWER_PRICE, + ), } MODEL_TO_FEATURES_MAP = { @@ -488,9 +489,9 @@ async def async_setup_other_entry(hass, config_entry, 
async_add_entities): update_tasks = [] for device in devices: - if not hasattr(device, method["method"]): + if not hasattr(device, method.method): continue - await getattr(device, method["method"])(**params) + await getattr(device, method.method)(**params) update_tasks.append( asyncio.create_task(device.async_update_ha_state(True)) ) @@ -499,7 +500,7 @@ async def async_setup_other_entry(hass, config_entry, async_add_entities): await asyncio.wait(update_tasks) for plug_service, method in SERVICE_TO_METHOD.items(): - schema = method.get("schema", SERVICE_SCHEMA) + schema = method.schema or SERVICE_SCHEMA hass.services.async_register( DOMAIN, plug_service, async_service_handler, schema=schema ) diff --git a/homeassistant/components/xiaomi_miio/typing.py b/homeassistant/components/xiaomi_miio/typing.py new file mode 100644 index 00000000000..8fbb8e3d83f --- /dev/null +++ b/homeassistant/components/xiaomi_miio/typing.py @@ -0,0 +1,12 @@ +"""Typings for the xiaomi_miio integration.""" + +from typing import NamedTuple + +import voluptuous as vol + + +class ServiceMethodDetails(NamedTuple): + """Details for SERVICE_TO_METHOD mapping.""" + + method: str + schema: vol.Schema | None = None diff --git a/homeassistant/components/xiaomi_miio/vacuum.py b/homeassistant/components/xiaomi_miio/vacuum.py index ef6f94c162f..ac833f7646c 100644 --- a/homeassistant/components/xiaomi_miio/vacuum.py +++ b/homeassistant/components/xiaomi_miio/vacuum.py @@ -104,13 +104,13 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_START_REMOTE_CONTROL, - {}, + None, MiroboVacuum.async_remote_control_start.__name__, ) platform.async_register_entity_service( SERVICE_STOP_REMOTE_CONTROL, - {}, + None, MiroboVacuum.async_remote_control_stop.__name__, ) diff --git a/homeassistant/components/xiaomi_tv/media_player.py b/homeassistant/components/xiaomi_tv/media_player.py index da692d21bfc..675c802f79c 100644 --- a/homeassistant/components/xiaomi_tv/media_player.py +++ 
b/homeassistant/components/xiaomi_tv/media_player.py @@ -8,7 +8,7 @@ import pymitv import voluptuous as vol from homeassistant.components.media_player import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, @@ -24,7 +24,7 @@ DEFAULT_NAME = "Xiaomi TV" _LOGGER = logging.getLogger(__name__) # No host is needed for configuration, however it can be set. -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, diff --git a/homeassistant/components/xmpp/notify.py b/homeassistant/components/xmpp/notify.py index 4da1bf35d1a..c73248f2524 100644 --- a/homeassistant/components/xmpp/notify.py +++ b/homeassistant/components/xmpp/notify.py @@ -24,7 +24,7 @@ import voluptuous as vol from homeassistant.components.notify import ( ATTR_TITLE, ATTR_TITLE_DEFAULT, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, BaseNotificationService, ) from homeassistant.const import ( @@ -56,7 +56,7 @@ DEFAULT_CONTENT_TYPE = "application/octet-stream" DEFAULT_RESOURCE = "home-assistant" XEP_0363_TIMEOUT = 10 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend( { vol.Required(CONF_SENDER): cv.string, vol.Required(CONF_PASSWORD): cv.string, @@ -305,16 +305,20 @@ async def async_send_message( # noqa: C901 timeout=timeout, ) - async def upload_file_from_path(self, path, timeout=None): + def _read_upload_file(self, path: str) -> bytes: + """Read file from path.""" + with open(path, "rb") as upfile: + _LOGGER.debug("Reading file %s", path) + return upfile.read() + + async def upload_file_from_path(self, path: str, timeout=None): """Upload a file from a local file path via XEP_0363.""" _LOGGER.info("Uploading file from path, %s", path) if not hass.config.is_allowed_path(path): raise PermissionError("Could not access file. 
Path not allowed") - with open(path, "rb") as upfile: - _LOGGER.debug("Reading file %s", path) - input_file = upfile.read() + input_file = await hass.async_add_executor_job(self._read_upload_file, path) filesize = len(input_file) _LOGGER.debug("Filesize is %s bytes", filesize) diff --git a/homeassistant/components/yale_smart_alarm/__init__.py b/homeassistant/components/yale_smart_alarm/__init__.py index 1ef68d98a13..3c853afb6fd 100644 --- a/homeassistant/components/yale_smart_alarm/__init__.py +++ b/homeassistant/components/yale_smart_alarm/__init__.py @@ -6,7 +6,6 @@ from homeassistant.components.lock import CONF_DEFAULT_CODE, DOMAIN as LOCK_DOMA from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_CODE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import entity_registry as er from .const import LOGGER, PLATFORMS @@ -19,9 +18,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool """Set up Yale from a config entry.""" coordinator = YaleDataUpdateCoordinator(hass, entry) - if not await hass.async_add_executor_job(coordinator.get_updates): - raise ConfigEntryAuthFailed - await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index 5307e166e17..328558d0aba 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -20,10 +20,11 @@ from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, YALE_BASE_ERRORS class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """A Yale Data Update Coordinator.""" + yale: YaleSmartAlarmClient + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: """Initialize the Yale hub.""" self.entry = entry - self.yale: 
YaleSmartAlarmClient | None = None super().__init__( hass, LOGGER, @@ -32,6 +33,17 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): always_update=False, ) + async def _async_setup(self) -> None: + """Set up connection to Yale.""" + try: + self.yale = YaleSmartAlarmClient( + self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD] + ) + except AuthenticationError as error: + raise ConfigEntryAuthFailed from error + except YALE_BASE_ERRORS as error: + raise UpdateFailed from error + async def _async_update_data(self) -> dict[str, Any]: """Fetch data from Yale.""" @@ -132,17 +144,6 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): def get_updates(self) -> dict[str, Any]: """Fetch data from Yale.""" - - if self.yale is None: - try: - self.yale = YaleSmartAlarmClient( - self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD] - ) - except AuthenticationError as error: - raise ConfigEntryAuthFailed from error - except YALE_BASE_ERRORS as error: - raise UpdateFailed from error - try: arm_status = self.yale.get_armed_status() data = self.yale.get_all() diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 0cf142b63b5..293ba87df86 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.4.2"] + "requirements": ["yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/yamaha/const.py b/homeassistant/components/yamaha/const.py index c0f4e34dd50..1cdb619b6ef 100644 --- a/homeassistant/components/yamaha/const.py +++ b/homeassistant/components/yamaha/const.py @@ -1,6 +1,8 @@ """Constants for the Yamaha component.""" DOMAIN = "yamaha" +DISCOVER_TIMEOUT = 3 +KNOWN_ZONES = "known_zones" 
CURSOR_TYPE_DOWN = "down" CURSOR_TYPE_LEFT = "left" CURSOR_TYPE_RETURN = "return" diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index c648994c38d..58f501b99be 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -2,6 +2,7 @@ from __future__ import annotations +import contextlib import logging from typing import Any @@ -10,7 +11,7 @@ import rxv import voluptuous as vol from homeassistant.components.media_player import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, MediaPlayerEntityFeature, MediaPlayerState, @@ -18,6 +19,7 @@ from homeassistant.components.media_player import ( ) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant +from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -29,6 +31,9 @@ from .const import ( CURSOR_TYPE_RIGHT, CURSOR_TYPE_SELECT, CURSOR_TYPE_UP, + DISCOVER_TIMEOUT, + DOMAIN, + KNOWN_ZONES, SERVICE_ENABLE_OUTPUT, SERVICE_MENU_CURSOR, SERVICE_SELECT_SCENE, @@ -55,7 +60,6 @@ CURSOR_TYPE_MAP = { CURSOR_TYPE_SELECT: rxv.RXV.menu_sel.__name__, CURSOR_TYPE_UP: rxv.RXV.menu_up.__name__, } -DATA_YAMAHA = "yamaha_known_receivers" DEFAULT_NAME = "Yamaha Receiver" SUPPORT_YAMAHA = ( @@ -68,7 +72,7 @@ SUPPORT_YAMAHA = ( | MediaPlayerEntityFeature.SELECT_SOUND_MODE ) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_HOST): cv.string, @@ -99,6 +103,7 @@ class YamahaConfigInfo: self.zone_ignore = config.get(CONF_ZONE_IGNORE) self.zone_names = config.get(CONF_ZONE_NAMES) self.from_discovery = False + 
_LOGGER.debug("Discovery Info: %s", discovery_info) if discovery_info is not None: self.name = discovery_info.get("name") self.model = discovery_info.get("model_name") @@ -109,23 +114,53 @@ class YamahaConfigInfo: def _discovery(config_info): - """Discover receivers from configuration in the network.""" + """Discover list of zone controllers from configuration in the network.""" if config_info.from_discovery: - receivers = rxv.RXV( + _LOGGER.debug("Discovery Zones") + zones = rxv.RXV( config_info.ctrl_url, model_name=config_info.model, friendly_name=config_info.name, unit_desc_url=config_info.desc_url, ).zone_controllers() - _LOGGER.debug("Receivers: %s", receivers) elif config_info.host is None: - receivers = [] - for recv in rxv.find(): - receivers.extend(recv.zone_controllers()) + _LOGGER.debug("Config No Host Supplied Zones") + zones = [] + for recv in rxv.find(DISCOVER_TIMEOUT): + zones.extend(recv.zone_controllers()) else: - receivers = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() + _LOGGER.debug("Config Zones") + zones = None - return receivers + # Fix for upstream issues in rxv.find() with some hardware. 
+ with contextlib.suppress(AttributeError, ValueError): + for recv in rxv.find(DISCOVER_TIMEOUT): + _LOGGER.debug( + "Found Serial %s %s %s", + recv.serial_number, + recv.ctrl_url, + recv.zone, + ) + if recv.ctrl_url == config_info.ctrl_url: + _LOGGER.debug( + "Config Zones Matched Serial %s: %s", + recv.ctrl_url, + recv.serial_number, + ) + zones = rxv.RXV( + config_info.ctrl_url, + friendly_name=config_info.name, + serial_number=recv.serial_number, + model_name=recv.model_name, + ).zone_controllers() + break + + if not zones: + _LOGGER.debug("Config Zones Fallback") + zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() + + _LOGGER.debug("Returned _discover zones: %s", zones) + return zones async def async_setup_platform( @@ -138,21 +173,27 @@ async def async_setup_platform( # Keep track of configured receivers so that we don't end up # discovering a receiver dynamically that we have static config # for. Map each device from its zone_id . - known_zones = hass.data.setdefault(DATA_YAMAHA, set()) + known_zones = hass.data.setdefault(DOMAIN, {KNOWN_ZONES: set()})[KNOWN_ZONES] + _LOGGER.debug("Known receiver zones: %s", known_zones) # Get the Infos for configuration from config (YAML) or Discovery config_info = YamahaConfigInfo(config=config, discovery_info=discovery_info) # Async check if the Receivers are there in the network - receivers = await hass.async_add_executor_job(_discovery, config_info) + try: + zone_ctrls = await hass.async_add_executor_job(_discovery, config_info) + except requests.exceptions.ConnectionError as ex: + raise PlatformNotReady(f"Issue while connecting to {config_info.name}") from ex entities = [] - for receiver in receivers: - if config_info.zone_ignore and receiver.zone in config_info.zone_ignore: + for zctrl in zone_ctrls: + _LOGGER.debug("Receiver zone: %s serial %s", zctrl.zone, zctrl.serial_number) + if config_info.zone_ignore and zctrl.zone in config_info.zone_ignore: + _LOGGER.debug("Ignore receiver zone: %s 
%s", config_info.name, zctrl.zone) continue - entity = YamahaDevice( + entity = YamahaDeviceZone( config_info.name, - receiver, + zctrl, config_info.source_ignore, config_info.source_names, config_info.zone_names, @@ -163,7 +204,9 @@ async def async_setup_platform( known_zones.add(entity.zone_id) entities.append(entity) else: - _LOGGER.debug("Ignoring duplicate receiver: %s", config_info.name) + _LOGGER.debug( + "Ignoring duplicate zone: %s %s", config_info.name, zctrl.zone + ) async_add_entities(entities) @@ -184,16 +227,16 @@ async def async_setup_platform( platform.async_register_entity_service( SERVICE_MENU_CURSOR, {vol.Required(ATTR_CURSOR): vol.In(CURSOR_TYPE_MAP)}, - YamahaDevice.menu_cursor.__name__, + YamahaDeviceZone.menu_cursor.__name__, ) -class YamahaDevice(MediaPlayerEntity): - """Representation of a Yamaha device.""" +class YamahaDeviceZone(MediaPlayerEntity): + """Representation of a Yamaha device zone.""" - def __init__(self, name, receiver, source_ignore, source_names, zone_names): + def __init__(self, name, zctrl, source_ignore, source_names, zone_names): """Initialize the Yamaha Receiver.""" - self.receiver = receiver + self.zctrl = zctrl self._attr_is_volume_muted = False self._attr_volume_level = 0 self._attr_state = MediaPlayerState.OFF @@ -205,24 +248,24 @@ class YamahaDevice(MediaPlayerEntity): self._is_playback_supported = False self._play_status = None self._name = name - self._zone = receiver.zone - if self.receiver.serial_number is not None: + self._zone = zctrl.zone + if self.zctrl.serial_number is not None: # Since not all receivers will have a serial number and set a unique id # the default name of the integration may not be changed # to avoid a breaking change. 
- self._attr_unique_id = f"{self.receiver.serial_number}_{self._zone}" + self._attr_unique_id = f"{self.zctrl.serial_number}_{self._zone}" def update(self) -> None: """Get the latest details from the device.""" try: - self._play_status = self.receiver.play_status() + self._play_status = self.zctrl.play_status() except requests.exceptions.ConnectionError: - _LOGGER.info("Receiver is offline: %s", self._name) + _LOGGER.debug("Receiver is offline: %s", self._name) self._attr_available = False return self._attr_available = True - if self.receiver.on: + if self.zctrl.on: if self._play_status is None: self._attr_state = MediaPlayerState.ON elif self._play_status.playing: @@ -232,21 +275,21 @@ class YamahaDevice(MediaPlayerEntity): else: self._attr_state = MediaPlayerState.OFF - self._attr_is_volume_muted = self.receiver.mute - self._attr_volume_level = (self.receiver.volume / 100) + 1 + self._attr_is_volume_muted = self.zctrl.mute + self._attr_volume_level = (self.zctrl.volume / 100) + 1 if self.source_list is None: self.build_source_list() - current_source = self.receiver.input + current_source = self.zctrl.input self._attr_source = self._source_names.get(current_source, current_source) - self._playback_support = self.receiver.get_playback_support() - self._is_playback_supported = self.receiver.is_playback_supported( + self._playback_support = self.zctrl.get_playback_support() + self._is_playback_supported = self.zctrl.is_playback_supported( self._attr_source ) - surround_programs = self.receiver.surround_programs() + surround_programs = self.zctrl.surround_programs() if surround_programs: - self._attr_sound_mode = self.receiver.surround_program + self._attr_sound_mode = self.zctrl.surround_program self._attr_sound_mode_list = surround_programs else: self._attr_sound_mode = None @@ -260,7 +303,7 @@ class YamahaDevice(MediaPlayerEntity): self._attr_source_list = sorted( self._source_names.get(source, source) - for source in self.receiver.inputs() + for source in 
self.zctrl.inputs() if source not in self._source_ignore ) @@ -277,7 +320,7 @@ class YamahaDevice(MediaPlayerEntity): @property def zone_id(self): """Return a zone_id to ensure 1 media player per zone.""" - return f"{self.receiver.ctrl_url}:{self._zone}" + return f"{self.zctrl.ctrl_url}:{self._zone}" @property def supported_features(self) -> MediaPlayerEntityFeature: @@ -301,42 +344,42 @@ class YamahaDevice(MediaPlayerEntity): def turn_off(self) -> None: """Turn off media player.""" - self.receiver.on = False + self.zctrl.on = False def set_volume_level(self, volume: float) -> None: """Set volume level, range 0..1.""" - receiver_vol = 100 - (volume * 100) - negative_receiver_vol = -receiver_vol - self.receiver.volume = negative_receiver_vol + zone_vol = 100 - (volume * 100) + negative_zone_vol = -zone_vol + self.zctrl.volume = negative_zone_vol def mute_volume(self, mute: bool) -> None: """Mute (true) or unmute (false) media player.""" - self.receiver.mute = mute + self.zctrl.mute = mute def turn_on(self) -> None: """Turn the media player on.""" - self.receiver.on = True - self._attr_volume_level = (self.receiver.volume / 100) + 1 + self.zctrl.on = True + self._attr_volume_level = (self.zctrl.volume / 100) + 1 def media_play(self) -> None: """Send play command.""" - self._call_playback_function(self.receiver.play, "play") + self._call_playback_function(self.zctrl.play, "play") def media_pause(self) -> None: """Send pause command.""" - self._call_playback_function(self.receiver.pause, "pause") + self._call_playback_function(self.zctrl.pause, "pause") def media_stop(self) -> None: """Send stop command.""" - self._call_playback_function(self.receiver.stop, "stop") + self._call_playback_function(self.zctrl.stop, "stop") def media_previous_track(self) -> None: """Send previous track command.""" - self._call_playback_function(self.receiver.previous, "previous track") + self._call_playback_function(self.zctrl.previous, "previous track") def media_next_track(self) -> None: 
"""Send next track command.""" - self._call_playback_function(self.receiver.next, "next track") + self._call_playback_function(self.zctrl.next, "next track") def _call_playback_function(self, function, function_text): try: @@ -346,7 +389,7 @@ class YamahaDevice(MediaPlayerEntity): def select_source(self, source: str) -> None: """Select input source.""" - self.receiver.input = self._reverse_mapping.get(source, source) + self.zctrl.input = self._reverse_mapping.get(source, source) def play_media( self, media_type: MediaType | str, media_id: str, **kwargs: Any @@ -370,41 +413,43 @@ class YamahaDevice(MediaPlayerEntity): menu must be fetched by the receiver from the vtuner service. """ if media_type == "NET RADIO": - self.receiver.net_radio(media_id) + self.zctrl.net_radio(media_id) def enable_output(self, port, enabled): """Enable or disable an output port..""" - self.receiver.enable_output(port, enabled) + self.zctrl.enable_output(port, enabled) def menu_cursor(self, cursor): """Press a menu cursor button.""" - getattr(self.receiver, CURSOR_TYPE_MAP[cursor])() + getattr(self.zctrl, CURSOR_TYPE_MAP[cursor])() def set_scene(self, scene): """Set the current scene.""" try: - self.receiver.scene = scene + self.zctrl.scene = scene except AssertionError: _LOGGER.warning("Scene '%s' does not exist!", scene) def select_sound_mode(self, sound_mode: str) -> None: """Set Sound Mode for Receiver..""" - self.receiver.surround_program = sound_mode + self.zctrl.surround_program = sound_mode @property - def media_artist(self): + def media_artist(self) -> str | None: """Artist of current playing media.""" if self._play_status is not None: return self._play_status.artist + return None @property - def media_album_name(self): + def media_album_name(self) -> str | None: """Album of current playing media.""" if self._play_status is not None: return self._play_status.album + return None @property - def media_content_type(self): + def media_content_type(self) -> MediaType | None: """Content 
type of current playing media.""" # Loose assumption that if playback is supported, we are playing music if self._is_playback_supported: @@ -412,7 +457,7 @@ class YamahaDevice(MediaPlayerEntity): return None @property - def media_title(self): + def media_title(self) -> str | None: """Artist of current playing media.""" if self._play_status is not None: song = self._play_status.song @@ -424,3 +469,4 @@ class YamahaDevice(MediaPlayerEntity): return f"{station}: {song}" return song or station + return None diff --git a/homeassistant/components/yandex_transport/manifest.json b/homeassistant/components/yandex_transport/manifest.json index 703f81d2823..1d1219d5a95 100644 --- a/homeassistant/components/yandex_transport/manifest.json +++ b/homeassistant/components/yandex_transport/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@rishatik92", "@devbis"], "documentation": "https://www.home-assistant.io/integrations/yandex_transport", "iot_class": "cloud_polling", - "requirements": ["aioymaps==1.2.2"] + "requirements": ["aioymaps==1.2.5"] } diff --git a/homeassistant/components/yandex_transport/sensor.py b/homeassistant/components/yandex_transport/sensor.py index bcef8248aa3..95c4785a341 100644 --- a/homeassistant/components/yandex_transport/sensor.py +++ b/homeassistant/components/yandex_transport/sensor.py @@ -5,11 +5,11 @@ from __future__ import annotations from datetime import timedelta import logging -from aioymaps import CaptchaError, YandexMapsRequester +from aioymaps import CaptchaError, NoSessionError, YandexMapsRequester import voluptuous as vol from homeassistant.components.sensor import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, ) @@ -34,7 +34,7 @@ DEFAULT_NAME = "Yandex Transport" SCAN_INTERVAL = timedelta(minutes=1) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_STOP_ID): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ 
-88,7 +88,7 @@ class DiscoverYandexTransport(SensorEntity): closer_time = None try: yandex_reply = await self.requester.get_stop_info(self._stop_id) - except CaptchaError as ex: + except (CaptchaError, NoSessionError) as ex: _LOGGER.error( "%s. You may need to disable the integration for some time", ex, diff --git a/homeassistant/components/yandextts/tts.py b/homeassistant/components/yandextts/tts.py index 1a5fc4a7903..850afd05150 100644 --- a/homeassistant/components/yandextts/tts.py +++ b/homeassistant/components/yandextts/tts.py @@ -7,7 +7,11 @@ import logging import aiohttp import voluptuous as vol -from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider +from homeassistant.components.tts import ( + CONF_LANG, + PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, + Provider, +) from homeassistant.const import CONF_API_KEY from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv @@ -64,7 +68,7 @@ DEFAULT_VOICE = "zahar" DEFAULT_EMOTION = "neutral" DEFAULT_SPEED = 1 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = TTS_PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES), diff --git a/homeassistant/components/yardian/switch.py b/homeassistant/components/yardian/switch.py index 549331b6b5f..910bacc1c2e 100644 --- a/homeassistant/components/yardian/switch.py +++ b/homeassistant/components/yardian/switch.py @@ -11,13 +11,14 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import VolDictType from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DEFAULT_WATERING_DURATION, DOMAIN from .coordinator import YardianUpdateCoordinator 
SERVICE_START_IRRIGATION = "start_irrigation" -SERVICE_SCHEMA_START_IRRIGATION = { +SERVICE_SCHEMA_START_IRRIGATION: VolDictType = { vol.Required("duration"): cv.positive_int, } diff --git a/homeassistant/components/yeelight/__init__.py b/homeassistant/components/yeelight/__init__.py index 0ed75318ac7..9b71bbc3b16 100644 --- a/homeassistant/components/yeelight/__init__.py +++ b/homeassistant/components/yeelight/__init__.py @@ -20,7 +20,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers.typing import ConfigType, VolDictType from .const import ( ACTION_OFF, @@ -59,7 +59,7 @@ from .scanner import YeelightScanner _LOGGER = logging.getLogger(__name__) -YEELIGHT_FLOW_TRANSITION_SCHEMA = { +YEELIGHT_FLOW_TRANSITION_SCHEMA: VolDictType = { vol.Optional(ATTR_COUNT, default=0): cv.positive_int, vol.Optional(ATTR_ACTION, default=ACTION_RECOVER): vol.Any( ACTION_RECOVER, ACTION_OFF, ACTION_STAY diff --git a/homeassistant/components/yeelight/light.py b/homeassistant/components/yeelight/light.py index 1d514c131d2..d0d53510859 100644 --- a/homeassistant/components/yeelight/light.py +++ b/homeassistant/components/yeelight/light.py @@ -38,6 +38,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.typing import VolDictType import homeassistant.util.color as color_util from homeassistant.util.color import ( color_temperature_kelvin_to_mired as kelvin_to_mired, @@ -170,22 +171,22 @@ EFFECTS_MAP = { VALID_BRIGHTNESS = vol.All(vol.Coerce(int), vol.Range(min=1, max=100)) -SERVICE_SCHEMA_SET_MODE = { +SERVICE_SCHEMA_SET_MODE: 
VolDictType = { vol.Required(ATTR_MODE): vol.In([mode.name.lower() for mode in PowerMode]) } -SERVICE_SCHEMA_SET_MUSIC_MODE = {vol.Required(ATTR_MODE_MUSIC): cv.boolean} +SERVICE_SCHEMA_SET_MUSIC_MODE: VolDictType = {vol.Required(ATTR_MODE_MUSIC): cv.boolean} SERVICE_SCHEMA_START_FLOW = YEELIGHT_FLOW_TRANSITION_SCHEMA -SERVICE_SCHEMA_SET_COLOR_SCENE = { +SERVICE_SCHEMA_SET_COLOR_SCENE: VolDictType = { vol.Required(ATTR_RGB_COLOR): vol.All( vol.Coerce(tuple), vol.ExactSequence((cv.byte, cv.byte, cv.byte)) ), vol.Required(ATTR_BRIGHTNESS): VALID_BRIGHTNESS, } -SERVICE_SCHEMA_SET_HSV_SCENE = { +SERVICE_SCHEMA_SET_HSV_SCENE: VolDictType = { vol.Required(ATTR_HS_COLOR): vol.All( vol.Coerce(tuple), vol.ExactSequence( @@ -198,14 +199,14 @@ SERVICE_SCHEMA_SET_HSV_SCENE = { vol.Required(ATTR_BRIGHTNESS): VALID_BRIGHTNESS, } -SERVICE_SCHEMA_SET_COLOR_TEMP_SCENE = { +SERVICE_SCHEMA_SET_COLOR_TEMP_SCENE: VolDictType = { vol.Required(ATTR_KELVIN): vol.All(vol.Coerce(int), vol.Range(min=1700, max=6500)), vol.Required(ATTR_BRIGHTNESS): VALID_BRIGHTNESS, } SERVICE_SCHEMA_SET_COLOR_FLOW_SCENE = YEELIGHT_FLOW_TRANSITION_SCHEMA -SERVICE_SCHEMA_SET_AUTO_DELAY_OFF_SCENE = { +SERVICE_SCHEMA_SET_AUTO_DELAY_OFF_SCENE: VolDictType = { vol.Required(ATTR_MINUTES): vol.All(vol.Coerce(int), vol.Range(min=1, max=60)), vol.Required(ATTR_BRIGHTNESS): VALID_BRIGHTNESS, } diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index e9f304d38cb..efb08e26b5a 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -17,7 +17,7 @@ "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], "quality_scale": "platinum", - "requirements": ["yeelight==0.7.14", "async-upnp-client==0.38.3"], + "requirements": ["yeelight==0.7.14", "async-upnp-client==0.40.0"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/components/yeelight/scanner.py 
b/homeassistant/components/yeelight/scanner.py index 6ca12e9bd01..ac482504880 100644 --- a/homeassistant/components/yeelight/scanner.py +++ b/homeassistant/components/yeelight/scanner.py @@ -67,7 +67,8 @@ class YeelightScanner: async def async_setup(self) -> None: """Set up the scanner.""" if self._setup_future is not None: - return await self._setup_future + await self._setup_future + return self._setup_future = self._hass.loop.create_future() connected_futures: list[asyncio.Future[None]] = [] diff --git a/homeassistant/components/yeelightsunflower/light.py b/homeassistant/components/yeelightsunflower/light.py index 45b662846d5..0d8247fc865 100644 --- a/homeassistant/components/yeelightsunflower/light.py +++ b/homeassistant/components/yeelightsunflower/light.py @@ -11,7 +11,7 @@ import yeelightsunflower from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, ColorMode, LightEntity, ) @@ -24,7 +24,7 @@ import homeassistant.util.color as color_util _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) +PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) def setup_platform( diff --git a/homeassistant/components/yi/camera.py b/homeassistant/components/yi/camera.py index f512d31cb6b..b2fac03954d 100644 --- a/homeassistant/components/yi/camera.py +++ b/homeassistant/components/yi/camera.py @@ -9,7 +9,10 @@ from haffmpeg.camera import CameraMjpeg import voluptuous as vol from homeassistant.components import ffmpeg -from homeassistant.components.camera import PLATFORM_SCHEMA, Camera +from homeassistant.components.camera import ( + PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, + Camera, +) from homeassistant.components.ffmpeg import get_ffmpeg_manager from homeassistant.const import ( CONF_HOST, @@ -37,7 +40,7 @@ DEFAULT_ARGUMENTS = "-pred 1" CONF_FFMPEG_ARGUMENTS = "ffmpeg_arguments" 
-PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = CAMERA_PLATFORM_SCHEMA.extend( { vol.Required(CONF_NAME): cv.string, vol.Required(CONF_HOST): cv.string, diff --git a/homeassistant/components/yolink/climate.py b/homeassistant/components/yolink/climate.py index 21e0a71ebcb..98f1b764498 100644 --- a/homeassistant/components/yolink/climate.py +++ b/homeassistant/components/yolink/climate.py @@ -77,6 +77,7 @@ class YoLinkClimateEntity(YoLinkEntity, ClimateEntity): self._attr_fan_modes = [FAN_ON, FAN_AUTO] self._attr_min_temp = -10 self._attr_max_temp = 50 + self._attr_hvac_mode = None self._attr_hvac_modes = [ HVACMode.COOL, HVACMode.HEAT, diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index e829fe08d32..686160d9248 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -17,3 +17,9 @@ YOLINK_OFFLINE_TIME = 32400 DEV_MODEL_WATER_METER_YS5007 = "YS5007" DEV_MODEL_MULTI_OUTLET_YS6801 = "YS6801" +DEV_MODEL_TH_SENSOR_YS8004_UC = "YS8004-UC" +DEV_MODEL_TH_SENSOR_YS8004_EC = "YS8004-EC" +DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" +DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" +DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" +DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 5353d5d5b8c..78b553d7978 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.4"] + "requirements": ["yolink-api==0.4.7"] } diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index 6badeefbdb3..77bbccb2f6a 100644 --- a/homeassistant/components/yolink/sensor.py +++ 
b/homeassistant/components/yolink/sensor.py @@ -48,7 +48,15 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import percentage -from .const import DOMAIN +from .const import ( + DEV_MODEL_TH_SENSOR_YS8004_EC, + DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8014_EC, + DEV_MODEL_TH_SENSOR_YS8014_UC, + DEV_MODEL_TH_SENSOR_YS8017_EC, + DEV_MODEL_TH_SENSOR_YS8017_UC, + DOMAIN, +) from .coordinator import YoLinkCoordinator from .entity import YoLinkEntity @@ -108,6 +116,15 @@ MCU_DEV_TEMPERATURE_SENSOR = [ ATTR_DEVICE_CO_SMOKE_SENSOR, ] +NONE_HUMIDITY_SENSOR_MODELS = [ + DEV_MODEL_TH_SENSOR_YS8004_EC, + DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8014_EC, + DEV_MODEL_TH_SENSOR_YS8014_UC, + DEV_MODEL_TH_SENSOR_YS8017_UC, + DEV_MODEL_TH_SENSOR_YS8017_EC, +] + def cvt_battery(val: int | None) -> int | None: """Convert battery to percentage.""" @@ -141,7 +158,8 @@ SENSOR_TYPES: tuple[YoLinkSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.HUMIDITY, native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, - exists_fn=lambda device: device.device_type in [ATTR_DEVICE_TH_SENSOR], + exists_fn=lambda device: device.device_type in [ATTR_DEVICE_TH_SENSOR] + and device.device_model_name not in NONE_HUMIDITY_SENSOR_MODELS, ), YoLinkSensorEntityDescription( key="temperature", diff --git a/homeassistant/components/yolink/valve.py b/homeassistant/components/yolink/valve.py index a24ad7d385d..d8c199697c3 100644 --- a/homeassistant/components/yolink/valve.py +++ b/homeassistant/components/yolink/valve.py @@ -37,7 +37,7 @@ DEVICE_TYPES: tuple[YoLinkValveEntityDescription, ...] 
= ( key="valve_state", translation_key="meter_valve_state", device_class=ValveDeviceClass.WATER, - value=lambda value: value == "closed" if value is not None else None, + value=lambda value: value != "open" if value is not None else None, exists_fn=lambda device: device.device_type == ATTR_DEVICE_WATER_METER_CONTROLLER and not device.device_model_name.startswith(DEV_MODEL_WATER_METER_YS5007), diff --git a/homeassistant/components/youless/manifest.json b/homeassistant/components/youless/manifest.json index 9a81de38388..1ccc8cda0ff 100644 --- a/homeassistant/components/youless/manifest.json +++ b/homeassistant/components/youless/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/youless", "iot_class": "local_polling", "loggers": ["youless_api"], - "requirements": ["youless-api==2.1.0"] + "requirements": ["youless-api==2.1.2"] } diff --git a/homeassistant/components/zabbix/sensor.py b/homeassistant/components/zabbix/sensor.py index 4c6af57f780..2187deb22e8 100644 --- a/homeassistant/components/zabbix/sensor.py +++ b/homeassistant/components/zabbix/sensor.py @@ -9,7 +9,10 @@ from typing import Any from pyzabbix import ZabbixAPI import voluptuous as vol -from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv @@ -35,7 +38,7 @@ _ZABBIX_TRIGGER_SCHEMA = vol.Schema( # SCAN_INTERVAL = 30 # -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( {vol.Required(_CONF_TRIGGERS): vol.Any(_ZABBIX_TRIGGER_SCHEMA, None)} ) diff --git a/homeassistant/components/zengge/light.py b/homeassistant/components/zengge/light.py index 6657bfb9edd..69b7c63476a 100644 --- a/homeassistant/components/zengge/light.py +++ 
b/homeassistant/components/zengge/light.py @@ -12,7 +12,7 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, ColorMode, LightEntity, ) @@ -27,7 +27,7 @@ _LOGGER = logging.getLogger(__name__) DEVICE_SCHEMA = vol.Schema({vol.Optional(CONF_NAME): cv.string}) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend( {vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA}} ) diff --git a/homeassistant/components/zestimate/sensor.py b/homeassistant/components/zestimate/sensor.py index 8bbda7de73a..12831c96932 100644 --- a/homeassistant/components/zestimate/sensor.py +++ b/homeassistant/components/zestimate/sensor.py @@ -9,7 +9,10 @@ import requests import voluptuous as vol import xmltodict -from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) from homeassistant.const import CONF_API_KEY, CONF_NAME from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv @@ -33,7 +36,7 @@ ATTR_LAST_UPDATED = "amount_last_updated" ATTR_VAL_HI = "valuation_range_high" ATTR_VAL_LOW = "valuation_range_low" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_ZPID): vol.All(cv.ensure_list, [cv.string]), diff --git a/homeassistant/components/zeversolar/diagnostics.py b/homeassistant/components/zeversolar/diagnostics.py index b8901a7e793..6e6ed262f51 100644 --- a/homeassistant/components/zeversolar/diagnostics.py +++ b/homeassistant/components/zeversolar/diagnostics.py @@ -31,6 +31,7 @@ async def async_get_config_entry_diagnostics( "num_inverters": data.num_inverters, "serial_number": data.serial_number, "pac": data.pac, + "energy_today": data.energy_today, "status": 
data.status.value, "meter_status": data.meter_status.value, } diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index ed74cde47e1..1897b741d87 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -1,18 +1,25 @@ """Support for Zigbee Home Automation devices.""" import contextlib -import copy import logging -import re +from zoneinfo import ZoneInfo import voluptuous as vol -from zhaquirks import setup as setup_quirks +from zha.application.const import BAUD_RATES, RadioType +from zha.application.gateway import Gateway +from zha.application.helpers import ZHAData +from zha.zigbee.device import get_device_automation_triggers from zigpy.config import CONF_DATABASE, CONF_DEVICE, CONF_DEVICE_PATH from zigpy.exceptions import NetworkSettingsInconsistent, TransientConnectionError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_TYPE, EVENT_HOMEASSISTANT_STOP -from homeassistant.core import Event, HomeAssistant +from homeassistant.const import ( + CONF_TYPE, + EVENT_CORE_CONFIG_UPDATE, + EVENT_HOMEASSISTANT_STOP, + Platform, +) +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv @@ -20,9 +27,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType from . 
import repairs, websocket_api -from .core import ZHAGateway -from .core.const import ( - BAUD_RATES, +from .const import ( CONF_BAUDRATE, CONF_CUSTOM_QUIRKS_PATH, CONF_DEVICE_CONFIG, @@ -33,13 +38,14 @@ from .core.const import ( CONF_ZIGPY, DATA_ZHA, DOMAIN, - PLATFORMS, - SIGNAL_ADD_ENTITIES, - RadioType, ) -from .core.device import get_device_automation_triggers -from .core.discovery import GROUP_PROBE -from .core.helpers import ZHAData, get_zha_data +from .helpers import ( + SIGNAL_ADD_ENTITIES, + HAZHAData, + ZHAGatewayProxy, + create_zha_config, + get_zha_data, +) from .radio_manager import ZhaRadioManager from .repairs.network_settings_inconsistent import warn_on_inconsistent_network_settings from .repairs.wrong_silabs_firmware import ( @@ -74,6 +80,25 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +PLATFORMS = ( + Platform.ALARM_CONTROL_PANEL, + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CLIMATE, + Platform.COVER, + Platform.DEVICE_TRACKER, + Platform.FAN, + Platform.LIGHT, + Platform.LOCK, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SIREN, + Platform.SWITCH, + Platform.UPDATE, +) + + # Zigbee definitions CENTICELSIUS = "C-100" @@ -83,49 +108,22 @@ _LOGGER = logging.getLogger(__name__) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up ZHA from config.""" - zha_data = ZHAData() - zha_data.yaml_config = config.get(DOMAIN, {}) - hass.data[DATA_ZHA] = zha_data + ha_zha_data = HAZHAData(yaml_config=config.get(DOMAIN, {})) + hass.data[DATA_ZHA] = ha_zha_data return True -def _clean_serial_port_path(path: str) -> str: - """Clean the serial port path, applying corrections where necessary.""" - - if path.startswith("socket://"): - path = path.strip() - - # Removes extraneous brackets from IP addresses (they don't parse in CPython 3.11.4) - if re.match(r"^socket://\[\d+\.\d+\.\d+\.\d+\]:\d+$", path): - path = path.replace("[", "").replace("]", "") - - return path - - async def 
async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up ZHA. Will automatically load components to support devices found on the network. """ + ha_zha_data: HAZHAData = get_zha_data(hass) + ha_zha_data.config_entry = config_entry + zha_lib_data: ZHAData = create_zha_config(hass, ha_zha_data) - # Remove brackets around IP addresses, this no longer works in CPython 3.11.4 - # This will be removed in 2023.11.0 - path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] - cleaned_path = _clean_serial_port_path(path) - data = copy.deepcopy(dict(config_entry.data)) - - if path != cleaned_path: - _LOGGER.debug("Cleaned serial port path %r -> %r", path, cleaned_path) - data[CONF_DEVICE][CONF_DEVICE_PATH] = cleaned_path - hass.config_entries.async_update_entry(config_entry, data=data) - - zha_data = get_zha_data(hass) - - if zha_data.yaml_config.get(CONF_ENABLE_QUIRKS, True): - await hass.async_add_import_executor_job( - setup_quirks, zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH) - ) + zha_gateway = await Gateway.async_from_config(zha_lib_data) # Load and cache device trigger information early device_registry = dr.async_get(hass) @@ -141,19 +139,16 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b if dev_entry is None: continue - zha_data.device_trigger_cache[dev_entry.id] = ( + zha_lib_data.device_trigger_cache[dev_entry.id] = ( str(dev.ieee), get_device_automation_triggers(dev), ) + ha_zha_data.device_trigger_cache = zha_lib_data.device_trigger_cache - _LOGGER.debug("Trigger cache: %s", zha_data.device_trigger_cache) + _LOGGER.debug("Trigger cache: %s", zha_lib_data.device_trigger_cache) try: - zha_gateway = await ZHAGateway.async_from_config( - hass=hass, - config=zha_data.yaml_config, - config_entry=config_entry, - ) + await zha_gateway.async_initialize() except NetworkSettingsInconsistent as exc: await warn_on_inconsistent_network_settings( hass, @@ -185,6 +180,8 @@ async def async_setup_entry(hass: 
HomeAssistant, config_entry: ConfigEntry) -> b repairs.async_delete_blocking_issues(hass) + ha_zha_data.gateway_proxy = ZHAGatewayProxy(hass, config_entry, zha_gateway) + manufacturer = zha_gateway.state.node_info.manufacturer model = zha_gateway.state.node_info.model @@ -205,13 +202,24 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b websocket_api.async_load_api(hass) async def async_shutdown(_: Event) -> None: - await zha_gateway.shutdown() + """Handle shutdown tasks.""" + assert ha_zha_data.gateway_proxy is not None + await ha_zha_data.gateway_proxy.shutdown() config_entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown) ) - await zha_gateway.async_initialize_devices_and_entities() + @callback + def update_config(event: Event) -> None: + """Handle Core config update.""" + zha_gateway.config.local_timezone = ZoneInfo(hass.config.time_zone) + + config_entry.async_on_unload( + hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, update_config) + ) + + await ha_zha_data.gateway_proxy.async_initialize_devices_and_entities() await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES) return True @@ -219,11 +227,12 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload ZHA config entry.""" - zha_data = get_zha_data(hass) + ha_zha_data = get_zha_data(hass) + ha_zha_data.config_entry = None - if zha_data.gateway is not None: - await zha_data.gateway.shutdown() - zha_data.gateway = None + if ha_zha_data.gateway_proxy is not None: + await ha_zha_data.gateway_proxy.shutdown() + ha_zha_data.gateway_proxy = None # clean up any remaining entity metadata # (entities that have been discovered but not yet added to HA) @@ -231,15 +240,11 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> # be in 
when we get here in failure cases with contextlib.suppress(KeyError): for platform in PLATFORMS: - del zha_data.platforms[platform] + del ha_zha_data.platforms[platform] - GROUP_PROBE.cleanup() websocket_api.async_unload_api(hass) - # our components don't have unload methods so no need to look at return values - await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) - - return True + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/zha/alarm_control_panel.py b/homeassistant/components/zha/alarm_control_panel.py index 7750e7f280d..c54d7c7ab2d 100644 --- a/homeassistant/components/zha/alarm_control_panel.py +++ b/homeassistant/components/zha/alarm_control_panel.py @@ -3,9 +3,6 @@ from __future__ import annotations import functools -from typing import TYPE_CHECKING - -from zigpy.zcl.clusters.security import IasAce from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, @@ -13,50 +10,18 @@ from homeassistant.components.alarm_control_panel import ( CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - Platform, -) -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.cluster_handlers.security import ( - SIGNAL_ALARM_TRIGGERED, - SIGNAL_ARMED_STATE_CHANGED, - IasAceClusterHandler, -) -from .core.const import ( - CLUSTER_HANDLER_IAS_ACE, - CONF_ALARM_ARM_REQUIRES_CODE, - CONF_ALARM_FAILED_TRIES, - CONF_ALARM_MASTER_CODE, +from 
.entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - ZHA_ALARM_OPTIONS, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import async_get_zha_config_value, get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.device import ZHADevice - -STRICT_MATCH = functools.partial( - ZHA_ENTITIES.strict_match, Platform.ALARM_CONTROL_PANEL -) - -IAS_ACE_STATE_MAP = { - IasAce.PanelStatus.Panel_Disarmed: STATE_ALARM_DISARMED, - IasAce.PanelStatus.Armed_Stay: STATE_ALARM_ARMED_HOME, - IasAce.PanelStatus.Armed_Night: STATE_ALARM_ARMED_NIGHT, - IasAce.PanelStatus.Armed_Away: STATE_ALARM_ARMED_AWAY, - IasAce.PanelStatus.In_Alarm: STATE_ALARM_TRIGGERED, -} async def async_setup_entry( @@ -72,14 +37,16 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, + async_add_entities, + ZHAAlarmControlPanel, + entities_to_create, ), ) config_entry.async_on_unload(unsub) -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_ACE) -class ZHAAlarmControlPanel(ZhaEntity, AlarmControlPanelEntity): +class ZHAAlarmControlPanel(ZHAEntity, AlarmControlPanelEntity): """Entity for ZHA alarm control devices.""" _attr_translation_key: str = "alarm_control_panel" @@ -91,68 +58,42 @@ class ZHAAlarmControlPanel(ZhaEntity, AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.TRIGGER ) - def __init__( - self, unique_id, zha_device: ZHADevice, cluster_handlers, **kwargs - ) -> None: - """Initialize the ZHA alarm control device.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - cfg_entry = zha_device.gateway.config_entry - self._cluster_handler: IasAceClusterHandler = cluster_handlers[0] - self._cluster_handler.panel_code = async_get_zha_config_value( - cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_MASTER_CODE, 
"1234" - ) - self._cluster_handler.code_required_arm_actions = async_get_zha_config_value( - cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_ARM_REQUIRES_CODE, False - ) - self._cluster_handler.max_invalid_tries = async_get_zha_config_value( - cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_FAILED_TRIES, 3 - ) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ARMED_STATE_CHANGED, self.async_set_armed_mode - ) - self.async_accept_signal( - self._cluster_handler, SIGNAL_ALARM_TRIGGERED, self.async_alarm_trigger - ) - - @callback - def async_set_armed_mode(self) -> None: - """Set the entity state.""" - self.async_write_ha_state() - @property def code_arm_required(self) -> bool: """Whether the code is required for arm actions.""" - return self._cluster_handler.code_required_arm_actions + return self.entity_data.entity.code_arm_required + @convert_zha_error_to_ha_error async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._cluster_handler.arm(IasAce.ArmMode.Disarm, code, 0) + await self.entity_data.entity.async_alarm_disarm(code) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._cluster_handler.arm(IasAce.ArmMode.Arm_Day_Home_Only, code, 0) + await self.entity_data.entity.async_alarm_arm_home(code) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._cluster_handler.arm(IasAce.ArmMode.Arm_All_Zones, code, 0) + await self.entity_data.entity.async_alarm_arm_away(code) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" - 
self._cluster_handler.arm(IasAce.ArmMode.Arm_Night_Sleep_Only, code, 0) + await self.entity_data.entity.async_alarm_arm_night(code) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_alarm_trigger(self, code: str | None = None) -> None: """Send alarm trigger command.""" + await self.entity_data.entity.async_alarm_trigger(code) self.async_write_ha_state() @property def state(self) -> str | None: """Return the state of the entity.""" - return IAS_ACE_STATE_MAP.get(self._cluster_handler.armed_state) + return self.entity_data.entity.state["state"] diff --git a/homeassistant/components/zha/api.py b/homeassistant/components/zha/api.py index db0658eb632..60960a3e9fc 100644 --- a/homeassistant/components/zha/api.py +++ b/homeassistant/components/zha/api.py @@ -4,13 +4,14 @@ from __future__ import annotations from typing import TYPE_CHECKING, Literal +from zha.application.const import RadioType from zigpy.backups import NetworkBackup from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH from zigpy.types import Channels from zigpy.util import pick_optimal_channel -from .core.const import CONF_RADIO_TYPE, DOMAIN, RadioType -from .core.helpers import get_zha_gateway +from .const import CONF_RADIO_TYPE, DOMAIN +from .helpers import get_zha_data, get_zha_gateway from .radio_manager import ZhaRadioManager if TYPE_CHECKING: @@ -22,14 +23,12 @@ def _get_config_entry(hass: HomeAssistant) -> ConfigEntry: """Find the singleton ZHA config entry, if one exists.""" # If ZHA is already running, use its config entry - try: - zha_gateway = get_zha_gateway(hass) - except ValueError: - pass - else: - return zha_gateway.config_entry + zha_data = get_zha_data(hass) - # Otherwise, find one + if zha_data.config_entry is not None: + return zha_data.config_entry + + # Otherwise, find an inactive one entries = hass.config_entries.async_entries(DOMAIN) if len(entries) != 1: diff --git a/homeassistant/components/zha/backup.py b/homeassistant/components/zha/backup.py index 
e31ae09eeb6..a3d9090eaba 100644 --- a/homeassistant/components/zha/backup.py +++ b/homeassistant/components/zha/backup.py @@ -4,7 +4,7 @@ import logging from homeassistant.core import HomeAssistant -from .core.helpers import get_zha_gateway +from .helpers import get_zha_gateway _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/binary_sensor.py b/homeassistant/components/zha/binary_sensor.py index bdd2fd03ca0..f45ebf0c5a5 100644 --- a/homeassistant/components/zha/binary_sensor.py +++ b/homeassistant/components/zha/binary_sensor.py @@ -3,58 +3,24 @@ from __future__ import annotations import functools -import logging - -from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT -from zigpy.quirks.v2 import BinarySensorMetadata -import zigpy.types as t -from zigpy.zcl.clusters.general import OnOff -from zigpy.zcl.clusters.security import IasZone from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_ON, EntityCategory, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_ACCELEROMETER, - CLUSTER_HANDLER_BINARY_INPUT, - CLUSTER_HANDLER_HUE_OCCUPANCY, - CLUSTER_HANDLER_OCCUPANCY, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_THERMOSTAT, - CLUSTER_HANDLER_ZONE, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + EntityData, + async_add_entities as zha_async_add_entities, + get_zha_data, ) -from .core.helpers import get_zha_data, validate_device_class -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -# Zigbee 
Cluster Library Zone Type to Home Assistant device class -IAS_ZONE_CLASS_MAPPING = { - IasZone.ZoneType.Motion_Sensor: BinarySensorDeviceClass.MOTION, - IasZone.ZoneType.Contact_Switch: BinarySensorDeviceClass.OPENING, - IasZone.ZoneType.Fire_Sensor: BinarySensorDeviceClass.SMOKE, - IasZone.ZoneType.Water_Sensor: BinarySensorDeviceClass.MOISTURE, - IasZone.ZoneType.Carbon_Monoxide_Sensor: BinarySensorDeviceClass.GAS, - IasZone.ZoneType.Vibration_Movement_Sensor: BinarySensorDeviceClass.VIBRATION, -} - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.BINARY_SENSOR) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.BINARY_SENSOR) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.BINARY_SENSOR -) - -_LOGGER = logging.getLogger(__name__) async def async_setup_entry( @@ -70,312 +36,24 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, BinarySensor, entities_to_create ), ) config_entry.async_on_unload(unsub) -class BinarySensor(ZhaEntity, BinarySensorEntity): +class BinarySensor(ZHAEntity, BinarySensorEntity): """ZHA BinarySensor.""" - _attribute_name: str - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs) -> None: + def __init__(self, entity_data: EntityData) -> None: """Initialize the ZHA binary sensor.""" - self._cluster_handler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: BinarySensorMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - if entity_metadata.device_class is not None: - 
self._attr_device_class = validate_device_class( - BinarySensorDeviceClass, - entity_metadata.device_class, - Platform.BINARY_SENSOR.value, - _LOGGER, + super().__init__(entity_data) + if self.entity_data.entity.info_object.device_class is not None: + self._attr_device_class = BinarySensorDeviceClass( + self.entity_data.entity.info_object.device_class ) - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - @property def is_on(self) -> bool: """Return True if the switch is on based on the state machine.""" - raw_state = self._cluster_handler.cluster.get(self._attribute_name) - if raw_state is None: - return False - return self.parse(raw_state) - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Set the state.""" - self.async_write_ha_state() - - @staticmethod - def parse(value: bool | int) -> bool: - """Parse the raw attribute into a bool state.""" - return bool(value) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ACCELEROMETER) -class Accelerometer(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "acceleration" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.MOVING - _attr_translation_key: str = "accelerometer" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY) -class Occupancy(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "occupancy" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OCCUPANCY - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY) -class HueOccupancy(Occupancy): - """ZHA Hue occupancy.""" - - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OCCUPANCY - - -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) -class Opening(BinarySensor): - """ZHA OnOff BinarySensor.""" - - _attribute_name = "on_off" - 
_attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OPENING - - # Client/out cluster attributes aren't stored in the zigpy database, but are properly stored in the runtime cache. - # We need to manually restore the last state from the sensor state to the runtime cache for now. - @callback - def async_restore_last_state(self, last_state): - """Restore previous state to zigpy cache.""" - self._cluster_handler.cluster.update_attribute( - OnOff.attributes_by_name[self._attribute_name].id, - t.Bool.true if last_state.state == STATE_ON else t.Bool.false, - ) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BINARY_INPUT) -class BinaryInput(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "present_value" - _attr_translation_key: str = "binary_input" - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - manufacturers="IKEA of Sweden", - models=lambda model: isinstance(model, str) - and model is not None - and model.find("motion") != -1, -) -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - manufacturers="Philips", - models={"SML001", "SML002"}, -) -class Motion(Opening): - """ZHA OnOff BinarySensor with motion device class.""" - - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.MOTION - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ZONE) -class IASZone(BinarySensor): - """ZHA IAS BinarySensor.""" - - _attribute_name = "zone_status" - - @property - def translation_key(self) -> str | None: - """Return the name of the sensor.""" - zone_type = self._cluster_handler.cluster.get("zone_type") - if zone_type in IAS_ZONE_CLASS_MAPPING: - return None - return "ias_zone" - - @property - def device_class(self) -> BinarySensorDeviceClass | None: - """Return device class from component DEVICE_CLASSES.""" - zone_type = self._cluster_handler.cluster.get("zone_type") - return IAS_ZONE_CLASS_MAPPING.get(zone_type) - - @staticmethod - def parse(value: bool | int) -> bool: - """Parse the raw 
attribute into a bool state.""" - return BinarySensor.parse(value & 3) # use only bit 0 and 1 for alarm state - - -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ZONE, models={"WL4200", "WL4200S"}) -class SinopeLeakStatus(BinarySensor): - """Sinope water leak sensor.""" - - _attribute_name = "leak_status" - _attr_device_class = BinarySensorDeviceClass.MOISTURE - - -@MULTI_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_htnnfasr", - }, -) -class FrostLock(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "frost_lock" - _unique_id_suffix = "frost_lock" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.LOCK - _attr_translation_key: str = "frost_lock" - - -@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") -class ReplaceFilter(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "replace_filter" - _unique_id_suffix = "replace_filter" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - _attr_translation_key: str = "replace_filter" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -class AqaraPetFeederErrorDetected(BinarySensor): - """ZHA aqara pet feeder error detected binary sensor.""" - - _attribute_name = "error_detected" - _unique_id_suffix = "error_detected" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM - - -@MULTI_MATCH( - cluster_handler_names="opple_cluster", - models={"lumi.plug.mmeu01", "lumi.plug.maeu01"}, -) -class XiaomiPlugConsumerConnected(BinarySensor): - """ZHA Xiaomi plug consumer connected binary sensor.""" - - _attribute_name = "consumer_connected" - _unique_id_suffix = "consumer_connected" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PLUG - _attr_translation_key: str = "consumer_connected" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", 
models={"lumi.airrtc.agl001"}) -class AqaraThermostatWindowOpen(BinarySensor): - """ZHA Aqara thermostat window open binary sensor.""" - - _attribute_name = "window_open" - _unique_id_suffix = "window_open" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.WINDOW - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"}) -class AqaraThermostatValveAlarm(BinarySensor): - """ZHA Aqara thermostat valve alarm binary sensor.""" - - _attribute_name = "valve_alarm" - _unique_id_suffix = "valve_alarm" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM - _attr_translation_key: str = "valve_alarm" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatCalibrated(BinarySensor): - """ZHA Aqara thermostat calibrated binary sensor.""" - - _attribute_name = "calibrated" - _unique_id_suffix = "calibrated" - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - _attr_translation_key: str = "calibrated" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatExternalSensor(BinarySensor): - """ZHA Aqara thermostat external sensor binary sensor.""" - - _attribute_name = "sensor" - _unique_id_suffix = "sensor" - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - _attr_translation_key: str = "external_sensor" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"}) -class AqaraLinkageAlarmState(BinarySensor): - """ZHA Aqara linkage alarm state binary sensor.""" - - _attribute_name = "linkage_alarm_state" - _unique_id_suffix = "linkage_alarm_state" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.SMOKE - _attr_translation_key: str = "linkage_alarm_state" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} -) -class 
AqaraE1CurtainMotorOpenedByHandBinarySensor(BinarySensor): - """Opened by hand binary sensor.""" - - _unique_id_suffix = "hand_open" - _attribute_name = "hand_open" - _attr_translation_key = "hand_open" - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossMountingModeActive(BinarySensor): - """Danfoss TRV proprietary attribute exposing whether in mounting mode.""" - - _unique_id_suffix = "mounting_mode_active" - _attribute_name = "mounting_mode_active" - _attr_translation_key: str = "mounting_mode_active" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OPENING - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossHeatRequired(BinarySensor): - """Danfoss TRV proprietary attribute exposing whether heat is required.""" - - _unique_id_suffix = "heat_required" - _attribute_name = "heat_required" - _attr_translation_key: str = "heat_required" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossPreheatStatus(BinarySensor): - """Danfoss TRV proprietary attribute exposing whether in pre-heating mode.""" - - _unique_id_suffix = "preheat_status" - _attribute_name = "preheat_status" - _attr_translation_key: str = "preheat_status" - _attr_entity_registry_enabled_default = False - _attr_entity_category = EntityCategory.DIAGNOSTIC + return self.entity_data.entity.is_on diff --git a/homeassistant/components/zha/button.py b/homeassistant/components/zha/button.py index 33102062443..ecd5cd51f61 100644 --- a/homeassistant/components/zha/button.py +++ b/homeassistant/components/zha/button.py @@ -4,33 +4,22 @@ from __future__ import annotations import functools import logging -from typing import TYPE_CHECKING, Any, Self 
- -from zigpy.quirks.v2 import WriteAttributeButtonMetadata, ZCLCommandButtonMetadata from homeassistant.components.button import ButtonDeviceClass, ButtonEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import CLUSTER_HANDLER_IDENTIFY, ENTITY_METADATA, SIGNAL_ADD_ENTITIES -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - - -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.BUTTON) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.BUTTON +from .entity import ZHAEntity +from .helpers import ( + SIGNAL_ADD_ENTITIES, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -DEFAULT_DURATION = 5 # seconds _LOGGER = logging.getLogger(__name__) @@ -48,172 +37,24 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, ZHAButton, entities_to_create ), ) config_entry.async_on_unload(unsub) -class ZHAButton(ZhaEntity, ButtonEntity): +class ZHAButton(ZHAEntity, ButtonEntity): """Defines a ZHA button.""" - _command_name: str - _args: list[Any] - _kwargs: dict[str, Any] - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this button.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if 
ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata( - self, entity_metadata: ZCLCommandButtonMetadata - ) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._command_name = entity_metadata.command_name - self._args = entity_metadata.args - self._kwargs = entity_metadata.kwargs - - def get_args(self) -> list[Any]: - """Return the arguments to use in the command.""" - return list(self._args) if self._args else [] - - def get_kwargs(self) -> dict[str, Any]: - """Return the keyword arguments to use in the command.""" - return self._kwargs + def __init__(self, entity_data: EntityData) -> None: + """Initialize the ZHA binary sensor.""" + super().__init__(entity_data) + if self.entity_data.entity.info_object.device_class is not None: + self._attr_device_class = ButtonDeviceClass( + self.entity_data.entity.info_object.device_class + ) + @convert_zha_error_to_ha_error async def async_press(self) -> None: """Send out a update command.""" - command = getattr(self._cluster_handler, self._command_name) - arguments = self.get_args() or [] - kwargs = self.get_kwargs() or {} - await command(*arguments, **kwargs) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_IDENTIFY) -class ZHAIdentifyButton(ZHAButton): - """Defines a ZHA identify button.""" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - if ZHA_ENTITIES.prevent_entity_creation( - Platform.BUTTON, zha_device.ieee, CLUSTER_HANDLER_IDENTIFY - ): - return None - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - _attr_device_class = ButtonDeviceClass.IDENTIFY - _attr_entity_category = EntityCategory.DIAGNOSTIC - _command_name = "identify" - _kwargs = {} - _args = [DEFAULT_DURATION] - - -class ZHAAttributeButton(ZhaEntity, ButtonEntity): - """Defines a ZHA button, which writes a value to an attribute.""" - - _attribute_name: str - _attribute_value: Any = None - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this button.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata( - self, entity_metadata: WriteAttributeButtonMetadata - ) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - self._attribute_value = entity_metadata.attribute_value - - async def async_press(self) -> None: - """Write attribute with defined value.""" - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: self._attribute_value} - ) - self.async_write_ha_state() - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_htnnfasr", - }, -) -class FrostLockResetButton(ZHAAttributeButton): - """Defines a ZHA frost lock reset button.""" - - _unique_id_suffix = "reset_frost_lock" - _attribute_name = "frost_lock_reset" - _attribute_value = 0 - _attr_device_class = ButtonDeviceClass.RESTART - _attr_entity_category = EntityCategory.CONFIG - 
_attr_translation_key = "reset_frost_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.motion.ac01"} -) -class NoPresenceStatusResetButton(ZHAAttributeButton): - """Defines a ZHA no presence status reset button.""" - - _unique_id_suffix = "reset_no_presence_status" - _attribute_name = "reset_no_presence_status" - _attribute_value = 1 - _attr_device_class = ButtonDeviceClass.RESTART - _attr_entity_category = EntityCategory.CONFIG - _attr_translation_key = "reset_no_presence_status" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -class AqaraPetFeederFeedButton(ZHAAttributeButton): - """Defines a feed button for the aqara c1 pet feeder.""" - - _unique_id_suffix = "feeding" - _attribute_name = "feeding" - _attribute_value = 1 - _attr_translation_key = "feed" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraSelfTestButton(ZHAAttributeButton): - """Defines a ZHA self-test button for Aqara smoke sensors.""" - - _unique_id_suffix = "self_test" - _attribute_name = "self_test" - _attribute_value = 1 - _attr_entity_category = EntityCategory.CONFIG - _attr_translation_key = "self_test" + await self.entity_data.entity.async_press() diff --git a/homeassistant/components/zha/climate.py b/homeassistant/components/zha/climate.py index 61c5f28ca8f..f4fb58c254a 100644 --- a/homeassistant/components/zha/climate.py +++ b/homeassistant/components/zha/climate.py @@ -6,109 +6,62 @@ at https://home-assistant.io/components/zha.climate/ from __future__ import annotations -from datetime import datetime, timedelta +from collections.abc import Mapping import functools -from random import randint from typing import Any -from zigpy.zcl.clusters.hvac import Fan as F, Thermostat as T +from zha.application.platforms.climate.const import ( + ClimateEntityFeature as ZHAClimateEntityFeature, + HVACAction as ZHAHVACAction, + HVACMode as ZHAHVACMode, 
+) from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - FAN_AUTO, - FAN_ON, - PRESET_AWAY, - PRESET_BOOST, - PRESET_COMFORT, - PRESET_ECO, - PRESET_NONE, + ATTR_TEMPERATURE, ClimateEntity, ClimateEntityFeature, HVACAction, HVACMode, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_TEMPERATURE, - PRECISION_TENTHS, - Platform, - UnitOfTemperature, -) +from homeassistant.const import PRECISION_TENTHS, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_track_time_interval -import homeassistant.util.dt as dt_util -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_FAN, - CLUSTER_HANDLER_THERMOSTAT, - PRESET_COMPLEX, - PRESET_SCHEDULE, - PRESET_TEMP_MANUAL, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + exclude_none_values, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity -ATTR_SYS_MODE = "system_mode" -ATTR_RUNNING_MODE = "running_mode" -ATTR_SETPT_CHANGE_SRC = "setpoint_change_source" -ATTR_SETPT_CHANGE_AMT = "setpoint_change_amount" -ATTR_OCCUPANCY = "occupancy" -ATTR_PI_COOLING_DEMAND = "pi_cooling_demand" -ATTR_PI_HEATING_DEMAND = "pi_heating_demand" -ATTR_OCCP_COOL_SETPT = "occupied_cooling_setpoint" -ATTR_OCCP_HEAT_SETPT = "occupied_heating_setpoint" -ATTR_UNOCCP_HEAT_SETPT = "unoccupied_heating_setpoint" -ATTR_UNOCCP_COOL_SETPT = "unoccupied_cooling_setpoint" - - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.CLIMATE) -MULTI_MATCH = 
functools.partial(ZHA_ENTITIES.multipass_match, Platform.CLIMATE) -RUNNING_MODE = {0x00: HVACMode.OFF, 0x03: HVACMode.COOL, 0x04: HVACMode.HEAT} - -SEQ_OF_OPERATION = { - 0x00: [HVACMode.OFF, HVACMode.COOL], # cooling only - 0x01: [HVACMode.OFF, HVACMode.COOL], # cooling with reheat - 0x02: [HVACMode.OFF, HVACMode.HEAT], # heating only - 0x03: [HVACMode.OFF, HVACMode.HEAT], # heating with reheat - # cooling and heating 4-pipes - 0x04: [HVACMode.OFF, HVACMode.HEAT_COOL, HVACMode.COOL, HVACMode.HEAT], - # cooling and heating 4-pipes - 0x05: [HVACMode.OFF, HVACMode.HEAT_COOL, HVACMode.COOL, HVACMode.HEAT], - 0x06: [HVACMode.COOL, HVACMode.HEAT, HVACMode.OFF], # centralite specific - 0x07: [HVACMode.HEAT_COOL, HVACMode.OFF], # centralite specific +ZHA_TO_HA_HVAC_MODE = { + ZHAHVACMode.OFF: HVACMode.OFF, + ZHAHVACMode.AUTO: HVACMode.AUTO, + ZHAHVACMode.HEAT: HVACMode.HEAT, + ZHAHVACMode.COOL: HVACMode.COOL, + ZHAHVACMode.HEAT_COOL: HVACMode.HEAT_COOL, + ZHAHVACMode.DRY: HVACMode.DRY, + ZHAHVACMode.FAN_ONLY: HVACMode.FAN_ONLY, } -HVAC_MODE_2_SYSTEM = { - HVACMode.OFF: T.SystemMode.Off, - HVACMode.HEAT_COOL: T.SystemMode.Auto, - HVACMode.COOL: T.SystemMode.Cool, - HVACMode.HEAT: T.SystemMode.Heat, - HVACMode.FAN_ONLY: T.SystemMode.Fan_only, - HVACMode.DRY: T.SystemMode.Dry, +ZHA_TO_HA_HVAC_ACTION = { + ZHAHVACAction.OFF: HVACAction.OFF, + ZHAHVACAction.HEATING: HVACAction.HEATING, + ZHAHVACAction.COOLING: HVACAction.COOLING, + ZHAHVACAction.DRYING: HVACAction.DRYING, + ZHAHVACAction.IDLE: HVACAction.IDLE, + ZHAHVACAction.FAN: HVACAction.FAN, + ZHAHVACAction.PREHEATING: HVACAction.PREHEATING, } -SYSTEM_MODE_2_HVAC = { - T.SystemMode.Off: HVACMode.OFF, - T.SystemMode.Auto: HVACMode.HEAT_COOL, - T.SystemMode.Cool: HVACMode.COOL, - T.SystemMode.Heat: HVACMode.HEAT, - T.SystemMode.Emergency_Heating: HVACMode.HEAT, - T.SystemMode.Pre_cooling: HVACMode.COOL, # this is 'precooling'. is it the same? 
- T.SystemMode.Fan_only: HVACMode.FAN_ONLY, - T.SystemMode.Dry: HVACMode.DRY, - T.SystemMode.Sleep: HVACMode.OFF, -} - -ZCL_TEMP = 100 - async def async_setup_entry( hass: HomeAssistant, @@ -118,708 +71,168 @@ async def async_setup_entry( """Set up the Zigbee Home Automation sensor from config entry.""" zha_data = get_zha_data(hass) entities_to_create = zha_data.platforms[Platform.CLIMATE] + unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, Thermostat, entities_to_create ), ) config_entry.async_on_unload(unsub) -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - aux_cluster_handlers=CLUSTER_HANDLER_FAN, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -class Thermostat(ZhaEntity, ClimateEntity): +class Thermostat(ZHAEntity, ClimateEntity): """Representation of a ZHA Thermostat device.""" - DEFAULT_MAX_TEMP = 35 - DEFAULT_MIN_TEMP = 7 - _attr_precision = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key: str = "thermostat" _enable_turn_on_off_backwards_compatibility = False - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._thrm = self.cluster_handlers.get(CLUSTER_HANDLER_THERMOSTAT) - self._preset = PRESET_NONE - self._presets = [] - self._supported_flags = ( - ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA thermostat entity.""" + super().__init__(entity_data, **kwargs) + self._attr_hvac_modes = [ + ZHA_TO_HA_HVAC_MODE[mode] for mode in self.entity_data.entity.hvac_modes + ] + self._attr_hvac_mode = ZHA_TO_HA_HVAC_MODE.get( + self.entity_data.entity.hvac_mode ) - 
self._fan = self.cluster_handlers.get(CLUSTER_HANDLER_FAN) + self._attr_hvac_action = ZHA_TO_HA_HVAC_ACTION.get( + self.entity_data.entity.hvac_action + ) + + features: ClimateEntityFeature = ClimateEntityFeature(0) + zha_features: ZHAClimateEntityFeature = ( + self.entity_data.entity.supported_features + ) + + if ZHAClimateEntityFeature.TARGET_TEMPERATURE in zha_features: + features |= ClimateEntityFeature.TARGET_TEMPERATURE + if ZHAClimateEntityFeature.TARGET_TEMPERATURE_RANGE in zha_features: + features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + if ZHAClimateEntityFeature.TARGET_HUMIDITY in zha_features: + features |= ClimateEntityFeature.TARGET_HUMIDITY + if ZHAClimateEntityFeature.PRESET_MODE in zha_features: + features |= ClimateEntityFeature.PRESET_MODE + if ZHAClimateEntityFeature.FAN_MODE in zha_features: + features |= ClimateEntityFeature.FAN_MODE + if ZHAClimateEntityFeature.SWING_MODE in zha_features: + features |= ClimateEntityFeature.SWING_MODE + if ZHAClimateEntityFeature.AUX_HEAT in zha_features: + features |= ClimateEntityFeature.AUX_HEAT + if ZHAClimateEntityFeature.TURN_OFF in zha_features: + features |= ClimateEntityFeature.TURN_OFF + if ZHAClimateEntityFeature.TURN_ON in zha_features: + features |= ClimateEntityFeature.TURN_ON + + self._attr_supported_features = features @property - def current_temperature(self): + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return entity specific state attributes.""" + state = self.entity_data.entity.state + + return exclude_none_values( + { + "occupancy": state.get("occupancy"), + "occupied_cooling_setpoint": state.get("occupied_cooling_setpoint"), + "occupied_heating_setpoint": state.get("occupied_heating_setpoint"), + "pi_cooling_demand": state.get("pi_cooling_demand"), + "pi_heating_demand": state.get("pi_heating_demand"), + "system_mode": state.get("system_mode"), + "unoccupied_cooling_setpoint": state.get("unoccupied_cooling_setpoint"), + "unoccupied_heating_setpoint": 
state.get("unoccupied_heating_setpoint"), + } + ) + + @property + def current_temperature(self) -> float | None: """Return the current temperature.""" - if self._thrm.local_temperature is None: - return None - return self._thrm.local_temperature / ZCL_TEMP - - @property - def extra_state_attributes(self): - """Return device specific state attributes.""" - data = {} - if self.hvac_mode: - mode = SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode, "unknown") - data[ATTR_SYS_MODE] = f"[{self._thrm.system_mode}]/{mode}" - if self._thrm.occupancy is not None: - data[ATTR_OCCUPANCY] = self._thrm.occupancy - if self._thrm.occupied_cooling_setpoint is not None: - data[ATTR_OCCP_COOL_SETPT] = self._thrm.occupied_cooling_setpoint - if self._thrm.occupied_heating_setpoint is not None: - data[ATTR_OCCP_HEAT_SETPT] = self._thrm.occupied_heating_setpoint - if self._thrm.pi_heating_demand is not None: - data[ATTR_PI_HEATING_DEMAND] = self._thrm.pi_heating_demand - if self._thrm.pi_cooling_demand is not None: - data[ATTR_PI_COOLING_DEMAND] = self._thrm.pi_cooling_demand - - unoccupied_cooling_setpoint = self._thrm.unoccupied_cooling_setpoint - if unoccupied_cooling_setpoint is not None: - data[ATTR_UNOCCP_COOL_SETPT] = unoccupied_cooling_setpoint - - unoccupied_heating_setpoint = self._thrm.unoccupied_heating_setpoint - if unoccupied_heating_setpoint is not None: - data[ATTR_UNOCCP_HEAT_SETPT] = unoccupied_heating_setpoint - return data + return self.entity_data.entity.current_temperature @property def fan_mode(self) -> str | None: """Return current FAN mode.""" - if self._thrm.running_state is None: - return FAN_AUTO - - if self._thrm.running_state & ( - T.RunningState.Fan_State_On - | T.RunningState.Fan_2nd_Stage_On - | T.RunningState.Fan_3rd_Stage_On - ): - return FAN_ON - return FAN_AUTO + return self.entity_data.entity.fan_mode @property def fan_modes(self) -> list[str] | None: """Return supported FAN modes.""" - if not self._fan: - return None - return [FAN_AUTO, FAN_ON] - - 
@property - def hvac_action(self) -> HVACAction | None: - """Return the current HVAC action.""" - if ( - self._thrm.pi_heating_demand is None - and self._thrm.pi_cooling_demand is None - ): - return self._rm_rs_action - return self._pi_demand_action - - @property - def _rm_rs_action(self) -> HVACAction | None: - """Return the current HVAC action based on running mode and running state.""" - - if (running_state := self._thrm.running_state) is None: - return None - if running_state & ( - T.RunningState.Heat_State_On | T.RunningState.Heat_2nd_Stage_On - ): - return HVACAction.HEATING - if running_state & ( - T.RunningState.Cool_State_On | T.RunningState.Cool_2nd_Stage_On - ): - return HVACAction.COOLING - if running_state & ( - T.RunningState.Fan_State_On - | T.RunningState.Fan_2nd_Stage_On - | T.RunningState.Fan_3rd_Stage_On - ): - return HVACAction.FAN - if running_state & T.RunningState.Idle: - return HVACAction.IDLE - if self.hvac_mode != HVACMode.OFF: - return HVACAction.IDLE - return HVACAction.OFF - - @property - def _pi_demand_action(self) -> HVACAction | None: - """Return the current HVAC action based on pi_demands.""" - - heating_demand = self._thrm.pi_heating_demand - if heating_demand is not None and heating_demand > 0: - return HVACAction.HEATING - cooling_demand = self._thrm.pi_cooling_demand - if cooling_demand is not None and cooling_demand > 0: - return HVACAction.COOLING - - if self.hvac_mode != HVACMode.OFF: - return HVACAction.IDLE - return HVACAction.OFF - - @property - def hvac_mode(self) -> HVACMode | None: - """Return HVAC operation mode.""" - return SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode) - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return the list of available HVAC operation modes.""" - return SEQ_OF_OPERATION.get(self._thrm.ctrl_sequence_of_oper, [HVACMode.OFF]) + return self.entity_data.entity.fan_modes @property def preset_mode(self) -> str: """Return current preset mode.""" - return self._preset + return 
self.entity_data.entity.preset_mode @property def preset_modes(self) -> list[str] | None: """Return supported preset modes.""" - return self._presets + return self.entity_data.entity.preset_modes @property - def supported_features(self) -> ClimateEntityFeature: - """Return the list of supported features.""" - features = self._supported_flags - if HVACMode.HEAT_COOL in self.hvac_modes: - features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - if self._fan is not None: - self._supported_flags |= ClimateEntityFeature.FAN_MODE - return features - - @property - def target_temperature(self): + def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - temp = None - if self.hvac_mode == HVACMode.COOL: - if self.preset_mode == PRESET_AWAY: - temp = self._thrm.unoccupied_cooling_setpoint - else: - temp = self._thrm.occupied_cooling_setpoint - elif self.hvac_mode == HVACMode.HEAT: - if self.preset_mode == PRESET_AWAY: - temp = self._thrm.unoccupied_heating_setpoint - else: - temp = self._thrm.occupied_heating_setpoint - if temp is None: - return temp - return round(temp / ZCL_TEMP, 1) + return self.entity_data.entity.target_temperature @property - def target_temperature_high(self): + def target_temperature_high(self) -> float | None: """Return the upper bound temperature we try to reach.""" - if self.hvac_mode != HVACMode.HEAT_COOL: - return None - if self.preset_mode == PRESET_AWAY: - temp = self._thrm.unoccupied_cooling_setpoint - else: - temp = self._thrm.occupied_cooling_setpoint - - if temp is None: - return temp - - return round(temp / ZCL_TEMP, 1) + return self.entity_data.entity.target_temperature_high @property - def target_temperature_low(self): + def target_temperature_low(self) -> float | None: """Return the lower bound temperature we try to reach.""" - if self.hvac_mode != HVACMode.HEAT_COOL: - return None - if self.preset_mode == PRESET_AWAY: - temp = self._thrm.unoccupied_heating_setpoint - else: - temp = 
self._thrm.occupied_heating_setpoint - - if temp is None: - return temp - return round(temp / ZCL_TEMP, 1) + return self.entity_data.entity.target_temperature_low @property def max_temp(self) -> float: """Return the maximum temperature.""" - temps = [] - if HVACMode.HEAT in self.hvac_modes: - temps.append(self._thrm.max_heat_setpoint_limit) - if HVACMode.COOL in self.hvac_modes: - temps.append(self._thrm.max_cool_setpoint_limit) - - if not temps: - return self.DEFAULT_MAX_TEMP - return round(max(temps) / ZCL_TEMP, 1) + return self.entity_data.entity.max_temp @property def min_temp(self) -> float: """Return the minimum temperature.""" - temps = [] - if HVACMode.HEAT in self.hvac_modes: - temps.append(self._thrm.min_heat_setpoint_limit) - if HVACMode.COOL in self.hvac_modes: - temps.append(self._thrm.min_cool_setpoint_limit) - - if not temps: - return self.DEFAULT_MIN_TEMP - return round(min(temps) / ZCL_TEMP, 1) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._thrm, SIGNAL_ATTR_UPDATED, self.async_attribute_updated - ) - - async def async_attribute_updated(self, attr_id, attr_name, value): - """Handle attribute update from device.""" - if ( - attr_name in (ATTR_OCCP_COOL_SETPT, ATTR_OCCP_HEAT_SETPT) - and self.preset_mode == PRESET_AWAY - ): - # occupancy attribute is an unreportable attribute, but if we get - # an attribute update for an "occupied" setpoint, there's a chance - # occupancy has changed - if await self._thrm.get_occupancy() is True: - self._preset = PRESET_NONE - - self.debug("Attribute '%s' = %s update", attr_name, value) - self.async_write_ha_state() - - async def async_set_fan_mode(self, fan_mode: str) -> None: - """Set fan mode.""" - if not self.fan_modes or fan_mode not in self.fan_modes: - self.warning("Unsupported '%s' fan mode", fan_mode) - return - - if fan_mode == FAN_ON: - mode = F.FanMode.On - else: - mode = F.FanMode.Auto 
- - await self._fan.async_set_speed(mode) - - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set new target operation mode.""" - if hvac_mode not in self.hvac_modes: - self.warning( - "can't set '%s' mode. Supported modes are: %s", - hvac_mode, - self.hvac_modes, - ) - return - - if await self._thrm.async_set_operation_mode(HVAC_MODE_2_SYSTEM[hvac_mode]): - self.async_write_ha_state() - - async def async_set_preset_mode(self, preset_mode: str) -> None: - """Set new preset mode.""" - if not self.preset_modes or preset_mode not in self.preset_modes: - self.debug("Preset mode '%s' is not supported", preset_mode) - return - - if self.preset_mode not in ( - preset_mode, - PRESET_NONE, - ): - await self.async_preset_handler(self.preset_mode, enable=False) - - if preset_mode != PRESET_NONE: - await self.async_preset_handler(preset_mode, enable=True) - - self._preset = preset_mode - self.async_write_ha_state() - - async def async_set_temperature(self, **kwargs: Any) -> None: - """Set new target temperature.""" - low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW) - high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH) - temp = kwargs.get(ATTR_TEMPERATURE) - hvac_mode = kwargs.get(ATTR_HVAC_MODE) - - if hvac_mode is not None: - await self.async_set_hvac_mode(hvac_mode) - - is_away = self.preset_mode == PRESET_AWAY - - if self.hvac_mode == HVACMode.HEAT_COOL: - if low_temp is not None: - await self._thrm.async_set_heating_setpoint( - temperature=int(low_temp * ZCL_TEMP), - is_away=is_away, - ) - if high_temp is not None: - await self._thrm.async_set_cooling_setpoint( - temperature=int(high_temp * ZCL_TEMP), - is_away=is_away, - ) - elif temp is not None: - if self.hvac_mode == HVACMode.COOL: - await self._thrm.async_set_cooling_setpoint( - temperature=int(temp * ZCL_TEMP), - is_away=is_away, - ) - elif self.hvac_mode == HVACMode.HEAT: - await self._thrm.async_set_heating_setpoint( - temperature=int(temp * ZCL_TEMP), - is_away=is_away, - ) - else: - self.debug("Not 
setting temperature for '%s' mode", self.hvac_mode) - return - else: - self.debug("incorrect %s setting for '%s' mode", kwargs, self.hvac_mode) - return - - self.async_write_ha_state() - - async def async_preset_handler(self, preset: str, enable: bool = False) -> None: - """Set the preset mode via handler.""" - - handler = getattr(self, f"async_preset_handler_{preset}") - await handler(enable) - - -@MULTI_MATCH( - cluster_handler_names={CLUSTER_HANDLER_THERMOSTAT, "sinope_manufacturer_specific"}, - manufacturers="Sinope Technologies", - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -class SinopeTechnologiesThermostat(Thermostat): - """Sinope Technologies Thermostat.""" - - manufacturer = 0x119C - update_time_interval = timedelta(minutes=randint(45, 75)) - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._presets = [PRESET_AWAY, PRESET_NONE] - self._supported_flags |= ClimateEntityFeature.PRESET_MODE - self._manufacturer_ch = self.cluster_handlers["sinope_manufacturer_specific"] - - @property - def _rm_rs_action(self) -> HVACAction: - """Return the current HVAC action based on running mode and running state.""" - - running_mode = self._thrm.running_mode - if running_mode == T.SystemMode.Heat: - return HVACAction.HEATING - if running_mode == T.SystemMode.Cool: - return HVACAction.COOLING - - running_state = self._thrm.running_state - if running_state and running_state & ( - T.RunningState.Fan_State_On - | T.RunningState.Fan_2nd_Stage_On - | T.RunningState.Fan_3rd_Stage_On - ): - return HVACAction.FAN - if self.hvac_mode != HVACMode.OFF and running_mode == T.SystemMode.Off: - return HVACAction.IDLE - return HVACAction.OFF + return self.entity_data.entity.min_temp @callback - def _async_update_time(self, timestamp=None) -> None: - """Update thermostat's time display.""" - - secs_2k = ( - dt_util.now().replace(tzinfo=None) 
- datetime(2000, 1, 1, 0, 0, 0, 0) - ).total_seconds() - - self.debug("Updating time: %s", secs_2k) - self._manufacturer_ch.cluster.create_catching_task( - self._manufacturer_ch.write_attributes_safe( - {"secs_since_2k": secs_2k}, manufacturer=self.manufacturer - ) + def _handle_entity_events(self, event: Any) -> None: + """Entity state changed.""" + self._attr_hvac_mode = ZHA_TO_HA_HVAC_MODE.get( + self.entity_data.entity.hvac_mode ) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to Hass.""" - await super().async_added_to_hass() - self.async_on_remove( - async_track_time_interval( - self.hass, self._async_update_time, self.update_time_interval - ) + self._attr_hvac_action = ZHA_TO_HA_HVAC_ACTION.get( + self.entity_data.entity.hvac_action ) - self._async_update_time() + super()._handle_entity_events(event) - async def async_preset_handler_away(self, is_away: bool = False) -> None: - """Set occupancy.""" - mfg_code = self._zha_device.manufacturer_code - await self._thrm.write_attributes_safe( - {"set_occupancy": 0 if is_away else 1}, manufacturer=mfg_code + @convert_zha_error_to_ha_error + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set fan mode.""" + await self.entity_data.entity.async_set_fan_mode(fan_mode=fan_mode) + self.async_write_ha_state() + + @convert_zha_error_to_ha_error + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target operation mode.""" + await self.entity_data.entity.async_set_hvac_mode(hvac_mode=hvac_mode) + self.async_write_ha_state() + + @convert_zha_error_to_ha_error + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + await self.entity_data.entity.async_set_preset_mode(preset_mode=preset_mode) + self.async_write_ha_state() + + @convert_zha_error_to_ha_error + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + await 
self.entity_data.entity.async_set_temperature( + target_temp_low=kwargs.get(ATTR_TARGET_TEMP_LOW), + target_temp_high=kwargs.get(ATTR_TARGET_TEMP_HIGH), + temperature=kwargs.get(ATTR_TEMPERATURE), + hvac_mode=kwargs.get(ATTR_HVAC_MODE), ) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - aux_cluster_handlers=CLUSTER_HANDLER_FAN, - manufacturers={"Zen Within", "LUX"}, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -class ZenWithinThermostat(Thermostat): - """Zen Within Thermostat implementation.""" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - aux_cluster_handlers=CLUSTER_HANDLER_FAN, - manufacturers="Centralite", - models={"3157100", "3157100-E"}, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -class CentralitePearl(ZenWithinThermostat): - """Centralite Pearl Thermostat implementation.""" - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - manufacturers={ - "_TZE200_ckud7u2l", - "_TZE200_ywdxldoj", - "_TZE200_cwnjrr72", - "_TZE200_2atgpdho", - "_TZE200_pvvbommb", - "_TZE200_4eeyebrt", - "_TZE200_cpmgn2cf", - "_TZE200_9sfg7gm0", - "_TZE200_8whxpsiw", - "_TYST11_ckud7u2l", - "_TYST11_ywdxldoj", - "_TYST11_cwnjrr72", - "_TYST11_2atgpdho", - }, -) -class MoesThermostat(Thermostat): - """Moes Thermostat implementation.""" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._presets = [ - PRESET_NONE, - PRESET_AWAY, - PRESET_SCHEDULE, - PRESET_COMFORT, - PRESET_ECO, - PRESET_BOOST, - PRESET_COMPLEX, - ] - self._supported_flags |= ClimateEntityFeature.PRESET_MODE - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return only the heat mode, because the device can't be turned off.""" - return [HVACMode.HEAT] - - async def async_attribute_updated(self, attr_id, attr_name, value): - """Handle attribute update from device.""" - if attr_name == 
"operation_preset": - if value == 0: - self._preset = PRESET_AWAY - if value == 1: - self._preset = PRESET_SCHEDULE - if value == 2: - self._preset = PRESET_NONE - if value == 3: - self._preset = PRESET_COMFORT - if value == 4: - self._preset = PRESET_ECO - if value == 5: - self._preset = PRESET_BOOST - if value == 6: - self._preset = PRESET_COMPLEX - await super().async_attribute_updated(attr_id, attr_name, value) - - async def async_preset_handler(self, preset: str, enable: bool = False) -> None: - """Set the preset mode.""" - mfg_code = self._zha_device.manufacturer_code - if not enable: - return await self._thrm.write_attributes_safe( - {"operation_preset": 2}, manufacturer=mfg_code - ) - if preset == PRESET_AWAY: - return await self._thrm.write_attributes_safe( - {"operation_preset": 0}, manufacturer=mfg_code - ) - if preset == PRESET_SCHEDULE: - return await self._thrm.write_attributes_safe( - {"operation_preset": 1}, manufacturer=mfg_code - ) - if preset == PRESET_COMFORT: - return await self._thrm.write_attributes_safe( - {"operation_preset": 3}, manufacturer=mfg_code - ) - if preset == PRESET_ECO: - return await self._thrm.write_attributes_safe( - {"operation_preset": 4}, manufacturer=mfg_code - ) - if preset == PRESET_BOOST: - return await self._thrm.write_attributes_safe( - {"operation_preset": 5}, manufacturer=mfg_code - ) - if preset == PRESET_COMPLEX: - return await self._thrm.write_attributes_safe( - {"operation_preset": 6}, manufacturer=mfg_code - ) - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - manufacturers={ - "_TZE200_b6wax7g0", - }, -) -class BecaThermostat(Thermostat): - """Beca Thermostat implementation.""" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._presets = [ - PRESET_NONE, - PRESET_AWAY, - PRESET_SCHEDULE, - PRESET_ECO, - PRESET_BOOST, - PRESET_TEMP_MANUAL, - ] 
- self._supported_flags |= ClimateEntityFeature.PRESET_MODE - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return only the heat mode, because the device can't be turned off.""" - return [HVACMode.HEAT] - - async def async_attribute_updated(self, attr_id, attr_name, value): - """Handle attribute update from device.""" - if attr_name == "operation_preset": - if value == 0: - self._preset = PRESET_AWAY - if value == 1: - self._preset = PRESET_SCHEDULE - if value == 2: - self._preset = PRESET_NONE - if value == 4: - self._preset = PRESET_ECO - if value == 5: - self._preset = PRESET_BOOST - if value == 7: - self._preset = PRESET_TEMP_MANUAL - await super().async_attribute_updated(attr_id, attr_name, value) - - async def async_preset_handler(self, preset: str, enable: bool = False) -> None: - """Set the preset mode.""" - mfg_code = self._zha_device.manufacturer_code - if not enable: - return await self._thrm.write_attributes_safe( - {"operation_preset": 2}, manufacturer=mfg_code - ) - if preset == PRESET_AWAY: - return await self._thrm.write_attributes_safe( - {"operation_preset": 0}, manufacturer=mfg_code - ) - if preset == PRESET_SCHEDULE: - return await self._thrm.write_attributes_safe( - {"operation_preset": 1}, manufacturer=mfg_code - ) - if preset == PRESET_ECO: - return await self._thrm.write_attributes_safe( - {"operation_preset": 4}, manufacturer=mfg_code - ) - if preset == PRESET_BOOST: - return await self._thrm.write_attributes_safe( - {"operation_preset": 5}, manufacturer=mfg_code - ) - if preset == PRESET_TEMP_MANUAL: - return await self._thrm.write_attributes_safe( - {"operation_preset": 7}, manufacturer=mfg_code - ) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - manufacturers="Stelpro", - models={"SORB"}, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -class StelproFanHeater(Thermostat): - """Stelpro Fan Heater implementation.""" - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return only the heat 
mode, because the device can't be turned off.""" - return [HVACMode.HEAT] - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - manufacturers={ - "_TZE200_7yoranx2", - "_TZE200_e9ba97vf", # TV01-ZG - "_TZE200_hue3yfsn", # TV02-ZG - "_TZE200_husqqvux", # TSL-TRV-TV01ZG - "_TZE200_kds0pmmv", # MOES TRV TV02 - "_TZE200_kly8gjlz", # TV05-ZG - "_TZE200_lnbfnyxd", - "_TZE200_mudxchsu", - }, -) -class ZONNSMARTThermostat(Thermostat): - """ZONNSMART Thermostat implementation. - - Notice that this device uses two holiday presets (2: HolidayMode, - 3: HolidayModeTemp), but only one of them can be set. - """ - - PRESET_HOLIDAY = "holiday" - PRESET_FROST = "frost protect" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._presets = [ - PRESET_NONE, - self.PRESET_HOLIDAY, - PRESET_SCHEDULE, - self.PRESET_FROST, - ] - self._supported_flags |= ClimateEntityFeature.PRESET_MODE - - async def async_attribute_updated(self, attr_id, attr_name, value): - """Handle attribute update from device.""" - if attr_name == "operation_preset": - if value == 0: - self._preset = PRESET_SCHEDULE - if value == 1: - self._preset = PRESET_NONE - if value == 2: - self._preset = self.PRESET_HOLIDAY - if value == 3: - self._preset = self.PRESET_HOLIDAY - if value == 4: - self._preset = self.PRESET_FROST - await super().async_attribute_updated(attr_id, attr_name, value) - - async def async_preset_handler(self, preset: str, enable: bool = False) -> None: - """Set the preset mode.""" - mfg_code = self._zha_device.manufacturer_code - if not enable: - return await self._thrm.write_attributes_safe( - {"operation_preset": 1}, manufacturer=mfg_code - ) - if preset == PRESET_SCHEDULE: - return await self._thrm.write_attributes_safe( - {"operation_preset": 0}, manufacturer=mfg_code - ) - if preset == self.PRESET_HOLIDAY: - return await 
self._thrm.write_attributes_safe( - {"operation_preset": 3}, manufacturer=mfg_code - ) - if preset == self.PRESET_FROST: - return await self._thrm.write_attributes_safe( - {"operation_preset": 4}, manufacturer=mfg_code - ) + self.async_write_ha_state() diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index 037ad4192bd..3a7b54652d9 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -10,6 +10,7 @@ from typing import Any import serial.tools.list_ports from serial.tools.list_ports_common import ListPortInfo import voluptuous as vol +from zha.application.const import RadioType import zigpy.backups from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH @@ -35,13 +36,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.selector import FileSelector, FileSelectorConfig from homeassistant.util import dt as dt_util -from .core.const import ( - CONF_BAUDRATE, - CONF_FLOW_CONTROL, - CONF_RADIO_TYPE, - DOMAIN, - RadioType, -) +from .const import CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, DOMAIN from .radio_manager import ( DEVICE_SCHEMA, HARDWARE_DISCOVERY_SCHEMA, @@ -146,12 +141,12 @@ class BaseZhaFlow(ConfigEntryBaseFlow): self._title: str | None = None @property - def hass(self): + def hass(self) -> HomeAssistant: """Return hass.""" return self._hass @hass.setter - def hass(self, hass): + def hass(self, hass: HomeAssistant) -> None: """Set hass.""" self._hass = hass self._radio_mgr.hass = hass @@ -221,7 +216,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow): return await self.async_step_verify_radio() # Pre-select the currently configured port - default_port = vol.UNDEFINED + default_port: vol.Undefined | str = vol.UNDEFINED if self._radio_mgr.device_path is not None: for description, port in zip(list_of_ports, ports, strict=False): @@ -251,7 +246,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow): return await 
self.async_step_manual_port_config() # Pre-select the current radio type - default = vol.UNDEFINED + default: vol.Undefined | str = vol.UNDEFINED if self._radio_mgr.radio_type is not None: default = self._radio_mgr.radio_type.description diff --git a/homeassistant/components/zha/const.py b/homeassistant/components/zha/const.py new file mode 100644 index 00000000000..3986a99cf3f --- /dev/null +++ b/homeassistant/components/zha/const.py @@ -0,0 +1,76 @@ +"""Constants for the ZHA integration.""" + +EZSP_OVERWRITE_EUI64 = ( + "i_understand_i_can_update_eui64_only_once_and_i_still_want_to_do_it" +) + +ATTR_ACTIVE_COORDINATOR = "active_coordinator" +ATTR_ATTRIBUTES = "attributes" +ATTR_AVAILABLE = "available" +ATTR_DEVICE_TYPE = "device_type" +ATTR_CLUSTER_NAME = "cluster_name" +ATTR_ENDPOINT_NAMES = "endpoint_names" +ATTR_IEEE = "ieee" +ATTR_LAST_SEEN = "last_seen" +ATTR_LQI = "lqi" +ATTR_MANUFACTURER = "manufacturer" +ATTR_MANUFACTURER_CODE = "manufacturer_code" +ATTR_NEIGHBORS = "neighbors" +ATTR_NWK = "nwk" +ATTR_POWER_SOURCE = "power_source" +ATTR_QUIRK_APPLIED = "quirk_applied" +ATTR_QUIRK_CLASS = "quirk_class" +ATTR_QUIRK_ID = "quirk_id" +ATTR_ROUTES = "routes" +ATTR_RSSI = "rssi" +ATTR_SIGNATURE = "signature" +ATTR_SUCCESS = "success" + + +CONF_ALARM_MASTER_CODE = "alarm_master_code" +CONF_ALARM_FAILED_TRIES = "alarm_failed_tries" +CONF_ALARM_ARM_REQUIRES_CODE = "alarm_arm_requires_code" + +CONF_RADIO_TYPE = "radio_type" +CONF_USB_PATH = "usb_path" +CONF_USE_THREAD = "use_thread" +CONF_BAUDRATE = "baudrate" +CONF_FLOW_CONTROL = "flow_control" + +CONF_ENABLE_QUIRKS = "enable_quirks" +CONF_CUSTOM_QUIRKS_PATH = "custom_quirks_path" + +CONF_DEFAULT_LIGHT_TRANSITION = "default_light_transition" +CONF_ENABLE_ENHANCED_LIGHT_TRANSITION = "enhanced_light_transition" +CONF_ENABLE_LIGHT_TRANSITIONING_FLAG = "light_transitioning_flag" +CONF_ALWAYS_PREFER_XY_COLOR_MODE = "always_prefer_xy_color_mode" +CONF_GROUP_MEMBERS_ASSUME_STATE = "group_members_assume_state" + 
+CONF_ENABLE_IDENTIFY_ON_JOIN = "enable_identify_on_join" +CONF_CONSIDER_UNAVAILABLE_MAINS = "consider_unavailable_mains" +CONF_CONSIDER_UNAVAILABLE_BATTERY = "consider_unavailable_battery" + +CONF_ZIGPY = "zigpy_config" +CONF_DEVICE_CONFIG = "device_config" + +CUSTOM_CONFIGURATION = "custom_configuration" + +DATA_ZHA = "zha" +DATA_ZHA_DEVICE_TRIGGER_CACHE = "zha_device_trigger_cache" + +DEFAULT_DATABASE_NAME = "zigbee.db" + +DEVICE_PAIRING_STATUS = "pairing_status" + +DOMAIN = "zha" + +GROUP_ID = "group_id" + + +GROUP_IDS = "group_ids" +GROUP_NAME = "group_name" + +MFG_CLUSTER_ID_START = 0xFC00 + +ZHA_ALARM_OPTIONS = "zha_alarm_options" +ZHA_OPTIONS = "zha_options" diff --git a/homeassistant/components/zha/core/__init__.py b/homeassistant/components/zha/core/__init__.py deleted file mode 100644 index 755eac3c4ce..00000000000 --- a/homeassistant/components/zha/core/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Core module for Zigbee Home Automation.""" - -from .device import ZHADevice -from .gateway import ZHAGateway - -__all__ = ["ZHADevice", "ZHAGateway"] diff --git a/homeassistant/components/zha/core/cluster_handlers/__init__.py b/homeassistant/components/zha/core/cluster_handlers/__init__.py deleted file mode 100644 index 8833d5c116f..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/__init__.py +++ /dev/null @@ -1,654 +0,0 @@ -"""Cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from collections.abc import Awaitable, Callable, Coroutine, Iterator -import contextlib -from enum import Enum -import functools -import logging -from typing import TYPE_CHECKING, Any, TypedDict - -import zigpy.exceptions -import zigpy.util -import zigpy.zcl -from zigpy.zcl.foundation import ( - CommandSchema, - ConfigureReportingResponseRecord, - Status, - ZCLAttributeDef, -) - -from homeassistant.const import ATTR_COMMAND -from homeassistant.core import callback -from homeassistant.exceptions import HomeAssistantError 
-from homeassistant.helpers.dispatcher import async_dispatcher_send - -from ..const import ( - ATTR_ARGS, - ATTR_ATTRIBUTE_ID, - ATTR_ATTRIBUTE_NAME, - ATTR_CLUSTER_ID, - ATTR_PARAMS, - ATTR_TYPE, - ATTR_UNIQUE_ID, - ATTR_VALUE, - CLUSTER_HANDLER_ZDO, - REPORT_CONFIG_ATTR_PER_REQ, - SIGNAL_ATTR_UPDATED, - ZHA_CLUSTER_HANDLER_MSG, - ZHA_CLUSTER_HANDLER_MSG_BIND, - ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, - ZHA_CLUSTER_HANDLER_MSG_DATA, - ZHA_CLUSTER_HANDLER_READS_PER_REQ, -) -from ..helpers import LogMixin, safe_read - -if TYPE_CHECKING: - from ..endpoint import Endpoint - -_LOGGER = logging.getLogger(__name__) -RETRYABLE_REQUEST_DECORATOR = zigpy.util.retryable_request(tries=3) -UNPROXIED_CLUSTER_METHODS = {"general_command"} - -type _FuncType[**_P] = Callable[_P, Awaitable[Any]] -type _ReturnFuncType[**_P] = Callable[_P, Coroutine[Any, Any, Any]] - - -@contextlib.contextmanager -def wrap_zigpy_exceptions() -> Iterator[None]: - """Wrap zigpy exceptions in `HomeAssistantError` exceptions.""" - try: - yield - except TimeoutError as exc: - raise HomeAssistantError( - "Failed to send request: device did not respond" - ) from exc - except zigpy.exceptions.ZigbeeException as exc: - message = "Failed to send request" - - if str(exc): - message = f"{message}: {exc}" - - raise HomeAssistantError(message) from exc - - -def retry_request[**_P](func: _FuncType[_P]) -> _ReturnFuncType[_P]: - """Send a request with retries and wrap expected zigpy exceptions.""" - - @functools.wraps(func) - async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> Any: - with wrap_zigpy_exceptions(): - return await RETRYABLE_REQUEST_DECORATOR(func)(*args, **kwargs) - - return wrapper - - -class AttrReportConfig(TypedDict, total=True): - """Configuration to report for the attributes.""" - - # An attribute name - attr: str - # The config for the attribute reporting configuration consists of a tuple for - # (minimum_reported_time_interval_s, maximum_reported_time_interval_s, value_delta) - config: 
tuple[int, int, int | float] - - -def parse_and_log_command(cluster_handler, tsn, command_id, args): - """Parse and log a zigbee cluster command.""" - try: - name = cluster_handler.cluster.server_commands[command_id].name - except KeyError: - name = f"0x{command_id:02X}" - - cluster_handler.debug( - "received '%s' command with %s args on cluster_id '%s' tsn '%s'", - name, - args, - cluster_handler.cluster.cluster_id, - tsn, - ) - return name - - -class ClusterHandlerStatus(Enum): - """Status of a cluster handler.""" - - CREATED = 1 - CONFIGURED = 2 - INITIALIZED = 3 - - -class ClusterHandler(LogMixin): - """Base cluster handler for a Zigbee cluster.""" - - REPORT_CONFIG: tuple[AttrReportConfig, ...] = () - BIND: bool = True - - # Dict of attributes to read on cluster handler initialization. - # Dict keys -- attribute ID or names, with bool value indicating whether a cached - # attribute read is acceptable. - ZCL_INIT_ATTRS: dict[str, bool] = {} - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize ClusterHandler.""" - self._generic_id = f"cluster_handler_0x{cluster.cluster_id:04x}" - self._endpoint: Endpoint = endpoint - self._cluster = cluster - self._id = f"{endpoint.id}:0x{cluster.cluster_id:04x}" - unique_id = endpoint.unique_id.replace("-", ":") - self._unique_id = f"{unique_id}:0x{cluster.cluster_id:04x}" - if not hasattr(self, "_value_attribute") and self.REPORT_CONFIG: - attr_def: ZCLAttributeDef = self.cluster.attributes_by_name[ - self.REPORT_CONFIG[0]["attr"] - ] - self.value_attribute = attr_def.id - self._status = ClusterHandlerStatus.CREATED - self._cluster.add_listener(self) - self.data_cache: dict[str, Enum] = {} - - @classmethod - def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: - """Filter the cluster match for specific devices.""" - return True - - @property - def id(self) -> str: - """Return cluster handler id unique for this device only.""" - return self._id - - @property - def 
generic_id(self): - """Return the generic id for this cluster handler.""" - return self._generic_id - - @property - def unique_id(self): - """Return the unique id for this cluster handler.""" - return self._unique_id - - @property - def cluster(self): - """Return the zigpy cluster for this cluster handler.""" - return self._cluster - - @property - def name(self) -> str: - """Return friendly name.""" - return self.cluster.ep_attribute or self._generic_id - - @property - def status(self): - """Return the status of the cluster handler.""" - return self._status - - def __hash__(self) -> int: - """Make this a hashable.""" - return hash(self._unique_id) - - @callback - def async_send_signal(self, signal: str, *args: Any) -> None: - """Send a signal through hass dispatcher.""" - self._endpoint.async_send_signal(signal, *args) - - async def bind(self): - """Bind a zigbee cluster. - - This also swallows ZigbeeException exceptions that are thrown when - devices are unreachable. - """ - try: - res = await self.cluster.bind() - self.debug("bound '%s' cluster: %s", self.cluster.ep_attribute, res[0]) - async_dispatcher_send( - self._endpoint.device.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, - ZHA_CLUSTER_HANDLER_MSG_DATA: { - "cluster_name": self.cluster.name, - "cluster_id": self.cluster.cluster_id, - "success": res[0] == 0, - }, - }, - ) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "Failed to bind '%s' cluster: %s", - self.cluster.ep_attribute, - str(ex), - exc_info=ex, - ) - async_dispatcher_send( - self._endpoint.device.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, - ZHA_CLUSTER_HANDLER_MSG_DATA: { - "cluster_name": self.cluster.name, - "cluster_id": self.cluster.cluster_id, - "success": False, - }, - }, - ) - - async def configure_reporting(self) -> None: - """Configure attribute reporting for a cluster. 
- - This also swallows ZigbeeException exceptions that are thrown when - devices are unreachable. - """ - event_data = {} - kwargs = {} - if ( - self.cluster.cluster_id >= 0xFC00 - and self._endpoint.device.manufacturer_code - ): - kwargs["manufacturer"] = self._endpoint.device.manufacturer_code - - for attr_report in self.REPORT_CONFIG: - attr, config = attr_report["attr"], attr_report["config"] - - try: - attr_name = self.cluster.find_attribute(attr).name - except KeyError: - attr_name = attr - - event_data[attr_name] = { - "min": config[0], - "max": config[1], - "id": attr, - "name": attr_name, - "change": config[2], - "status": None, - } - - to_configure = [*self.REPORT_CONFIG] - chunk, rest = ( - to_configure[:REPORT_CONFIG_ATTR_PER_REQ], - to_configure[REPORT_CONFIG_ATTR_PER_REQ:], - ) - while chunk: - reports = {rec["attr"]: rec["config"] for rec in chunk} - try: - res = await self.cluster.configure_reporting_multiple(reports, **kwargs) - self._configure_reporting_status(reports, res[0], event_data) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "failed to set reporting on '%s' cluster for: %s", - self.cluster.ep_attribute, - str(ex), - ) - break - chunk, rest = ( - rest[:REPORT_CONFIG_ATTR_PER_REQ], - rest[REPORT_CONFIG_ATTR_PER_REQ:], - ) - - async_dispatcher_send( - self._endpoint.device.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, - ZHA_CLUSTER_HANDLER_MSG_DATA: { - "cluster_name": self.cluster.name, - "cluster_id": self.cluster.cluster_id, - "attributes": event_data, - }, - }, - ) - - def _configure_reporting_status( - self, - attrs: dict[str, tuple[int, int, float | int]], - res: list | tuple, - event_data: dict[str, dict[str, Any]], - ) -> None: - """Parse configure reporting result.""" - if isinstance(res, (Exception, ConfigureReportingResponseRecord)): - # assume default response - self.debug( - "attr reporting for '%s' on '%s': %s", - attrs, - self.name, - res, - ) - for attr 
in attrs: - event_data[attr]["status"] = Status.FAILURE.name - return - if res[0].status == Status.SUCCESS and len(res) == 1: - self.debug( - "Successfully configured reporting for '%s' on '%s' cluster: %s", - attrs, - self.name, - res, - ) - # 2.5.8.1.3 Status Field - # The status field specifies the status of the Configure Reporting operation attempted on this attribute, as detailed in 2.5.7.3. - # Note that attribute status records are not included for successfully configured attributes, in order to save bandwidth. - # In the case of successful configuration of all attributes, only a single attribute status record SHALL be included in the command, - # with the status field set to SUCCESS and the direction and attribute identifier fields omitted. - for attr in attrs: - event_data[attr]["status"] = Status.SUCCESS.name - return - - for record in res: - event_data[self.cluster.find_attribute(record.attrid).name]["status"] = ( - record.status.name - ) - failed = [ - self.cluster.find_attribute(record.attrid).name - for record in res - if record.status != Status.SUCCESS - ] - self.debug( - "Failed to configure reporting for '%s' on '%s' cluster: %s", - failed, - self.name, - res, - ) - success = set(attrs) - set(failed) - self.debug( - "Successfully configured reporting for '%s' on '%s' cluster", - set(attrs) - set(failed), - self.name, - ) - for attr in success: - event_data[attr]["status"] = Status.SUCCESS.name - - async def async_configure(self) -> None: - """Set cluster binding and attribute reporting.""" - if not self._endpoint.device.skip_configuration: - if self.BIND: - self.debug("Performing cluster binding") - await self.bind() - if self.cluster.is_server: - self.debug("Configuring cluster attribute reporting") - await self.configure_reporting() - ch_specific_cfg = getattr( - self, "async_configure_cluster_handler_specific", None - ) - if ch_specific_cfg: - self.debug("Performing cluster handler specific configuration") - await ch_specific_cfg() - 
self.debug("finished cluster handler configuration") - else: - self.debug("skipping cluster handler configuration") - self._status = ClusterHandlerStatus.CONFIGURED - - async def async_initialize(self, from_cache: bool) -> None: - """Initialize cluster handler.""" - if not from_cache and self._endpoint.device.skip_configuration: - self.debug("Skipping cluster handler initialization") - self._status = ClusterHandlerStatus.INITIALIZED - return - - self.debug("initializing cluster handler: from_cache: %s", from_cache) - cached = [a for a, cached in self.ZCL_INIT_ATTRS.items() if cached] - uncached = [a for a, cached in self.ZCL_INIT_ATTRS.items() if not cached] - uncached.extend([cfg["attr"] for cfg in self.REPORT_CONFIG]) - - if cached: - self.debug("initializing cached cluster handler attributes: %s", cached) - await self._get_attributes( - True, cached, from_cache=True, only_cache=from_cache - ) - if uncached: - self.debug( - "initializing uncached cluster handler attributes: %s - from cache[%s]", - uncached, - from_cache, - ) - await self._get_attributes( - True, uncached, from_cache=from_cache, only_cache=from_cache - ) - - ch_specific_init = getattr( - self, "async_initialize_cluster_handler_specific", None - ) - if ch_specific_init: - self.debug( - "Performing cluster handler specific initialization: %s", uncached - ) - await ch_specific_init(from_cache=from_cache) - - self.debug("finished cluster handler initialization") - self._status = ClusterHandlerStatus.INITIALIZED - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "cluster_handler[%s] attribute_updated - cluster[%s] attr[%s] value[%s]", - self.name, - self.cluster.name, - attr_name, - value, - ) - self.async_send_signal( - 
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - attr_name, - value, - ) - - @callback - def zdo_command(self, *args, **kwargs): - """Handle ZDO commands on this cluster.""" - - @callback - def zha_send_event(self, command: str, arg: list | dict | CommandSchema) -> None: - """Relay events to hass.""" - - args: list | dict - if isinstance(arg, CommandSchema): - args = [a for a in arg if a is not None] - params = arg.as_dict() - elif isinstance(arg, (list, dict)): - # Quirks can directly send lists and dicts to ZHA this way - args = arg - params = {} - else: - raise TypeError(f"Unexpected zha_send_event {command!r} argument: {arg!r}") - - self._endpoint.send_event( - { - ATTR_UNIQUE_ID: self.unique_id, - ATTR_CLUSTER_ID: self.cluster.cluster_id, - ATTR_COMMAND: command, - # Maintain backwards compatibility with the old zigpy response format - ATTR_ARGS: args, - ATTR_PARAMS: params, - } - ) - - async def async_update(self): - """Retrieve latest state from cluster.""" - - def _get_attribute_name(self, attrid: int) -> str | int: - if attrid not in self.cluster.attributes: - return attrid - - return self.cluster.attributes[attrid].name - - async def get_attribute_value(self, attribute, from_cache=True): - """Get the value for an attribute.""" - manufacturer = None - manufacturer_code = self._endpoint.device.manufacturer_code - if self.cluster.cluster_id >= 0xFC00 and manufacturer_code: - manufacturer = manufacturer_code - result = await safe_read( - self._cluster, - [attribute], - allow_cache=from_cache, - only_cache=from_cache, - manufacturer=manufacturer, - ) - return result.get(attribute) - - async def _get_attributes( - self, - raise_exceptions: bool, - attributes: list[str], - from_cache: bool = True, - only_cache: bool = True, - ) -> dict[int | str, Any]: - """Get the values for a list of attributes.""" - manufacturer = None - manufacturer_code = self._endpoint.device.manufacturer_code - if self.cluster.cluster_id >= 0xFC00 and manufacturer_code: - 
manufacturer = manufacturer_code - chunk = attributes[:ZHA_CLUSTER_HANDLER_READS_PER_REQ] - rest = attributes[ZHA_CLUSTER_HANDLER_READS_PER_REQ:] - result = {} - while chunk: - try: - self.debug("Reading attributes in chunks: %s", chunk) - read, _ = await self.cluster.read_attributes( - chunk, - allow_cache=from_cache, - only_cache=only_cache, - manufacturer=manufacturer, - ) - result.update(read) - except (TimeoutError, zigpy.exceptions.ZigbeeException) as ex: - self.debug( - "failed to get attributes '%s' on '%s' cluster: %s", - chunk, - self.cluster.ep_attribute, - str(ex), - ) - if raise_exceptions: - raise - chunk = rest[:ZHA_CLUSTER_HANDLER_READS_PER_REQ] - rest = rest[ZHA_CLUSTER_HANDLER_READS_PER_REQ:] - return result - - get_attributes = functools.partialmethod(_get_attributes, False) - - async def write_attributes_safe( - self, attributes: dict[str, Any], manufacturer: int | None = None - ) -> None: - """Wrap `write_attributes` to throw an exception on attribute write failure.""" - - res = await self.write_attributes(attributes, manufacturer=manufacturer) - - for record in res[0]: - if record.status != Status.SUCCESS: - try: - name = self.cluster.attributes[record.attrid].name - value = attributes.get(name, "unknown") - except KeyError: - name = f"0x{record.attrid:04x}" - value = "unknown" - - raise HomeAssistantError( - f"Failed to write attribute {name}={value}: {record.status}", - ) - - def log(self, level, msg, *args, **kwargs): - """Log a message.""" - msg = f"[%s:%s]: {msg}" - args = (self._endpoint.device.nwk, self._id, *args) - _LOGGER.log(level, msg, *args, **kwargs) - - def __getattr__(self, name): - """Get attribute or a decorated cluster command.""" - if ( - hasattr(self._cluster, name) - and callable(getattr(self._cluster, name)) - and name not in UNPROXIED_CLUSTER_METHODS - ): - command = getattr(self._cluster, name) - wrapped_command = retry_request(command) - wrapped_command.__name__ = name - - return wrapped_command - return 
self.__getattribute__(name) - - -class ZDOClusterHandler(LogMixin): - """Cluster handler for ZDO events.""" - - def __init__(self, device) -> None: - """Initialize ZDOClusterHandler.""" - self.name = CLUSTER_HANDLER_ZDO - self._cluster = device.device.endpoints[0] - self._zha_device = device - self._status = ClusterHandlerStatus.CREATED - self._unique_id = f"{device.ieee!s}:{device.name}_ZDO" - self._cluster.add_listener(self) - - @property - def unique_id(self): - """Return the unique id for this cluster handler.""" - return self._unique_id - - @property - def cluster(self): - """Return the aigpy cluster for this cluster handler.""" - return self._cluster - - @property - def status(self): - """Return the status of the cluster handler.""" - return self._status - - @callback - def device_announce(self, zigpy_device): - """Device announce handler.""" - - @callback - def permit_duration(self, duration): - """Permit handler.""" - - async def async_initialize(self, from_cache): - """Initialize cluster handler.""" - self._status = ClusterHandlerStatus.INITIALIZED - - async def async_configure(self): - """Configure cluster handler.""" - self._status = ClusterHandlerStatus.CONFIGURED - - def log(self, level, msg, *args, **kwargs): - """Log a message.""" - msg = f"[%s:ZDO](%s): {msg}" - args = (self._zha_device.nwk, self._zha_device.model, *args) - _LOGGER.log(level, msg, *args, **kwargs) - - -class ClientClusterHandler(ClusterHandler): - """ClusterHandler for Zigbee client (output) clusters.""" - - @callback - def attribute_updated(self, attrid: int, value: Any, timestamp: Any) -> None: - """Handle an attribute updated on this cluster.""" - super().attribute_updated(attrid, value, timestamp) - - try: - attr_name = self._cluster.attributes[attrid].name - except KeyError: - attr_name = "Unknown" - - self.zha_send_event( - SIGNAL_ATTR_UPDATED, - { - ATTR_ATTRIBUTE_ID: attrid, - ATTR_ATTRIBUTE_NAME: attr_name, - ATTR_VALUE: value, - }, - ) - - @callback - def 
cluster_command(self, tsn, command_id, args): - """Handle a cluster command received on this cluster.""" - if ( - self._cluster.server_commands is not None - and self._cluster.server_commands.get(command_id) is not None - ): - self.zha_send_event(self._cluster.server_commands[command_id].name, args) diff --git a/homeassistant/components/zha/core/cluster_handlers/closures.py b/homeassistant/components/zha/core/cluster_handlers/closures.py deleted file mode 100644 index e96d6492beb..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/closures.py +++ /dev/null @@ -1,271 +0,0 @@ -"""Closures cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from typing import Any - -import zigpy.types as t -from zigpy.zcl.clusters.closures import ConfigStatus, DoorLock, Shade, WindowCovering - -from homeassistant.core import callback - -from .. import registries -from ..const import REPORT_CONFIG_IMMEDIATE, SIGNAL_ATTR_UPDATED -from . import AttrReportConfig, ClientClusterHandler, ClusterHandler - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DoorLock.cluster_id) -class DoorLockClusterHandler(ClusterHandler): - """Door lock cluster handler.""" - - _value_attribute = 0 - REPORT_CONFIG = ( - AttrReportConfig( - attr=DoorLock.AttributeDefs.lock_state.name, - config=REPORT_CONFIG_IMMEDIATE, - ), - ) - - async def async_update(self): - """Retrieve latest state.""" - result = await self.get_attribute_value( - DoorLock.AttributeDefs.lock_state.name, from_cache=True - ) - if result is not None: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - DoorLock.AttributeDefs.lock_state.id, - DoorLock.AttributeDefs.lock_state.name, - result, - ) - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle a cluster command received on this cluster.""" - - if ( - self._cluster.client_commands is None - or self._cluster.client_commands.get(command_id) is None - ): - return - - command_name = 
self._cluster.client_commands[command_id].name - - if command_name == DoorLock.ClientCommandDefs.operation_event_notification.name: - self.zha_send_event( - command_name, - { - "source": args[0].name, - "operation": args[1].name, - "code_slot": (args[2] + 1), # start code slots at 1 - }, - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute update from lock cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value - ) - if attrid == self._value_attribute: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value - ) - - async def async_set_user_code(self, code_slot: int, user_code: str) -> None: - """Set the user code for the code slot.""" - - await self.set_pin_code( - code_slot - 1, # start code slots at 1, Zigbee internals use 0 - DoorLock.UserStatus.Enabled, - DoorLock.UserType.Unrestricted, - user_code, - ) - - async def async_enable_user_code(self, code_slot: int) -> None: - """Enable the code slot.""" - - await self.set_user_status(code_slot - 1, DoorLock.UserStatus.Enabled) - - async def async_disable_user_code(self, code_slot: int) -> None: - """Disable the code slot.""" - - await self.set_user_status(code_slot - 1, DoorLock.UserStatus.Disabled) - - async def async_get_user_code(self, code_slot: int) -> int: - """Get the user code from the code slot.""" - - return await self.get_pin_code(code_slot - 1) - - async def async_clear_user_code(self, code_slot: int) -> None: - """Clear the code slot.""" - - await self.clear_pin_code(code_slot - 1) - - async def async_clear_all_user_codes(self) -> None: - """Clear all code slots.""" - - await self.clear_all_pin_codes() - - async def async_set_user_type(self, code_slot: int, user_type: str) -> None: - """Set user type.""" - - await self.set_user_type(code_slot - 1, user_type) - - async def async_get_user_type(self, code_slot: int) -> 
str: - """Get user type.""" - - return await self.get_user_type(code_slot - 1) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Shade.cluster_id) -class ShadeClusterHandler(ClusterHandler): - """Shade cluster handler.""" - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(WindowCovering.cluster_id) -class WindowCoveringClientClusterHandler(ClientClusterHandler): - """Window client cluster handler.""" - - -@registries.BINDABLE_CLUSTERS.register(WindowCovering.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(WindowCovering.cluster_id) -class WindowCoveringClusterHandler(ClusterHandler): - """Window cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=WindowCovering.AttributeDefs.current_position_lift_percentage.name, - config=REPORT_CONFIG_IMMEDIATE, - ), - AttrReportConfig( - attr=WindowCovering.AttributeDefs.current_position_tilt_percentage.name, - config=REPORT_CONFIG_IMMEDIATE, - ), - ) - - ZCL_INIT_ATTRS = { - WindowCovering.AttributeDefs.window_covering_type.name: True, - WindowCovering.AttributeDefs.window_covering_mode.name: True, - WindowCovering.AttributeDefs.config_status.name: True, - WindowCovering.AttributeDefs.installed_closed_limit_lift.name: True, - WindowCovering.AttributeDefs.installed_closed_limit_tilt.name: True, - WindowCovering.AttributeDefs.installed_open_limit_lift.name: True, - WindowCovering.AttributeDefs.installed_open_limit_tilt.name: True, - } - - async def async_update(self): - """Retrieve latest state.""" - results = await self.get_attributes( - [ - WindowCovering.AttributeDefs.current_position_lift_percentage.name, - WindowCovering.AttributeDefs.current_position_tilt_percentage.name, - ], - from_cache=False, - only_cache=False, - ) - self.debug( - "read current_position_lift_percentage and current_position_tilt_percentage - results: %s", - results, - ) - if ( - results - and results.get( - WindowCovering.AttributeDefs.current_position_lift_percentage.name - ) - is not None - ): - # the 
100 - value is because we need to invert the value before giving it to the entity - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - WindowCovering.AttributeDefs.current_position_lift_percentage.id, - WindowCovering.AttributeDefs.current_position_lift_percentage.name, - 100 - - results.get( - WindowCovering.AttributeDefs.current_position_lift_percentage.name - ), - ) - if ( - results - and results.get( - WindowCovering.AttributeDefs.current_position_tilt_percentage.name - ) - is not None - ): - # the 100 - value is because we need to invert the value before giving it to the entity - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - WindowCovering.AttributeDefs.current_position_tilt_percentage.id, - WindowCovering.AttributeDefs.current_position_tilt_percentage.name, - 100 - - results.get( - WindowCovering.AttributeDefs.current_position_tilt_percentage.name - ), - ) - - @property - def inverted(self): - """Return true if the window covering is inverted.""" - config_status = self.cluster.get( - WindowCovering.AttributeDefs.config_status.name - ) - return ( - config_status is not None - and ConfigStatus.Open_up_commands_reversed in ConfigStatus(config_status) - ) - - @property - def current_position_lift_percentage(self) -> t.uint16_t | None: - """Return the current lift percentage of the window covering.""" - lift_percentage = self.cluster.get( - WindowCovering.AttributeDefs.current_position_lift_percentage.name - ) - if lift_percentage is not None: - # the 100 - value is because we need to invert the value before giving it to the entity - lift_percentage = 100 - lift_percentage - return lift_percentage - - @property - def current_position_tilt_percentage(self) -> t.uint16_t | None: - """Return the current tilt percentage of the window covering.""" - tilt_percentage = self.cluster.get( - WindowCovering.AttributeDefs.current_position_tilt_percentage.name - ) - if tilt_percentage is not None: - # the 100 - value is because we 
need to invert the value before giving it to the entity - tilt_percentage = 100 - tilt_percentage - return tilt_percentage - - @property - def installed_open_limit_lift(self) -> t.uint16_t | None: - """Return the installed open lift limit of the window covering.""" - return self.cluster.get( - WindowCovering.AttributeDefs.installed_open_limit_lift.name - ) - - @property - def installed_closed_limit_lift(self) -> t.uint16_t | None: - """Return the installed closed lift limit of the window covering.""" - return self.cluster.get( - WindowCovering.AttributeDefs.installed_closed_limit_lift.name - ) - - @property - def installed_open_limit_tilt(self) -> t.uint16_t | None: - """Return the installed open tilt limit of the window covering.""" - return self.cluster.get( - WindowCovering.AttributeDefs.installed_open_limit_tilt.name - ) - - @property - def installed_closed_limit_tilt(self) -> t.uint16_t | None: - """Return the installed closed tilt limit of the window covering.""" - return self.cluster.get( - WindowCovering.AttributeDefs.installed_closed_limit_tilt.name - ) - - @property - def window_covering_type(self) -> WindowCovering.WindowCoveringType | None: - """Return the window covering type.""" - return self.cluster.get(WindowCovering.AttributeDefs.window_covering_type.name) diff --git a/homeassistant/components/zha/core/cluster_handlers/general.py b/homeassistant/components/zha/core/cluster_handlers/general.py deleted file mode 100644 index 438fc6b1723..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/general.py +++ /dev/null @@ -1,690 +0,0 @@ -"""General cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from collections.abc import Coroutine -from typing import TYPE_CHECKING, Any - -from zhaquirks.quirk_ids import TUYA_PLUG_ONOFF -import zigpy.exceptions -import zigpy.types as t -import zigpy.zcl -from zigpy.zcl.clusters.general import ( - Alarms, - AnalogInput, - AnalogOutput, - AnalogValue, - 
ApplianceControl, - Basic, - BinaryInput, - BinaryOutput, - BinaryValue, - Commissioning, - DeviceTemperature, - GreenPowerProxy, - Groups, - Identify, - LevelControl, - MultistateInput, - MultistateOutput, - MultistateValue, - OnOff, - OnOffConfiguration, - Ota, - Partition, - PollControl, - PowerConfiguration, - PowerProfile, - RSSILocation, - Scenes, - Time, -) -from zigpy.zcl.foundation import Status - -from homeassistant.core import callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.event import async_call_later - -from .. import registries -from ..const import ( - REPORT_CONFIG_ASAP, - REPORT_CONFIG_BATTERY_SAVE, - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_IMMEDIATE, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_MIN_INT, - SIGNAL_ATTR_UPDATED, - SIGNAL_MOVE_LEVEL, - SIGNAL_SET_LEVEL, - SIGNAL_UPDATE_DEVICE, -) -from . import ( - AttrReportConfig, - ClientClusterHandler, - ClusterHandler, - parse_and_log_command, -) -from .helpers import is_hue_motion_sensor - -if TYPE_CHECKING: - from ..endpoint import Endpoint - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Alarms.cluster_id) -class AlarmsClusterHandler(ClusterHandler): - """Alarms cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInput.cluster_id) -class AnalogInputClusterHandler(ClusterHandler): - """Analog Input cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=AnalogInput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.BINDABLE_CLUSTERS.register(AnalogOutput.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutput.cluster_id) -class AnalogOutputClusterHandler(ClusterHandler): - """Analog Output cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=AnalogOutput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - ZCL_INIT_ATTRS = { - AnalogOutput.AttributeDefs.min_present_value.name: True, - 
AnalogOutput.AttributeDefs.max_present_value.name: True, - AnalogOutput.AttributeDefs.resolution.name: True, - AnalogOutput.AttributeDefs.relinquish_default.name: True, - AnalogOutput.AttributeDefs.description.name: True, - AnalogOutput.AttributeDefs.engineering_units.name: True, - AnalogOutput.AttributeDefs.application_type.name: True, - } - - @property - def present_value(self) -> float | None: - """Return cached value of present_value.""" - return self.cluster.get(AnalogOutput.AttributeDefs.present_value.name) - - @property - def min_present_value(self) -> float | None: - """Return cached value of min_present_value.""" - return self.cluster.get(AnalogOutput.AttributeDefs.min_present_value.name) - - @property - def max_present_value(self) -> float | None: - """Return cached value of max_present_value.""" - return self.cluster.get(AnalogOutput.AttributeDefs.max_present_value.name) - - @property - def resolution(self) -> float | None: - """Return cached value of resolution.""" - return self.cluster.get(AnalogOutput.AttributeDefs.resolution.name) - - @property - def relinquish_default(self) -> float | None: - """Return cached value of relinquish_default.""" - return self.cluster.get(AnalogOutput.AttributeDefs.relinquish_default.name) - - @property - def description(self) -> str | None: - """Return cached value of description.""" - return self.cluster.get(AnalogOutput.AttributeDefs.description.name) - - @property - def engineering_units(self) -> int | None: - """Return cached value of engineering_units.""" - return self.cluster.get(AnalogOutput.AttributeDefs.engineering_units.name) - - @property - def application_type(self) -> int | None: - """Return cached value of application_type.""" - return self.cluster.get(AnalogOutput.AttributeDefs.application_type.name) - - async def async_set_present_value(self, value: float) -> None: - """Update present_value.""" - await self.write_attributes_safe( - {AnalogOutput.AttributeDefs.present_value.name: value} - ) - - 
-@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValue.cluster_id) -class AnalogValueClusterHandler(ClusterHandler): - """Analog Value cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=AnalogValue.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceControl.cluster_id) -class ApplianceControlClusterHandler(ClusterHandler): - """Appliance Control cluster handler.""" - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(Basic.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Basic.cluster_id) -class BasicClusterHandler(ClusterHandler): - """Cluster handler to interact with the basic cluster.""" - - UNKNOWN = 0 - BATTERY = 3 - BIND: bool = False - - POWER_SOURCES = { - UNKNOWN: "Unknown", - 1: "Mains (single phase)", - 2: "Mains (3 phase)", - BATTERY: "Battery", - 4: "DC source", - 5: "Emergency mains constantly powered", - 6: "Emergency mains and transfer switch", - } - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Basic cluster handler.""" - super().__init__(cluster, endpoint) - if is_hue_motion_sensor(self) and self.cluster.endpoint.endpoint_id == 2: - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["trigger_indicator"] = True - elif ( - self.cluster.endpoint.manufacturer == "TexasInstruments" - and self.cluster.endpoint.model == "ti.router" - ): - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["transmit_power"] = True - elif self.cluster.endpoint.model == "lumi.curtain.agl001": - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["power_source"] = True - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInput.cluster_id) -class BinaryInputClusterHandler(ClusterHandler): - """Binary Input cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=BinaryInput.AttributeDefs.present_value.name, - 
config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutput.cluster_id) -class BinaryOutputClusterHandler(ClusterHandler): - """Binary Output cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=BinaryOutput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValue.cluster_id) -class BinaryValueClusterHandler(ClusterHandler): - """Binary Value cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=BinaryValue.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Commissioning.cluster_id) -class CommissioningClusterHandler(ClusterHandler): - """Commissioning cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DeviceTemperature.cluster_id) -class DeviceTemperatureClusterHandler(ClusterHandler): - """Device Temperature cluster handler.""" - - REPORT_CONFIG = ( - { - "attr": DeviceTemperature.AttributeDefs.current_temperature.name, - "config": (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), - }, - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(GreenPowerProxy.cluster_id) -class GreenPowerProxyClusterHandler(ClusterHandler): - """Green Power Proxy cluster handler.""" - - BIND: bool = False - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Groups.cluster_id) -class GroupsClusterHandler(ClusterHandler): - """Groups cluster handler.""" - - BIND: bool = False - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Identify.cluster_id) -class IdentifyClusterHandler(ClusterHandler): - """Identify cluster handler.""" - - BIND: bool = False - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - cmd = parse_and_log_command(self, tsn, command_id, args) - - if cmd == Identify.ServerCommandDefs.trigger_effect.name: - 
self.async_send_signal(f"{self.unique_id}_{cmd}", args[0]) - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(LevelControl.cluster_id) -class LevelControlClientClusterHandler(ClientClusterHandler): - """LevelControl client cluster.""" - - -@registries.BINDABLE_CLUSTERS.register(LevelControl.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LevelControl.cluster_id) -class LevelControlClusterHandler(ClusterHandler): - """Cluster handler for the LevelControl Zigbee cluster.""" - - CURRENT_LEVEL = 0 - REPORT_CONFIG = ( - AttrReportConfig( - attr=LevelControl.AttributeDefs.current_level.name, - config=REPORT_CONFIG_ASAP, - ), - ) - ZCL_INIT_ATTRS = { - LevelControl.AttributeDefs.on_off_transition_time.name: True, - LevelControl.AttributeDefs.on_level.name: True, - LevelControl.AttributeDefs.on_transition_time.name: True, - LevelControl.AttributeDefs.off_transition_time.name: True, - LevelControl.AttributeDefs.default_move_rate.name: True, - LevelControl.AttributeDefs.start_up_current_level.name: True, - } - - @property - def current_level(self) -> int | None: - """Return cached value of the current_level attribute.""" - return self.cluster.get(LevelControl.AttributeDefs.current_level.name) - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - cmd = parse_and_log_command(self, tsn, command_id, args) - - if cmd in ( - LevelControl.ServerCommandDefs.move_to_level.name, - LevelControl.ServerCommandDefs.move_to_level_with_on_off.name, - ): - self.dispatch_level_change(SIGNAL_SET_LEVEL, args[0]) - elif cmd in ( - LevelControl.ServerCommandDefs.move.name, - LevelControl.ServerCommandDefs.move_with_on_off.name, - ): - # We should dim slowly -- for now, just step once - rate = args[1] - if args[0] == 0xFF: - rate = 10 # Should read default move rate - self.dispatch_level_change(SIGNAL_MOVE_LEVEL, -rate if args[0] else rate) - elif cmd in ( - LevelControl.ServerCommandDefs.step.name, - 
LevelControl.ServerCommandDefs.step_with_on_off.name, - ): - # Step (technically may change on/off) - self.dispatch_level_change( - SIGNAL_MOVE_LEVEL, -args[1] if args[0] else args[1] - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - self.debug("received attribute: %s update with value: %s", attrid, value) - if attrid == self.CURRENT_LEVEL: - self.dispatch_level_change(SIGNAL_SET_LEVEL, value) - - def dispatch_level_change(self, command, level): - """Dispatch level change.""" - self.async_send_signal(f"{self.unique_id}_{command}", level) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInput.cluster_id) -class MultistateInputClusterHandler(ClusterHandler): - """Multistate Input cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=MultistateInput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateOutput.cluster_id) -class MultistateOutputClusterHandler(ClusterHandler): - """Multistate Output cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=MultistateOutput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValue.cluster_id) -class MultistateValueClusterHandler(ClusterHandler): - """Multistate Value cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=MultistateValue.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(OnOff.cluster_id) -class OnOffClientClusterHandler(ClientClusterHandler): - """OnOff client cluster handler.""" - - -@registries.BINDABLE_CLUSTERS.register(OnOff.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OnOff.cluster_id) -class OnOffClusterHandler(ClusterHandler): - """Cluster handler for the OnOff Zigbee 
cluster.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=OnOff.AttributeDefs.on_off.name, config=REPORT_CONFIG_IMMEDIATE - ), - ) - ZCL_INIT_ATTRS = { - OnOff.AttributeDefs.start_up_on_off.name: True, - } - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize OnOffClusterHandler.""" - super().__init__(cluster, endpoint) - self._off_listener = None - - if endpoint.device.quirk_id == TUYA_PLUG_ONOFF: - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["backlight_mode"] = True - self.ZCL_INIT_ATTRS["power_on_state"] = True - self.ZCL_INIT_ATTRS["child_lock"] = True - - @classmethod - def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: - """Filter the cluster match for specific devices.""" - return not ( - cluster.endpoint.device.manufacturer == "Konke" - and cluster.endpoint.device.model - in ("3AFE280100510001", "3AFE170100510001") - ) - - @property - def on_off(self) -> bool | None: - """Return cached value of on/off attribute.""" - return self.cluster.get(OnOff.AttributeDefs.on_off.name) - - async def turn_on(self) -> None: - """Turn the on off cluster on.""" - result = await self.on() - if result[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to turn on: {result[1]}") - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.true) - - async def turn_off(self) -> None: - """Turn the on off cluster off.""" - result = await self.off() - if result[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to turn off: {result[1]}") - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.false) - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - cmd = parse_and_log_command(self, tsn, command_id, args) - - if cmd in ( - OnOff.ServerCommandDefs.off.name, - OnOff.ServerCommandDefs.off_with_effect.name, - ): - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, 
t.Bool.false) - elif cmd in ( - OnOff.ServerCommandDefs.on.name, - OnOff.ServerCommandDefs.on_with_recall_global_scene.name, - ): - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.true) - elif cmd == OnOff.ServerCommandDefs.on_with_timed_off.name: - should_accept = args[0] - on_time = args[1] - # 0 is always accept 1 is only accept when already on - if should_accept == 0 or (should_accept == 1 and bool(self.on_off)): - if self._off_listener is not None: - self._off_listener() - self._off_listener = None - self.cluster.update_attribute( - OnOff.AttributeDefs.on_off.id, t.Bool.true - ) - if on_time > 0: - self._off_listener = async_call_later( - self._endpoint.device.hass, - (on_time / 10), # value is in 10ths of a second - self.set_to_off, - ) - elif cmd == "toggle": - self.cluster.update_attribute( - OnOff.AttributeDefs.on_off.id, not bool(self.on_off) - ) - - @callback - def set_to_off(self, *_): - """Set the state to off.""" - self._off_listener = None - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.false) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - if attrid == OnOff.AttributeDefs.on_off.id: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - OnOff.AttributeDefs.on_off.name, - value, - ) - - async def async_update(self): - """Initialize cluster handler.""" - if self.cluster.is_client: - return - from_cache = not self._endpoint.device.is_mains_powered - self.debug("attempting to update onoff state - from cache: %s", from_cache) - await self.get_attribute_value( - OnOff.AttributeDefs.on_off.id, from_cache=from_cache - ) - await super().async_update() - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OnOffConfiguration.cluster_id) -class OnOffConfigurationClusterHandler(ClusterHandler): - """OnOff Configuration cluster handler.""" - - 
-@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Ota.cluster_id) -class OtaClusterHandler(ClusterHandler): - """OTA cluster handler.""" - - BIND: bool = False - - # Some devices have this cluster in the wrong collection (e.g. Third Reality) - ZCL_INIT_ATTRS = { - Ota.AttributeDefs.current_file_version.name: True, - } - - @property - def current_file_version(self) -> int | None: - """Return cached value of current_file_version attribute.""" - return self.cluster.get(Ota.AttributeDefs.current_file_version.name) - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Ota.cluster_id) -class OtaClientClusterHandler(ClientClusterHandler): - """OTA client cluster handler.""" - - BIND: bool = False - - ZCL_INIT_ATTRS = { - Ota.AttributeDefs.current_file_version.name: True, - } - - @callback - def attribute_updated(self, attrid: int, value: Any, timestamp: Any) -> None: - """Handle an attribute updated on this cluster.""" - # We intentionally avoid the `ClientClusterHandler` attribute update handler: - # it emits a logbook event on every update, which pollutes the logbook - ClusterHandler.attribute_updated(self, attrid, value, timestamp) - - @property - def current_file_version(self) -> int | None: - """Return cached value of current_file_version attribute.""" - return self.cluster.get(Ota.AttributeDefs.current_file_version.name) - - @callback - def cluster_command( - self, tsn: int, command_id: int, args: list[Any] | None - ) -> None: - """Handle OTA commands.""" - if command_id not in self.cluster.server_commands: - return - - signal_id = self._endpoint.unique_id.split("-")[0] - cmd_name = self.cluster.server_commands[command_id].name - - if cmd_name == Ota.ServerCommandDefs.query_next_image.name: - assert args - - current_file_version = args[3] - self.cluster.update_attribute( - Ota.AttributeDefs.current_file_version.id, current_file_version - ) - self.async_send_signal( - SIGNAL_UPDATE_DEVICE.format(signal_id), current_file_version - ) - - 
-@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Partition.cluster_id) -class PartitionClusterHandler(ClusterHandler): - """Partition cluster handler.""" - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(PollControl.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PollControl.cluster_id) -class PollControlClusterHandler(ClusterHandler): - """Poll Control cluster handler.""" - - CHECKIN_INTERVAL = 55 * 60 * 4 # 55min - CHECKIN_FAST_POLL_TIMEOUT = 2 * 4 # 2s - LONG_POLL = 6 * 4 # 6s - _IGNORED_MANUFACTURER_ID = { - 4476, - } # IKEA - - async def async_configure_cluster_handler_specific(self) -> None: - """Configure cluster handler: set check-in interval.""" - await self.write_attributes_safe( - {PollControl.AttributeDefs.checkin_interval.name: self.CHECKIN_INTERVAL} - ) - - @callback - def cluster_command( - self, tsn: int, command_id: int, args: list[Any] | None - ) -> None: - """Handle commands received to this cluster.""" - if command_id in self.cluster.client_commands: - cmd_name = self.cluster.client_commands[command_id].name - else: - cmd_name = command_id - - self.debug("Received %s tsn command '%s': %s", tsn, cmd_name, args) - self.zha_send_event(cmd_name, args) - if cmd_name == PollControl.ClientCommandDefs.checkin.name: - self.cluster.create_catching_task(self.check_in_response(tsn)) - - async def check_in_response(self, tsn: int) -> None: - """Respond to checkin command.""" - await self.checkin_response(True, self.CHECKIN_FAST_POLL_TIMEOUT, tsn=tsn) - if self._endpoint.device.manufacturer_code not in self._IGNORED_MANUFACTURER_ID: - await self.set_long_poll_interval(self.LONG_POLL) - await self.fast_poll_stop() - - @callback - def skip_manufacturer_id(self, manufacturer_code: int) -> None: - """Block a specific manufacturer id from changing default polling.""" - self._IGNORED_MANUFACTURER_ID.add(manufacturer_code) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PowerConfiguration.cluster_id) -class 
PowerConfigurationClusterHandler(ClusterHandler): - """Cluster handler for the zigbee power configuration cluster.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=PowerConfiguration.AttributeDefs.battery_voltage.name, - config=REPORT_CONFIG_BATTERY_SAVE, - ), - AttrReportConfig( - attr=PowerConfiguration.AttributeDefs.battery_percentage_remaining.name, - config=REPORT_CONFIG_BATTERY_SAVE, - ), - ) - - def async_initialize_cluster_handler_specific(self, from_cache: bool) -> Coroutine: - """Initialize cluster handler specific attrs.""" - attributes = [ - PowerConfiguration.AttributeDefs.battery_size.name, - PowerConfiguration.AttributeDefs.battery_quantity.name, - ] - return self.get_attributes( - attributes, from_cache=from_cache, only_cache=from_cache - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PowerProfile.cluster_id) -class PowerProfileClusterHandler(ClusterHandler): - """Power Profile cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(RSSILocation.cluster_id) -class RSSILocationClusterHandler(ClusterHandler): - """RSSI Location cluster handler.""" - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Scenes.cluster_id) -class ScenesClientClusterHandler(ClientClusterHandler): - """Scenes cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Scenes.cluster_id) -class ScenesClusterHandler(ClusterHandler): - """Scenes cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Time.cluster_id) -class TimeClusterHandler(ClusterHandler): - """Time cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/helpers.py b/homeassistant/components/zha/core/cluster_handlers/helpers.py deleted file mode 100644 index 46557bf23a8..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/helpers.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Helpers for use with ZHA Zigbee cluster handlers.""" - -from . 
import ClusterHandler - - -def is_hue_motion_sensor(cluster_handler: ClusterHandler) -> bool: - """Return true if the manufacturer and model match known Hue motion sensor models.""" - return cluster_handler.cluster.endpoint.manufacturer in ( - "Philips", - "Signify Netherlands B.V.", - ) and cluster_handler.cluster.endpoint.model in ( - "SML001", - "SML002", - "SML003", - "SML004", - ) - - -def is_sonoff_presence_sensor(cluster_handler: ClusterHandler) -> bool: - """Return true if the manufacturer and model match known Sonoff sensor models.""" - return cluster_handler.cluster.endpoint.manufacturer in ( - "SONOFF", - ) and cluster_handler.cluster.endpoint.model in ("SNZB-06P",) diff --git a/homeassistant/components/zha/core/cluster_handlers/homeautomation.py b/homeassistant/components/zha/core/cluster_handlers/homeautomation.py deleted file mode 100644 index b287cb98f6a..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/homeautomation.py +++ /dev/null @@ -1,236 +0,0 @@ -"""Home automation cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -import enum - -from zigpy.zcl.clusters.homeautomation import ( - ApplianceEventAlerts, - ApplianceIdentification, - ApplianceStatistics, - Diagnostic, - ElectricalMeasurement, - MeterIdentification, -) - -from .. import registries -from ..const import ( - CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_OP, - SIGNAL_ATTR_UPDATED, -) -from . 
import AttrReportConfig, ClusterHandler - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceEventAlerts.cluster_id) -class ApplianceEventAlertsClusterHandler(ClusterHandler): - """Appliance Event Alerts cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceIdentification.cluster_id) -class ApplianceIdentificationClusterHandler(ClusterHandler): - """Appliance Identification cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceStatistics.cluster_id) -class ApplianceStatisticsClusterHandler(ClusterHandler): - """Appliance Statistics cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Diagnostic.cluster_id) -class DiagnosticClusterHandler(ClusterHandler): - """Diagnostic cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ElectricalMeasurement.cluster_id) -class ElectricalMeasurementClusterHandler(ClusterHandler): - """Cluster handler that polls active power level.""" - - CLUSTER_HANDLER_NAME = CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT - - class MeasurementType(enum.IntFlag): - """Measurement types.""" - - ACTIVE_MEASUREMENT = 1 - REACTIVE_MEASUREMENT = 2 - APPARENT_MEASUREMENT = 4 - PHASE_A_MEASUREMENT = 8 - PHASE_B_MEASUREMENT = 16 - PHASE_C_MEASUREMENT = 32 - DC_MEASUREMENT = 64 - HARMONICS_MEASUREMENT = 128 - POWER_QUALITY_MEASUREMENT = 256 - - REPORT_CONFIG = ( - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.active_power.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.active_power_max.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.apparent_power.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.rms_current.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.rms_current_max.name, - config=REPORT_CONFIG_DEFAULT, - 
), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.rms_voltage.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.rms_voltage_max.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.ac_frequency.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.ac_frequency_max.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - ZCL_INIT_ATTRS = { - ElectricalMeasurement.AttributeDefs.ac_current_divisor.name: True, - ElectricalMeasurement.AttributeDefs.ac_current_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.ac_power_divisor.name: True, - ElectricalMeasurement.AttributeDefs.ac_power_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.ac_voltage_divisor.name: True, - ElectricalMeasurement.AttributeDefs.ac_voltage_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.ac_frequency_divisor.name: True, - ElectricalMeasurement.AttributeDefs.ac_frequency_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.measurement_type.name: True, - ElectricalMeasurement.AttributeDefs.power_divisor.name: True, - ElectricalMeasurement.AttributeDefs.power_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.power_factor.name: True, - } - - async def async_update(self): - """Retrieve latest state.""" - self.debug("async_update") - - # This is a polling cluster handler. Don't allow cache. 
- attrs = [ - a["attr"] - for a in self.REPORT_CONFIG - if a["attr"] not in self.cluster.unsupported_attributes - ] - result = await self.get_attributes(attrs, from_cache=False, only_cache=False) - if result: - for attr, value in result.items(): - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - self.cluster.find_attribute(attr).id, - attr, - value, - ) - - @property - def ac_current_divisor(self) -> int: - """Return ac current divisor.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_current_divisor.name - ) - or 1 - ) - - @property - def ac_current_multiplier(self) -> int: - """Return ac current multiplier.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_current_multiplier.name - ) - or 1 - ) - - @property - def ac_voltage_divisor(self) -> int: - """Return ac voltage divisor.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_voltage_divisor.name - ) - or 1 - ) - - @property - def ac_voltage_multiplier(self) -> int: - """Return ac voltage multiplier.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_voltage_multiplier.name - ) - or 1 - ) - - @property - def ac_frequency_divisor(self) -> int: - """Return ac frequency divisor.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_frequency_divisor.name - ) - or 1 - ) - - @property - def ac_frequency_multiplier(self) -> int: - """Return ac frequency multiplier.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_frequency_multiplier.name - ) - or 1 - ) - - @property - def ac_power_divisor(self) -> int: - """Return active power divisor.""" - return self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_power_divisor.name, - self.cluster.get(ElectricalMeasurement.AttributeDefs.power_divisor.name) - or 1, - ) - - @property - def ac_power_multiplier(self) -> int: - """Return active power divisor.""" - return self.cluster.get( - 
ElectricalMeasurement.AttributeDefs.ac_power_multiplier.name, - self.cluster.get(ElectricalMeasurement.AttributeDefs.power_multiplier.name) - or 1, - ) - - @property - def measurement_type(self) -> str | None: - """Return Measurement type.""" - if ( - meas_type := self.cluster.get( - ElectricalMeasurement.AttributeDefs.measurement_type.name - ) - ) is None: - return None - - meas_type = self.MeasurementType(meas_type) - return ", ".join( - m.name - for m in self.MeasurementType - if m in meas_type and m.name is not None - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MeterIdentification.cluster_id) -class MeterIdentificationClusterHandler(ClusterHandler): - """Metering Identification cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/hvac.py b/homeassistant/components/zha/core/cluster_handlers/hvac.py deleted file mode 100644 index 1230549832b..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/hvac.py +++ /dev/null @@ -1,347 +0,0 @@ -"""HVAC cluster handlers module for Zigbee Home Automation. - -For more details about this component, please refer to the documentation at -https://home-assistant.io/integrations/zha/ -""" - -from __future__ import annotations - -from typing import Any - -from zigpy.zcl.clusters.hvac import ( - Dehumidification, - Fan, - Pump, - Thermostat, - UserInterface, -) - -from homeassistant.core import callback - -from .. import registries -from ..const import ( - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_MIN_INT, - REPORT_CONFIG_OP, - SIGNAL_ATTR_UPDATED, -) -from . 
import AttrReportConfig, ClusterHandler - -REPORT_CONFIG_CLIMATE = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 25) -REPORT_CONFIG_CLIMATE_DEMAND = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 5) -REPORT_CONFIG_CLIMATE_DISCRETE = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 1) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Dehumidification.cluster_id) -class DehumidificationClusterHandler(ClusterHandler): - """Dehumidification cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Fan.cluster_id) -class FanClusterHandler(ClusterHandler): - """Fan cluster handler.""" - - _value_attribute = 0 - - REPORT_CONFIG = ( - AttrReportConfig(attr=Fan.AttributeDefs.fan_mode.name, config=REPORT_CONFIG_OP), - ) - ZCL_INIT_ATTRS = {Fan.AttributeDefs.fan_mode_sequence.name: True} - - @property - def fan_mode(self) -> int | None: - """Return current fan mode.""" - return self.cluster.get(Fan.AttributeDefs.fan_mode.name) - - @property - def fan_mode_sequence(self) -> int | None: - """Return possible fan mode speeds.""" - return self.cluster.get(Fan.AttributeDefs.fan_mode_sequence.name) - - async def async_set_speed(self, value) -> None: - """Set the speed of the fan.""" - await self.write_attributes_safe({Fan.AttributeDefs.fan_mode.name: value}) - - async def async_update(self) -> None: - """Retrieve latest state.""" - await self.get_attribute_value( - Fan.AttributeDefs.fan_mode.name, from_cache=False - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute update from fan cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value - ) - if attr_name == "fan_mode": - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Pump.cluster_id) -class PumpClusterHandler(ClusterHandler): - """Pump cluster 
handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Thermostat.cluster_id) -class ThermostatClusterHandler(ClusterHandler): - """Thermostat cluster handler.""" - - REPORT_CONFIG: tuple[AttrReportConfig, ...] = ( - AttrReportConfig( - attr=Thermostat.AttributeDefs.local_temperature.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.occupied_cooling_setpoint.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.occupied_heating_setpoint.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.unoccupied_heating_setpoint.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.running_mode.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.running_state.name, - config=REPORT_CONFIG_CLIMATE_DEMAND, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.system_mode.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.occupancy.name, - config=REPORT_CONFIG_CLIMATE_DISCRETE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.pi_cooling_demand.name, - config=REPORT_CONFIG_CLIMATE_DEMAND, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.pi_heating_demand.name, - config=REPORT_CONFIG_CLIMATE_DEMAND, - ), - ) - ZCL_INIT_ATTRS: dict[str, bool] = { - Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name: True, - Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name: True, - Thermostat.AttributeDefs.abs_min_cool_setpoint_limit.name: True, - Thermostat.AttributeDefs.abs_max_cool_setpoint_limit.name: True, - Thermostat.AttributeDefs.ctrl_sequence_of_oper.name: False, - Thermostat.AttributeDefs.max_cool_setpoint_limit.name: True, - 
Thermostat.AttributeDefs.max_heat_setpoint_limit.name: True, - Thermostat.AttributeDefs.min_cool_setpoint_limit.name: True, - Thermostat.AttributeDefs.min_heat_setpoint_limit.name: True, - Thermostat.AttributeDefs.local_temperature_calibration.name: True, - Thermostat.AttributeDefs.setpoint_change_source.name: True, - } - - @property - def abs_max_cool_setpoint_limit(self) -> int: - """Absolute maximum cooling setpoint.""" - return self.cluster.get( - Thermostat.AttributeDefs.abs_max_cool_setpoint_limit.name, 3200 - ) - - @property - def abs_min_cool_setpoint_limit(self) -> int: - """Absolute minimum cooling setpoint.""" - return self.cluster.get( - Thermostat.AttributeDefs.abs_min_cool_setpoint_limit.name, 1600 - ) - - @property - def abs_max_heat_setpoint_limit(self) -> int: - """Absolute maximum heating setpoint.""" - return self.cluster.get( - Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name, 3000 - ) - - @property - def abs_min_heat_setpoint_limit(self) -> int: - """Absolute minimum heating setpoint.""" - return self.cluster.get( - Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name, 700 - ) - - @property - def ctrl_sequence_of_oper(self) -> int: - """Control Sequence of operations attribute.""" - return self.cluster.get( - Thermostat.AttributeDefs.ctrl_sequence_of_oper.name, 0xFF - ) - - @property - def max_cool_setpoint_limit(self) -> int: - """Maximum cooling setpoint.""" - sp_limit = self.cluster.get( - Thermostat.AttributeDefs.max_cool_setpoint_limit.name - ) - if sp_limit is None: - return self.abs_max_cool_setpoint_limit - return sp_limit - - @property - def min_cool_setpoint_limit(self) -> int: - """Minimum cooling setpoint.""" - sp_limit = self.cluster.get( - Thermostat.AttributeDefs.min_cool_setpoint_limit.name - ) - if sp_limit is None: - return self.abs_min_cool_setpoint_limit - return sp_limit - - @property - def max_heat_setpoint_limit(self) -> int: - """Maximum heating setpoint.""" - sp_limit = self.cluster.get( - 
Thermostat.AttributeDefs.max_heat_setpoint_limit.name - ) - if sp_limit is None: - return self.abs_max_heat_setpoint_limit - return sp_limit - - @property - def min_heat_setpoint_limit(self) -> int: - """Minimum heating setpoint.""" - sp_limit = self.cluster.get( - Thermostat.AttributeDefs.min_heat_setpoint_limit.name - ) - if sp_limit is None: - return self.abs_min_heat_setpoint_limit - return sp_limit - - @property - def local_temperature(self) -> int | None: - """Thermostat temperature.""" - return self.cluster.get(Thermostat.AttributeDefs.local_temperature.name) - - @property - def occupancy(self) -> int | None: - """Is occupancy detected.""" - return self.cluster.get(Thermostat.AttributeDefs.occupancy.name) - - @property - def occupied_cooling_setpoint(self) -> int | None: - """Temperature when room is occupied.""" - return self.cluster.get(Thermostat.AttributeDefs.occupied_cooling_setpoint.name) - - @property - def occupied_heating_setpoint(self) -> int | None: - """Temperature when room is occupied.""" - return self.cluster.get(Thermostat.AttributeDefs.occupied_heating_setpoint.name) - - @property - def pi_cooling_demand(self) -> int: - """Cooling demand.""" - return self.cluster.get(Thermostat.AttributeDefs.pi_cooling_demand.name) - - @property - def pi_heating_demand(self) -> int: - """Heating demand.""" - return self.cluster.get(Thermostat.AttributeDefs.pi_heating_demand.name) - - @property - def running_mode(self) -> int | None: - """Thermostat running mode.""" - return self.cluster.get(Thermostat.AttributeDefs.running_mode.name) - - @property - def running_state(self) -> int | None: - """Thermostat running state, state of heat, cool, fan relays.""" - return self.cluster.get(Thermostat.AttributeDefs.running_state.name) - - @property - def system_mode(self) -> int | None: - """System mode.""" - return self.cluster.get(Thermostat.AttributeDefs.system_mode.name) - - @property - def unoccupied_cooling_setpoint(self) -> int | None: - """Temperature when room 
is not occupied.""" - return self.cluster.get( - Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name - ) - - @property - def unoccupied_heating_setpoint(self) -> int | None: - """Temperature when room is not occupied.""" - return self.cluster.get( - Thermostat.AttributeDefs.unoccupied_heating_setpoint.name - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute update cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value - ) - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - attr_name, - value, - ) - - async def async_set_operation_mode(self, mode) -> bool: - """Set Operation mode.""" - await self.write_attributes_safe( - {Thermostat.AttributeDefs.system_mode.name: mode} - ) - return True - - async def async_set_heating_setpoint( - self, temperature: int, is_away: bool = False - ) -> bool: - """Set heating setpoint.""" - attr = ( - Thermostat.AttributeDefs.unoccupied_heating_setpoint.name - if is_away - else Thermostat.AttributeDefs.occupied_heating_setpoint.name - ) - await self.write_attributes_safe({attr: temperature}) - return True - - async def async_set_cooling_setpoint( - self, temperature: int, is_away: bool = False - ) -> bool: - """Set cooling setpoint.""" - attr = ( - Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name - if is_away - else Thermostat.AttributeDefs.occupied_cooling_setpoint.name - ) - await self.write_attributes_safe({attr: temperature}) - return True - - async def get_occupancy(self) -> bool | None: - """Get unreportable occupancy attribute.""" - res, fail = await self.read_attributes( - [Thermostat.AttributeDefs.occupancy.name] - ) - self.debug("read 'occupancy' attr, success: %s, fail: %s", res, fail) - if Thermostat.AttributeDefs.occupancy.name not in res: - return None - return bool(self.occupancy) - - 
-@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(UserInterface.cluster_id) -class UserInterfaceClusterHandler(ClusterHandler): - """User interface (thermostat) cluster handler.""" - - ZCL_INIT_ATTRS = {UserInterface.AttributeDefs.keypad_lockout.name: True} diff --git a/homeassistant/components/zha/core/cluster_handlers/lighting.py b/homeassistant/components/zha/core/cluster_handlers/lighting.py deleted file mode 100644 index bde0fdbb0e7..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/lighting.py +++ /dev/null @@ -1,196 +0,0 @@ -"""Lighting cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from functools import cached_property - -from zigpy.zcl.clusters.lighting import Ballast, Color - -from .. import registries -from ..const import REPORT_CONFIG_DEFAULT -from . import AttrReportConfig, ClientClusterHandler, ClusterHandler - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Ballast.cluster_id) -class BallastClusterHandler(ClusterHandler): - """Ballast cluster handler.""" - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Color.cluster_id) -class ColorClientClusterHandler(ClientClusterHandler): - """Color client cluster handler.""" - - -@registries.BINDABLE_CLUSTERS.register(Color.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Color.cluster_id) -class ColorClusterHandler(ClusterHandler): - """Color cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=Color.AttributeDefs.current_x.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Color.AttributeDefs.current_y.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Color.AttributeDefs.current_hue.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Color.AttributeDefs.current_saturation.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Color.AttributeDefs.color_temperature.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - 
MAX_MIREDS: int = 500 - MIN_MIREDS: int = 153 - ZCL_INIT_ATTRS = { - Color.AttributeDefs.color_mode.name: False, - Color.AttributeDefs.color_temp_physical_min.name: True, - Color.AttributeDefs.color_temp_physical_max.name: True, - Color.AttributeDefs.color_capabilities.name: True, - Color.AttributeDefs.color_loop_active.name: False, - Color.AttributeDefs.enhanced_current_hue.name: False, - Color.AttributeDefs.start_up_color_temperature.name: True, - Color.AttributeDefs.options.name: True, - } - - @cached_property - def color_capabilities(self) -> Color.ColorCapabilities: - """Return ZCL color capabilities of the light.""" - color_capabilities = self.cluster.get( - Color.AttributeDefs.color_capabilities.name - ) - if color_capabilities is None: - return Color.ColorCapabilities.XY_attributes - return Color.ColorCapabilities(color_capabilities) - - @property - def color_mode(self) -> int | None: - """Return cached value of the color_mode attribute.""" - return self.cluster.get(Color.AttributeDefs.color_mode.name) - - @property - def color_loop_active(self) -> int | None: - """Return cached value of the color_loop_active attribute.""" - return self.cluster.get(Color.AttributeDefs.color_loop_active.name) - - @property - def color_temperature(self) -> int | None: - """Return cached value of color temperature.""" - return self.cluster.get(Color.AttributeDefs.color_temperature.name) - - @property - def current_x(self) -> int | None: - """Return cached value of the current_x attribute.""" - return self.cluster.get(Color.AttributeDefs.current_x.name) - - @property - def current_y(self) -> int | None: - """Return cached value of the current_y attribute.""" - return self.cluster.get(Color.AttributeDefs.current_y.name) - - @property - def current_hue(self) -> int | None: - """Return cached value of the current_hue attribute.""" - return self.cluster.get(Color.AttributeDefs.current_hue.name) - - @property - def enhanced_current_hue(self) -> int | None: - """Return cached value 
of the enhanced_current_hue attribute.""" - return self.cluster.get(Color.AttributeDefs.enhanced_current_hue.name) - - @property - def current_saturation(self) -> int | None: - """Return cached value of the current_saturation attribute.""" - return self.cluster.get(Color.AttributeDefs.current_saturation.name) - - @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this cluster handler supports.""" - min_mireds = self.cluster.get( - Color.AttributeDefs.color_temp_physical_min.name, self.MIN_MIREDS - ) - if min_mireds == 0: - self.warning( - ( - "[Min mireds is 0, setting to %s] Please open an issue on the" - " quirks repo to have this device corrected" - ), - self.MIN_MIREDS, - ) - min_mireds = self.MIN_MIREDS - return min_mireds - - @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this cluster handler supports.""" - max_mireds = self.cluster.get( - Color.AttributeDefs.color_temp_physical_max.name, self.MAX_MIREDS - ) - if max_mireds == 0: - self.warning( - ( - "[Max mireds is 0, setting to %s] Please open an issue on the" - " quirks repo to have this device corrected" - ), - self.MAX_MIREDS, - ) - max_mireds = self.MAX_MIREDS - return max_mireds - - @property - def hs_supported(self) -> bool: - """Return True if the cluster handler supports hue and saturation.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.Hue_and_saturation in self.color_capabilities - ) - - @property - def enhanced_hue_supported(self) -> bool: - """Return True if the cluster handler supports enhanced hue and saturation.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.Enhanced_hue in self.color_capabilities - ) - - @property - def xy_supported(self) -> bool: - """Return True if the cluster handler supports xy.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.XY_attributes in self.color_capabilities - ) - - @property - def 
color_temp_supported(self) -> bool: - """Return True if the cluster handler supports color temperature.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.Color_temperature in self.color_capabilities - ) or self.color_temperature is not None - - @property - def color_loop_supported(self) -> bool: - """Return True if the cluster handler supports color loop.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.Color_loop in self.color_capabilities - ) - - @property - def options(self) -> Color.Options: - """Return ZCL options of the cluster handler.""" - return Color.Options(self.cluster.get(Color.AttributeDefs.options.name, 0)) - - @property - def execute_if_off_supported(self) -> bool: - """Return True if the cluster handler can execute commands when off.""" - return Color.Options.Execute_if_off in self.options diff --git a/homeassistant/components/zha/core/cluster_handlers/lightlink.py b/homeassistant/components/zha/core/cluster_handlers/lightlink.py deleted file mode 100644 index 85ec6905069..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/lightlink.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Lightlink cluster handlers module for Zigbee Home Automation.""" - -import zigpy.exceptions -from zigpy.zcl.clusters.lightlink import LightLink -from zigpy.zcl.foundation import GENERAL_COMMANDS, GeneralCommand - -from .. import registries -from . 
import ClusterHandler, ClusterHandlerStatus - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(LightLink.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LightLink.cluster_id) -class LightLinkClusterHandler(ClusterHandler): - """Lightlink cluster handler.""" - - BIND: bool = False - - async def async_configure(self) -> None: - """Add Coordinator to LightLink group.""" - - if self._endpoint.device.skip_configuration: - self._status = ClusterHandlerStatus.CONFIGURED - return - - application = self._endpoint.zigpy_endpoint.device.application - try: - coordinator = application.get_device(application.state.node_info.ieee) - except KeyError: - self.warning("Aborting - unable to locate required coordinator device.") - return - - try: - rsp = await self.cluster.get_group_identifiers(0) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as exc: - self.warning("Couldn't get list of groups: %s", str(exc)) - return - - if isinstance(rsp, GENERAL_COMMANDS[GeneralCommand.Default_Response].schema): - groups = [] - else: - groups = rsp.group_info_records - - if groups: - for group in groups: - self.debug("Adding coordinator to 0x%04x group id", group.group_id) - await coordinator.add_to_group(group.group_id) - else: - await coordinator.add_to_group(0x0000, name="Default Lightlink Group") diff --git a/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py b/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py deleted file mode 100644 index 9d5d68d2c7e..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py +++ /dev/null @@ -1,515 +0,0 @@ -"""Manufacturer specific cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Any - -from zhaquirks.inovelli.types import AllLEDEffectType, SingleLEDEffectType -from zhaquirks.quirk_ids import ( - DANFOSS_ALLY_THERMOSTAT, - TUYA_PLUG_MANUFACTURER, - 
XIAOMI_AQARA_VIBRATION_AQ1, -) -import zigpy.zcl -from zigpy.zcl import clusters -from zigpy.zcl.clusters.closures import DoorLock - -from homeassistant.core import callback - -from .. import registries -from ..const import ( - ATTR_ATTRIBUTE_ID, - ATTR_ATTRIBUTE_NAME, - ATTR_VALUE, - REPORT_CONFIG_ASAP, - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_IMMEDIATE, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_MIN_INT, - SIGNAL_ATTR_UPDATED, - UNKNOWN, -) -from . import AttrReportConfig, ClientClusterHandler, ClusterHandler -from .general import MultistateInputClusterHandler -from .homeautomation import DiagnosticClusterHandler -from .hvac import ThermostatClusterHandler, UserInterfaceClusterHandler - -if TYPE_CHECKING: - from ..endpoint import Endpoint - -_LOGGER = logging.getLogger(__name__) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - registries.SMARTTHINGS_HUMIDITY_CLUSTER -) -class SmartThingsHumidityClusterHandler(ClusterHandler): - """Smart Things Humidity cluster handler.""" - - REPORT_CONFIG = ( - { - "attr": "measured_value", - "config": (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), - }, - ) - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFD00) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFD00) -class OsramButtonClusterHandler(ClusterHandler): - """Osram button cluster handler.""" - - REPORT_CONFIG = () - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.PHILLIPS_REMOTE_CLUSTER) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(registries.PHILLIPS_REMOTE_CLUSTER) -class PhillipsRemoteClusterHandler(ClusterHandler): - """Phillips remote cluster handler.""" - - REPORT_CONFIG = () - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.TUYA_MANUFACTURER_CLUSTER) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - registries.TUYA_MANUFACTURER_CLUSTER -) -class TuyaClusterHandler(ClusterHandler): - """Cluster handler for the Tuya manufacturer Zigbee cluster.""" - - REPORT_CONFIG = () - - def 
__init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize TuyaClusterHandler.""" - super().__init__(cluster, endpoint) - if endpoint.device.quirk_id == TUYA_PLUG_MANUFACTURER: - self.ZCL_INIT_ATTRS = { - "backlight_mode": True, - "power_on_state": True, - } - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFCC0) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFCC0) -class OppleRemoteClusterHandler(ClusterHandler): - """Opple cluster handler.""" - - REPORT_CONFIG = () - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Opple cluster handler.""" - super().__init__(cluster, endpoint) - if self.cluster.endpoint.model == "lumi.motion.ac02": - self.ZCL_INIT_ATTRS = { - "detection_interval": True, - "motion_sensitivity": True, - "trigger_indicator": True, - } - elif self.cluster.endpoint.model == "lumi.motion.agl04": - self.ZCL_INIT_ATTRS = { - "detection_interval": True, - "motion_sensitivity": True, - } - elif self.cluster.endpoint.model == "lumi.motion.ac01": - self.ZCL_INIT_ATTRS = { - "presence": True, - "monitoring_mode": True, - "motion_sensitivity": True, - "approach_distance": True, - } - elif self.cluster.endpoint.model in ("lumi.plug.mmeu01", "lumi.plug.maeu01"): - self.ZCL_INIT_ATTRS = { - "power_outage_memory": True, - "consumer_connected": True, - } - elif self.cluster.endpoint.model == "aqara.feeder.acn001": - self.ZCL_INIT_ATTRS = { - "portions_dispensed": True, - "weight_dispensed": True, - "error_detected": True, - "disable_led_indicator": True, - "child_lock": True, - "feeding_mode": True, - "serving_size": True, - "portion_weight": True, - } - elif self.cluster.endpoint.model == "lumi.airrtc.agl001": - self.ZCL_INIT_ATTRS = { - "system_mode": True, - "preset": True, - "window_detection": True, - "valve_detection": True, - "valve_alarm": True, - "child_lock": True, - "away_preset_temperature": True, - "window_open": True, - "calibrated": True, - "schedule": True, - 
"sensor": True, - } - elif self.cluster.endpoint.model == "lumi.sensor_smoke.acn03": - self.ZCL_INIT_ATTRS = { - "buzzer_manual_mute": True, - "smoke_density": True, - "heartbeat_indicator": True, - "buzzer_manual_alarm": True, - "buzzer": True, - "linkage_alarm": True, - } - elif self.cluster.endpoint.model == "lumi.magnet.ac01": - self.ZCL_INIT_ATTRS = { - "detection_distance": True, - } - elif self.cluster.endpoint.model == "lumi.switch.acn047": - self.ZCL_INIT_ATTRS = { - "switch_mode": True, - "switch_type": True, - "startup_on_off": True, - "decoupled_mode": True, - } - elif self.cluster.endpoint.model == "lumi.curtain.agl001": - self.ZCL_INIT_ATTRS = { - "hooks_state": True, - "hooks_lock": True, - "positions_stored": True, - "light_level": True, - "hand_open": True, - } - - async def async_initialize_cluster_handler_specific(self, from_cache: bool) -> None: - """Initialize cluster handler specific.""" - if self.cluster.endpoint.model in ("lumi.motion.ac02", "lumi.motion.agl04"): - interval = self.cluster.get("detection_interval", self.cluster.get(0x0102)) - if interval is not None: - self.debug("Loaded detection interval at startup: %s", interval) - self.cluster.endpoint.ias_zone.reset_s = int(interval) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - registries.SMARTTHINGS_ACCELERATION_CLUSTER -) -class SmartThingsAccelerationClusterHandler(ClusterHandler): - """Smart Things Acceleration cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig(attr="acceleration", config=REPORT_CONFIG_ASAP), - AttrReportConfig(attr="x_axis", config=REPORT_CONFIG_ASAP), - AttrReportConfig(attr="y_axis", config=REPORT_CONFIG_ASAP), - AttrReportConfig(attr="z_axis", config=REPORT_CONFIG_ASAP), - ) - - @classmethod - def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: - """Filter the cluster match for specific devices.""" - return cluster.endpoint.device.manufacturer in ( - "CentraLite", - "Samjin", - "SmartThings", - ) - - @callback - def 
attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - try: - attr_name = self._cluster.attributes[attrid].name - except KeyError: - attr_name = UNKNOWN - - if attrid == self.value_attribute: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - attr_name, - value, - ) - return - - self.zha_send_event( - SIGNAL_ATTR_UPDATED, - { - ATTR_ATTRIBUTE_ID: attrid, - ATTR_ATTRIBUTE_NAME: attr_name, - ATTR_VALUE: value, - }, - ) - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(0xFC31) -class InovelliNotificationClientClusterHandler(ClientClusterHandler): - """Inovelli Notification cluster handler.""" - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle an attribute updated on this cluster.""" - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle a cluster command received on this cluster.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC31) -class InovelliConfigEntityClusterHandler(ClusterHandler): - """Inovelli Configuration Entity cluster handler.""" - - REPORT_CONFIG = () - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Inovelli cluster handler.""" - super().__init__(cluster, endpoint) - if self.cluster.endpoint.model == "VZM31-SN": - self.ZCL_INIT_ATTRS = { - "dimming_speed_up_remote": True, - "dimming_speed_up_local": True, - "ramp_rate_off_to_on_local": True, - "ramp_rate_off_to_on_remote": True, - "dimming_speed_down_remote": True, - "dimming_speed_down_local": True, - "ramp_rate_on_to_off_local": True, - "ramp_rate_on_to_off_remote": True, - "minimum_level": True, - "maximum_level": True, - "invert_switch": True, - "auto_off_timer": True, - "default_level_local": True, - "default_level_remote": True, - "state_after_power_restored": True, - "load_level_indicator_timeout": True, - "active_power_reports": True, - 
"periodic_power_and_energy_reports": True, - "active_energy_reports": True, - "power_type": False, - "switch_type": False, - "increased_non_neutral_output": True, - "button_delay": False, - "smart_bulb_mode": False, - "double_tap_up_enabled": True, - "double_tap_down_enabled": True, - "double_tap_up_level": True, - "double_tap_down_level": True, - "led_color_when_on": True, - "led_color_when_off": True, - "led_intensity_when_on": True, - "led_intensity_when_off": True, - "led_scaling_mode": True, - "aux_switch_scenes": True, - "binding_off_to_on_sync_level": True, - "local_protection": False, - "output_mode": False, - "on_off_led_mode": True, - "firmware_progress_led": True, - "relay_click_in_on_off_mode": True, - "disable_clear_notifications_double_tap": True, - } - elif self.cluster.endpoint.model == "VZM35-SN": - self.ZCL_INIT_ATTRS = { - "dimming_speed_up_remote": True, - "dimming_speed_up_local": True, - "ramp_rate_off_to_on_local": True, - "ramp_rate_off_to_on_remote": True, - "dimming_speed_down_remote": True, - "dimming_speed_down_local": True, - "ramp_rate_on_to_off_local": True, - "ramp_rate_on_to_off_remote": True, - "minimum_level": True, - "maximum_level": True, - "invert_switch": True, - "auto_off_timer": True, - "default_level_local": True, - "default_level_remote": True, - "state_after_power_restored": True, - "load_level_indicator_timeout": True, - "power_type": False, - "switch_type": False, - "non_neutral_aux_med_gear_learn_value": True, - "non_neutral_aux_low_gear_learn_value": True, - "quick_start_time": False, - "button_delay": False, - "smart_fan_mode": False, - "double_tap_up_enabled": True, - "double_tap_down_enabled": True, - "double_tap_up_level": True, - "double_tap_down_level": True, - "led_color_when_on": True, - "led_color_when_off": True, - "led_intensity_when_on": True, - "led_intensity_when_off": True, - "aux_switch_scenes": True, - "local_protection": False, - "output_mode": False, - "on_off_led_mode": True, - 
"firmware_progress_led": True, - "smart_fan_led_display_levels": True, - } - - async def issue_all_led_effect( - self, - effect_type: AllLEDEffectType | int = AllLEDEffectType.Fast_Blink, - color: int = 200, - level: int = 100, - duration: int = 3, - **kwargs: Any, - ) -> None: - """Issue all LED effect command. - - This command is used to issue an LED effect to all LEDs on the device. - """ - - await self.led_effect(effect_type, color, level, duration, expect_reply=False) - - async def issue_individual_led_effect( - self, - led_number: int = 1, - effect_type: SingleLEDEffectType | int = SingleLEDEffectType.Fast_Blink, - color: int = 200, - level: int = 100, - duration: int = 3, - **kwargs: Any, - ) -> None: - """Issue individual LED effect command. - - This command is used to issue an LED effect to the specified LED on the device. - """ - - await self.individual_led_effect( - led_number, effect_type, color, level, duration, expect_reply=False - ) - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.IKEA_AIR_PURIFIER_CLUSTER) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - registries.IKEA_AIR_PURIFIER_CLUSTER -) -class IkeaAirPurifierClusterHandler(ClusterHandler): - """IKEA Air Purifier cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig(attr="filter_run_time", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="replace_filter", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="filter_life_time", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="disable_led", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="air_quality_25pm", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="child_lock", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="fan_mode", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="fan_speed", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="device_run_time", config=REPORT_CONFIG_DEFAULT), - ) - - @property - def fan_mode(self) -> int | None: - """Return 
current fan mode.""" - return self.cluster.get("fan_mode") - - @property - def fan_mode_sequence(self) -> int | None: - """Return possible fan mode speeds.""" - return self.cluster.get("fan_mode_sequence") - - async def async_set_speed(self, value) -> None: - """Set the speed of the fan.""" - await self.write_attributes_safe({"fan_mode": value}) - - async def async_update(self) -> None: - """Retrieve latest state.""" - await self.get_attribute_value("fan_mode", from_cache=False) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute update from fan cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value - ) - if attr_name == "fan_mode": - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value - ) - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFC80) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC80) -class IkeaRemoteClusterHandler(ClusterHandler): - """Ikea Matter remote cluster handler.""" - - REPORT_CONFIG = () - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - DoorLock.cluster_id, XIAOMI_AQARA_VIBRATION_AQ1 -) -class XiaomiVibrationAQ1ClusterHandler(MultistateInputClusterHandler): - """Xiaomi DoorLock Cluster is in fact a MultiStateInput Cluster.""" - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFC11) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC11) -class SonoffPresenceSenorClusterHandler(ClusterHandler): - """SonoffPresenceSensor cluster handler.""" - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize SonoffPresenceSensor cluster handler.""" - super().__init__(cluster, endpoint) - if self.cluster.endpoint.model == "SNZB-06P": - self.ZCL_INIT_ATTRS = {"last_illumination_state": True} - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - clusters.hvac.Thermostat.cluster_id, 
DANFOSS_ALLY_THERMOSTAT -) -class DanfossThermostatClusterHandler(ThermostatClusterHandler): - """Thermostat cluster handler for the Danfoss TRV and derivatives.""" - - REPORT_CONFIG = ( - *ThermostatClusterHandler.REPORT_CONFIG, - AttrReportConfig(attr="open_window_detection", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="heat_required", config=REPORT_CONFIG_ASAP), - AttrReportConfig(attr="mounting_mode_active", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="load_estimate", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="adaptation_run_status", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="preheat_status", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="preheat_time", config=REPORT_CONFIG_DEFAULT), - ) - - ZCL_INIT_ATTRS = { - **ThermostatClusterHandler.ZCL_INIT_ATTRS, - "external_open_window_detected": True, - "window_open_feature": True, - "exercise_day_of_week": True, - "exercise_trigger_time": True, - "mounting_mode_control": False, # Can change - "orientation": True, - "external_measured_room_sensor": False, # Can change - "radiator_covered": True, - "heat_available": True, - "load_balancing_enable": True, - "load_room_mean": False, # Can change - "control_algorithm_scale_factor": True, - "regulation_setpoint_offset": True, - "adaptation_run_control": True, - "adaptation_run_settings": True, - } - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - clusters.hvac.UserInterface.cluster_id, DANFOSS_ALLY_THERMOSTAT -) -class DanfossUserInterfaceClusterHandler(UserInterfaceClusterHandler): - """Interface cluster handler for the Danfoss TRV and derivatives.""" - - ZCL_INIT_ATTRS = { - **UserInterfaceClusterHandler.ZCL_INIT_ATTRS, - "viewing_direction": True, - } - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - clusters.homeautomation.Diagnostic.cluster_id, DANFOSS_ALLY_THERMOSTAT -) -class DanfossDiagnosticClusterHandler(DiagnosticClusterHandler): - """Diagnostic cluster handler for the Danfoss TRV and 
derivatives.""" - - REPORT_CONFIG = ( - *DiagnosticClusterHandler.REPORT_CONFIG, - AttrReportConfig(attr="sw_error_code", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="motor_step_counter", config=REPORT_CONFIG_DEFAULT), - ) diff --git a/homeassistant/components/zha/core/cluster_handlers/measurement.py b/homeassistant/components/zha/core/cluster_handlers/measurement.py deleted file mode 100644 index 768de8c4c73..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/measurement.py +++ /dev/null @@ -1,208 +0,0 @@ -"""Measurement cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from typing import TYPE_CHECKING - -import zigpy.zcl -from zigpy.zcl.clusters.measurement import ( - PM25, - CarbonDioxideConcentration, - CarbonMonoxideConcentration, - FlowMeasurement, - FormaldehydeConcentration, - IlluminanceLevelSensing, - IlluminanceMeasurement, - LeafWetness, - OccupancySensing, - PressureMeasurement, - RelativeHumidity, - SoilMoisture, - TemperatureMeasurement, -) - -from .. import registries -from ..const import ( - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_IMMEDIATE, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_MIN_INT, -) -from . 
import AttrReportConfig, ClusterHandler -from .helpers import is_hue_motion_sensor, is_sonoff_presence_sensor - -if TYPE_CHECKING: - from ..endpoint import Endpoint - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(FlowMeasurement.cluster_id) -class FlowMeasurementClusterHandler(ClusterHandler): - """Flow Measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=FlowMeasurement.AttributeDefs.measured_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IlluminanceLevelSensing.cluster_id) -class IlluminanceLevelSensingClusterHandler(ClusterHandler): - """Illuminance Level Sensing cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=IlluminanceLevelSensing.AttributeDefs.level_status.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IlluminanceMeasurement.cluster_id) -class IlluminanceMeasurementClusterHandler(ClusterHandler): - """Illuminance Measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=IlluminanceMeasurement.AttributeDefs.measured_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OccupancySensing.cluster_id) -class OccupancySensingClusterHandler(ClusterHandler): - """Occupancy Sensing cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=OccupancySensing.AttributeDefs.occupancy.name, - config=REPORT_CONFIG_IMMEDIATE, - ), - ) - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Occupancy cluster handler.""" - super().__init__(cluster, endpoint) - if is_hue_motion_sensor(self): - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["sensitivity"] = True - if is_sonoff_presence_sensor(self): - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["ultrasonic_o_to_u_delay"] = True - 
self.ZCL_INIT_ATTRS["ultrasonic_u_to_o_threshold"] = True - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PressureMeasurement.cluster_id) -class PressureMeasurementClusterHandler(ClusterHandler): - """Pressure measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=PressureMeasurement.AttributeDefs.measured_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(RelativeHumidity.cluster_id) -class RelativeHumidityClusterHandler(ClusterHandler): - """Relative Humidity measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=RelativeHumidity.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(SoilMoisture.cluster_id) -class SoilMoistureClusterHandler(ClusterHandler): - """Soil Moisture measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=SoilMoisture.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LeafWetness.cluster_id) -class LeafWetnessClusterHandler(ClusterHandler): - """Leaf Wetness measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=LeafWetness.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(TemperatureMeasurement.cluster_id) -class TemperatureMeasurementClusterHandler(ClusterHandler): - """Temperature measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=TemperatureMeasurement.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - CarbonMonoxideConcentration.cluster_id -) -class 
CarbonMonoxideConcentrationClusterHandler(ClusterHandler): - """Carbon Monoxide measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=CarbonMonoxideConcentration.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - CarbonDioxideConcentration.cluster_id -) -class CarbonDioxideConcentrationClusterHandler(ClusterHandler): - """Carbon Dioxide measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=CarbonDioxideConcentration.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PM25.cluster_id) -class PM25ClusterHandler(ClusterHandler): - """Particulate Matter 2.5 microns or less measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=PM25.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.1), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - FormaldehydeConcentration.cluster_id -) -class FormaldehydeConcentrationClusterHandler(ClusterHandler): - """Formaldehyde measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=FormaldehydeConcentration.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), - ), - ) diff --git a/homeassistant/components/zha/core/cluster_handlers/protocol.py b/homeassistant/components/zha/core/cluster_handlers/protocol.py deleted file mode 100644 index e1e3d7a5413..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/protocol.py +++ /dev/null @@ -1,129 +0,0 @@ -"""Protocol cluster handlers module for Zigbee Home Automation.""" - -from zigpy.zcl.clusters.protocol import ( - AnalogInputExtended, - AnalogInputRegular, - AnalogOutputExtended, - AnalogOutputRegular, - AnalogValueExtended, - 
AnalogValueRegular, - BacnetProtocolTunnel, - BinaryInputExtended, - BinaryInputRegular, - BinaryOutputExtended, - BinaryOutputRegular, - BinaryValueExtended, - BinaryValueRegular, - GenericTunnel, - MultistateInputExtended, - MultistateInputRegular, - MultistateOutputExtended, - MultistateOutputRegular, - MultistateValueExtended, - MultistateValueRegular, -) - -from .. import registries -from . import ClusterHandler - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInputExtended.cluster_id) -class AnalogInputExtendedClusterHandler(ClusterHandler): - """Analog Input Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInputRegular.cluster_id) -class AnalogInputRegularClusterHandler(ClusterHandler): - """Analog Input Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutputExtended.cluster_id) -class AnalogOutputExtendedClusterHandler(ClusterHandler): - """Analog Output Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutputRegular.cluster_id) -class AnalogOutputRegularClusterHandler(ClusterHandler): - """Analog Output Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValueExtended.cluster_id) -class AnalogValueExtendedClusterHandler(ClusterHandler): - """Analog Value Extended edition cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValueRegular.cluster_id) -class AnalogValueRegularClusterHandler(ClusterHandler): - """Analog Value Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BacnetProtocolTunnel.cluster_id) -class BacnetProtocolTunnelClusterHandler(ClusterHandler): - """Bacnet Protocol Tunnel cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInputExtended.cluster_id) -class BinaryInputExtendedClusterHandler(ClusterHandler): - """Binary Input Extended cluster handler.""" - - 
-@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInputRegular.cluster_id) -class BinaryInputRegularClusterHandler(ClusterHandler): - """Binary Input Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutputExtended.cluster_id) -class BinaryOutputExtendedClusterHandler(ClusterHandler): - """Binary Output Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutputRegular.cluster_id) -class BinaryOutputRegularClusterHandler(ClusterHandler): - """Binary Output Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValueExtended.cluster_id) -class BinaryValueExtendedClusterHandler(ClusterHandler): - """Binary Value Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValueRegular.cluster_id) -class BinaryValueRegularClusterHandler(ClusterHandler): - """Binary Value Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(GenericTunnel.cluster_id) -class GenericTunnelClusterHandler(ClusterHandler): - """Generic Tunnel cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInputExtended.cluster_id) -class MultiStateInputExtendedClusterHandler(ClusterHandler): - """Multistate Input Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInputRegular.cluster_id) -class MultiStateInputRegularClusterHandler(ClusterHandler): - """Multistate Input Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - MultistateOutputExtended.cluster_id -) -class MultiStateOutputExtendedClusterHandler(ClusterHandler): - """Multistate Output Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateOutputRegular.cluster_id) -class MultiStateOutputRegularClusterHandler(ClusterHandler): - """Multistate Output Regular cluster handler.""" - - 
-@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValueExtended.cluster_id) -class MultiStateValueExtendedClusterHandler(ClusterHandler): - """Multistate Value Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValueRegular.cluster_id) -class MultiStateValueRegularClusterHandler(ClusterHandler): - """Multistate Value Regular cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/security.py b/homeassistant/components/zha/core/cluster_handlers/security.py deleted file mode 100644 index 8ebe09cef03..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/security.py +++ /dev/null @@ -1,400 +0,0 @@ -"""Security cluster handlers module for Zigbee Home Automation. - -For more details about this component, please refer to the documentation at -https://home-assistant.io/integrations/zha/ -""" - -from __future__ import annotations - -from collections.abc import Callable -from typing import TYPE_CHECKING, Any - -import zigpy.zcl -from zigpy.zcl.clusters.security import IasAce as AceCluster, IasWd, IasZone - -from homeassistant.core import callback -from homeassistant.exceptions import HomeAssistantError - -from .. import registries -from ..const import ( - SIGNAL_ATTR_UPDATED, - WARNING_DEVICE_MODE_EMERGENCY, - WARNING_DEVICE_SOUND_HIGH, - WARNING_DEVICE_SQUAWK_MODE_ARMED, - WARNING_DEVICE_STROBE_HIGH, - WARNING_DEVICE_STROBE_YES, -) -from . 
import ClusterHandler, ClusterHandlerStatus - -if TYPE_CHECKING: - from ..endpoint import Endpoint - -SIGNAL_ARMED_STATE_CHANGED = "zha_armed_state_changed" -SIGNAL_ALARM_TRIGGERED = "zha_armed_triggered" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AceCluster.cluster_id) -class IasAceClusterHandler(ClusterHandler): - """IAS Ancillary Control Equipment cluster handler.""" - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize IAS Ancillary Control Equipment cluster handler.""" - super().__init__(cluster, endpoint) - self.command_map: dict[int, Callable[..., Any]] = { - AceCluster.ServerCommandDefs.arm.id: self.arm, - AceCluster.ServerCommandDefs.bypass.id: self._bypass, - AceCluster.ServerCommandDefs.emergency.id: self._emergency, - AceCluster.ServerCommandDefs.fire.id: self._fire, - AceCluster.ServerCommandDefs.panic.id: self._panic, - AceCluster.ServerCommandDefs.get_zone_id_map.id: self._get_zone_id_map, - AceCluster.ServerCommandDefs.get_zone_info.id: self._get_zone_info, - AceCluster.ServerCommandDefs.get_panel_status.id: self._send_panel_status_response, - AceCluster.ServerCommandDefs.get_bypassed_zone_list.id: self._get_bypassed_zone_list, - AceCluster.ServerCommandDefs.get_zone_status.id: self._get_zone_status, - } - self.arm_map: dict[AceCluster.ArmMode, Callable[..., Any]] = { - AceCluster.ArmMode.Disarm: self._disarm, - AceCluster.ArmMode.Arm_All_Zones: self._arm_away, - AceCluster.ArmMode.Arm_Day_Home_Only: self._arm_day, - AceCluster.ArmMode.Arm_Night_Sleep_Only: self._arm_night, - } - self.armed_state: AceCluster.PanelStatus = AceCluster.PanelStatus.Panel_Disarmed - self.invalid_tries: int = 0 - - # These will all be setup by the entity from ZHA configuration - self.panel_code: str = "1234" - self.code_required_arm_actions = False - self.max_invalid_tries: int = 3 - - # where do we store this to handle restarts - self.alarm_status: AceCluster.AlarmStatus = AceCluster.AlarmStatus.No_Alarm - - 
@callback - def cluster_command(self, tsn, command_id, args) -> None: - """Handle commands received to this cluster.""" - self.debug( - "received command %s", self._cluster.server_commands[command_id].name - ) - self.command_map[command_id](*args) - - def arm(self, arm_mode: int, code: str | None, zone_id: int) -> None: - """Handle the IAS ACE arm command.""" - mode = AceCluster.ArmMode(arm_mode) - - self.zha_send_event( - AceCluster.ServerCommandDefs.arm.name, - { - "arm_mode": mode.value, - "arm_mode_description": mode.name, - "code": code, - "zone_id": zone_id, - }, - ) - - zigbee_reply = self.arm_map[mode](code) - self._endpoint.device.hass.async_create_task(zigbee_reply) - - if self.invalid_tries >= self.max_invalid_tries: - self.alarm_status = AceCluster.AlarmStatus.Emergency - self.armed_state = AceCluster.PanelStatus.In_Alarm - self.async_send_signal(f"{self.unique_id}_{SIGNAL_ALARM_TRIGGERED}") - else: - self.async_send_signal(f"{self.unique_id}_{SIGNAL_ARMED_STATE_CHANGED}") - self._send_panel_status_changed() - - def _disarm(self, code: str): - """Test the code and disarm the panel if the code is correct.""" - if ( - code != self.panel_code - and self.armed_state != AceCluster.PanelStatus.Panel_Disarmed - ): - self.debug("Invalid code supplied to IAS ACE") - self.invalid_tries += 1 - zigbee_reply = self.arm_response( - AceCluster.ArmNotification.Invalid_Arm_Disarm_Code - ) - else: - self.invalid_tries = 0 - if ( - self.armed_state == AceCluster.PanelStatus.Panel_Disarmed - and self.alarm_status == AceCluster.AlarmStatus.No_Alarm - ): - self.debug("IAS ACE already disarmed") - zigbee_reply = self.arm_response( - AceCluster.ArmNotification.Already_Disarmed - ) - else: - self.debug("Disarming all IAS ACE zones") - zigbee_reply = self.arm_response( - AceCluster.ArmNotification.All_Zones_Disarmed - ) - - self.armed_state = AceCluster.PanelStatus.Panel_Disarmed - self.alarm_status = AceCluster.AlarmStatus.No_Alarm - return zigbee_reply - - def _arm_day(self, 
code: str) -> None: - """Arm the panel for day / home zones.""" - return self._handle_arm( - code, - AceCluster.PanelStatus.Armed_Stay, - AceCluster.ArmNotification.Only_Day_Home_Zones_Armed, - ) - - def _arm_night(self, code: str) -> None: - """Arm the panel for night / sleep zones.""" - return self._handle_arm( - code, - AceCluster.PanelStatus.Armed_Night, - AceCluster.ArmNotification.Only_Night_Sleep_Zones_Armed, - ) - - def _arm_away(self, code: str) -> None: - """Arm the panel for away mode.""" - return self._handle_arm( - code, - AceCluster.PanelStatus.Armed_Away, - AceCluster.ArmNotification.All_Zones_Armed, - ) - - def _handle_arm( - self, - code: str, - panel_status: AceCluster.PanelStatus, - armed_type: AceCluster.ArmNotification, - ) -> None: - """Arm the panel with the specified statuses.""" - if self.code_required_arm_actions and code != self.panel_code: - self.debug("Invalid code supplied to IAS ACE") - zigbee_reply = self.arm_response( - AceCluster.ArmNotification.Invalid_Arm_Disarm_Code - ) - else: - self.debug("Arming all IAS ACE zones") - self.armed_state = panel_status - zigbee_reply = self.arm_response(armed_type) - return zigbee_reply - - def _bypass(self, zone_list, code) -> None: - """Handle the IAS ACE bypass command.""" - self.zha_send_event( - AceCluster.ServerCommandDefs.bypass.name, - {"zone_list": zone_list, "code": code}, - ) - - def _emergency(self) -> None: - """Handle the IAS ACE emergency command.""" - self._set_alarm(AceCluster.AlarmStatus.Emergency) - - def _fire(self) -> None: - """Handle the IAS ACE fire command.""" - self._set_alarm(AceCluster.AlarmStatus.Fire) - - def _panic(self) -> None: - """Handle the IAS ACE panic command.""" - self._set_alarm(AceCluster.AlarmStatus.Emergency_Panic) - - def _set_alarm(self, status: AceCluster.AlarmStatus) -> None: - """Set the specified alarm status.""" - self.alarm_status = status - self.armed_state = AceCluster.PanelStatus.In_Alarm - 
self.async_send_signal(f"{self.unique_id}_{SIGNAL_ALARM_TRIGGERED}") - self._send_panel_status_changed() - - def _get_zone_id_map(self): - """Handle the IAS ACE zone id map command.""" - - def _get_zone_info(self, zone_id): - """Handle the IAS ACE zone info command.""" - - def _send_panel_status_response(self) -> None: - """Handle the IAS ACE panel status response command.""" - response = self.panel_status_response( - self.armed_state, - 0x00, - AceCluster.AudibleNotification.Default_Sound, - self.alarm_status, - ) - self._endpoint.device.hass.async_create_task(response) - - def _send_panel_status_changed(self) -> None: - """Handle the IAS ACE panel status changed command.""" - response = self.panel_status_changed( - self.armed_state, - 0x00, - AceCluster.AudibleNotification.Default_Sound, - self.alarm_status, - ) - self._endpoint.device.hass.async_create_task(response) - - def _get_bypassed_zone_list(self): - """Handle the IAS ACE bypassed zone list command.""" - - def _get_zone_status( - self, starting_zone_id, max_zone_ids, zone_status_mask_flag, zone_status_mask - ): - """Handle the IAS ACE zone status command.""" - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(IasWd.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IasWd.cluster_id) -class IasWdClusterHandler(ClusterHandler): - """IAS Warning Device cluster handler.""" - - @staticmethod - def set_bit(destination_value, destination_bit, source_value, source_bit): - """Set the specified bit in the value.""" - - if IasWdClusterHandler.get_bit(source_value, source_bit): - return destination_value | (1 << destination_bit) - return destination_value - - @staticmethod - def get_bit(value, bit): - """Get the specified bit from the value.""" - return (value & (1 << bit)) != 0 - - async def issue_squawk( - self, - mode=WARNING_DEVICE_SQUAWK_MODE_ARMED, - strobe=WARNING_DEVICE_STROBE_YES, - squawk_level=WARNING_DEVICE_SOUND_HIGH, - ): - """Issue a squawk command. 
- - This command uses the WD capabilities to emit a quick audible/visible - pulse called a "squawk". The squawk command has no effect if the WD - is currently active (warning in progress). - """ - value = 0 - value = IasWdClusterHandler.set_bit(value, 0, squawk_level, 0) - value = IasWdClusterHandler.set_bit(value, 1, squawk_level, 1) - - value = IasWdClusterHandler.set_bit(value, 3, strobe, 0) - - value = IasWdClusterHandler.set_bit(value, 4, mode, 0) - value = IasWdClusterHandler.set_bit(value, 5, mode, 1) - value = IasWdClusterHandler.set_bit(value, 6, mode, 2) - value = IasWdClusterHandler.set_bit(value, 7, mode, 3) - - await self.squawk(value) - - async def issue_start_warning( - self, - mode=WARNING_DEVICE_MODE_EMERGENCY, - strobe=WARNING_DEVICE_STROBE_YES, - siren_level=WARNING_DEVICE_SOUND_HIGH, - warning_duration=5, # seconds - strobe_duty_cycle=0x00, - strobe_intensity=WARNING_DEVICE_STROBE_HIGH, - ): - """Issue a start warning command. - - This command starts the WD operation. The WD alerts the surrounding area - by audible (siren) and visual (strobe) signals. - - strobe_duty_cycle indicates the length of the flash cycle. This provides a means - of varying the flash duration for different alarm types (e.g., fire, police, - burglar). Valid range is 0-100 in increments of 10. All other values SHALL - be rounded to the nearest valid value. Strobe SHALL calculate duty cycle over - a duration of one second. - - The ON state SHALL precede the OFF state. For example, if Strobe Duty Cycle - Field specifies “40,” then the strobe SHALL flash ON for 4/10ths of a second - and then turn OFF for 6/10ths of a second. 
- """ - value = 0 - value = IasWdClusterHandler.set_bit(value, 0, siren_level, 0) - value = IasWdClusterHandler.set_bit(value, 1, siren_level, 1) - - value = IasWdClusterHandler.set_bit(value, 2, strobe, 0) - - value = IasWdClusterHandler.set_bit(value, 4, mode, 0) - value = IasWdClusterHandler.set_bit(value, 5, mode, 1) - value = IasWdClusterHandler.set_bit(value, 6, mode, 2) - value = IasWdClusterHandler.set_bit(value, 7, mode, 3) - - await self.start_warning( - value, warning_duration, strobe_duty_cycle, strobe_intensity - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IasZone.cluster_id) -class IASZoneClusterHandler(ClusterHandler): - """Cluster handler for the IASZone Zigbee cluster.""" - - ZCL_INIT_ATTRS = { - IasZone.AttributeDefs.zone_status.name: False, - IasZone.AttributeDefs.zone_state.name: True, - IasZone.AttributeDefs.zone_type.name: True, - } - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - if command_id == IasZone.ClientCommandDefs.status_change_notification.id: - zone_status = args[0] - # update attribute cache with new zone status - self.cluster.update_attribute( - IasZone.AttributeDefs.zone_status.id, zone_status - ) - self.debug("Updated alarm state: %s", zone_status) - elif command_id == IasZone.ClientCommandDefs.enroll.id: - self.debug("Enroll requested") - self._cluster.create_catching_task( - self.enroll_response( - enroll_response_code=IasZone.EnrollResponse.Success, zone_id=0 - ) - ) - - async def async_configure(self): - """Configure IAS device.""" - await self.get_attribute_value( - IasZone.AttributeDefs.zone_type.name, from_cache=False - ) - if self._endpoint.device.skip_configuration: - self.debug("skipping IASZoneClusterHandler configuration") - return - - self.debug("started IASZoneClusterHandler configuration") - - await self.bind() - ieee = self.cluster.endpoint.device.application.state.node_info.ieee - - try: - await self.write_attributes_safe( - 
{IasZone.AttributeDefs.cie_addr.name: ieee} - ) - self.debug( - "wrote cie_addr: %s to '%s' cluster", - str(ieee), - self._cluster.ep_attribute, - ) - except HomeAssistantError as ex: - self.debug( - "Failed to write cie_addr: %s to '%s' cluster: %s", - str(ieee), - self._cluster.ep_attribute, - str(ex), - ) - - self.debug("Sending pro-active IAS enroll response") - self._cluster.create_catching_task( - self.enroll_response( - enroll_response_code=IasZone.EnrollResponse.Success, zone_id=0 - ) - ) - - self._status = ClusterHandlerStatus.CONFIGURED - self.debug("finished IASZoneClusterHandler configuration") - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - if attrid == IasZone.AttributeDefs.zone_status.id: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - IasZone.AttributeDefs.zone_status.name, - value, - ) diff --git a/homeassistant/components/zha/core/cluster_handlers/smartenergy.py b/homeassistant/components/zha/core/cluster_handlers/smartenergy.py deleted file mode 100644 index d167b8b1752..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/smartenergy.py +++ /dev/null @@ -1,388 +0,0 @@ -"""Smart energy cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -import enum -from functools import partialmethod -from typing import TYPE_CHECKING - -import zigpy.zcl -from zigpy.zcl.clusters.smartenergy import ( - Calendar, - DeviceManagement, - Drlc, - EnergyManagement, - Events, - KeyEstablishment, - MduPairing, - Messaging, - Metering, - Prepayment, - Price, - Tunneling, -) - -from .. import registries -from ..const import ( - REPORT_CONFIG_ASAP, - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_OP, - SIGNAL_ATTR_UPDATED, -) -from . 
import AttrReportConfig, ClusterHandler - -if TYPE_CHECKING: - from ..endpoint import Endpoint - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Calendar.cluster_id) -class CalendarClusterHandler(ClusterHandler): - """Calendar cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DeviceManagement.cluster_id) -class DeviceManagementClusterHandler(ClusterHandler): - """Device Management cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Drlc.cluster_id) -class DrlcClusterHandler(ClusterHandler): - """Demand Response and Load Control cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(EnergyManagement.cluster_id) -class EnergyManagementClusterHandler(ClusterHandler): - """Energy Management cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Events.cluster_id) -class EventsClusterHandler(ClusterHandler): - """Event cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(KeyEstablishment.cluster_id) -class KeyEstablishmentClusterHandler(ClusterHandler): - """Key Establishment cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MduPairing.cluster_id) -class MduPairingClusterHandler(ClusterHandler): - """Pairing cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Messaging.cluster_id) -class MessagingClusterHandler(ClusterHandler): - """Messaging cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Metering.cluster_id) -class MeteringClusterHandler(ClusterHandler): - """Metering cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=Metering.AttributeDefs.instantaneous_demand.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier1_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - 
AttrReportConfig( - attr=Metering.AttributeDefs.current_tier2_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier3_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier4_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier5_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier6_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_summ_received.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.status.name, - config=REPORT_CONFIG_ASAP, - ), - ) - ZCL_INIT_ATTRS = { - Metering.AttributeDefs.demand_formatting.name: True, - Metering.AttributeDefs.divisor.name: True, - Metering.AttributeDefs.metering_device_type.name: True, - Metering.AttributeDefs.multiplier.name: True, - Metering.AttributeDefs.summation_formatting.name: True, - Metering.AttributeDefs.unit_of_measure.name: True, - } - - METERING_DEVICE_TYPES_ELECTRIC = { - 0, - 7, - 8, - 9, - 10, - 11, - 13, - 14, - 15, - 127, - 134, - 135, - 136, - 137, - 138, - 140, - 141, - 142, - } - METERING_DEVICE_TYPES_GAS = {1, 128} - METERING_DEVICE_TYPES_WATER = {2, 129} - METERING_DEVICE_TYPES_HEATING_COOLING = {3, 5, 6, 130, 132, 133} - - metering_device_type = { - 0: "Electric Metering", - 1: "Gas Metering", - 2: "Water Metering", - 3: "Thermal Metering", # deprecated - 4: "Pressure Metering", - 5: "Heat Metering", - 6: "Cooling Metering", - 7: "End Use Measurement Device (EUMD) for metering electric vehicle charging", - 8: "PV Generation Metering", - 9: "Wind Turbine Generation Metering", - 10: "Water Turbine Generation Metering", - 11: "Micro Generation Metering", - 12: "Solar Hot Water Generation Metering", - 13: "Electric Metering Element/Phase 1", - 
14: "Electric Metering Element/Phase 2", - 15: "Electric Metering Element/Phase 3", - 127: "Mirrored Electric Metering", - 128: "Mirrored Gas Metering", - 129: "Mirrored Water Metering", - 130: "Mirrored Thermal Metering", # deprecated - 131: "Mirrored Pressure Metering", - 132: "Mirrored Heat Metering", - 133: "Mirrored Cooling Metering", - 134: "Mirrored End Use Measurement Device (EUMD) for metering electric vehicle charging", - 135: "Mirrored PV Generation Metering", - 136: "Mirrored Wind Turbine Generation Metering", - 137: "Mirrored Water Turbine Generation Metering", - 138: "Mirrored Micro Generation Metering", - 139: "Mirrored Solar Hot Water Generation Metering", - 140: "Mirrored Electric Metering Element/Phase 1", - 141: "Mirrored Electric Metering Element/Phase 2", - 142: "Mirrored Electric Metering Element/Phase 3", - } - - class DeviceStatusElectric(enum.IntFlag): - """Electric Metering Device Status.""" - - NO_ALARMS = 0 - CHECK_METER = 1 - LOW_BATTERY = 2 - TAMPER_DETECT = 4 - POWER_FAILURE = 8 - POWER_QUALITY = 16 - LEAK_DETECT = 32 # Really? 
- SERVICE_DISCONNECT = 64 - RESERVED = 128 - - class DeviceStatusGas(enum.IntFlag): - """Gas Metering Device Status.""" - - NO_ALARMS = 0 - CHECK_METER = 1 - LOW_BATTERY = 2 - TAMPER_DETECT = 4 - NOT_DEFINED = 8 - LOW_PRESSURE = 16 - LEAK_DETECT = 32 - SERVICE_DISCONNECT = 64 - REVERSE_FLOW = 128 - - class DeviceStatusWater(enum.IntFlag): - """Water Metering Device Status.""" - - NO_ALARMS = 0 - CHECK_METER = 1 - LOW_BATTERY = 2 - TAMPER_DETECT = 4 - PIPE_EMPTY = 8 - LOW_PRESSURE = 16 - LEAK_DETECT = 32 - SERVICE_DISCONNECT = 64 - REVERSE_FLOW = 128 - - class DeviceStatusHeatingCooling(enum.IntFlag): - """Heating and Cooling Metering Device Status.""" - - NO_ALARMS = 0 - CHECK_METER = 1 - LOW_BATTERY = 2 - TAMPER_DETECT = 4 - TEMPERATURE_SENSOR = 8 - BURST_DETECT = 16 - LEAK_DETECT = 32 - SERVICE_DISCONNECT = 64 - REVERSE_FLOW = 128 - - class DeviceStatusDefault(enum.IntFlag): - """Metering Device Status.""" - - NO_ALARMS = 0 - - class FormatSelector(enum.IntEnum): - """Format specified selector.""" - - DEMAND = 0 - SUMMATION = 1 - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Metering.""" - super().__init__(cluster, endpoint) - self._format_spec: str | None = None - self._summa_format: str | None = None - - @property - def divisor(self) -> int: - """Return divisor for the value.""" - return self.cluster.get(Metering.AttributeDefs.divisor.name) or 1 - - @property - def device_type(self) -> str | int | None: - """Return metering device type.""" - dev_type = self.cluster.get(Metering.AttributeDefs.metering_device_type.name) - if dev_type is None: - return None - return self.metering_device_type.get(dev_type, dev_type) - - @property - def multiplier(self) -> int: - """Return multiplier for the value.""" - return self.cluster.get(Metering.AttributeDefs.multiplier.name) or 1 - - @property - def status(self) -> int | None: - """Return metering device status.""" - if (status := 
self.cluster.get(Metering.AttributeDefs.status.name)) is None: - return None - - metering_device_type = self.cluster.get( - Metering.AttributeDefs.metering_device_type.name - ) - if metering_device_type in self.METERING_DEVICE_TYPES_ELECTRIC: - return self.DeviceStatusElectric(status) - if metering_device_type in self.METERING_DEVICE_TYPES_GAS: - return self.DeviceStatusGas(status) - if metering_device_type in self.METERING_DEVICE_TYPES_WATER: - return self.DeviceStatusWater(status) - if metering_device_type in self.METERING_DEVICE_TYPES_HEATING_COOLING: - return self.DeviceStatusHeatingCooling(status) - return self.DeviceStatusDefault(status) - - @property - def unit_of_measurement(self) -> int: - """Return unit of measurement.""" - return self.cluster.get(Metering.AttributeDefs.unit_of_measure.name) - - async def async_initialize_cluster_handler_specific(self, from_cache: bool) -> None: - """Fetch config from device and updates format specifier.""" - - fmting = self.cluster.get( - Metering.AttributeDefs.demand_formatting.name, 0xF9 - ) # 1 digit to the right, 15 digits to the left - self._format_spec = self.get_formatting(fmting) - - fmting = self.cluster.get( - Metering.AttributeDefs.summation_formatting.name, 0xF9 - ) # 1 digit to the right, 15 digits to the left - self._summa_format = self.get_formatting(fmting) - - async def async_update(self) -> None: - """Retrieve latest state.""" - self.debug("async_update") - - attrs = [ - a["attr"] - for a in self.REPORT_CONFIG - if a["attr"] not in self.cluster.unsupported_attributes - ] - result = await self.get_attributes(attrs, from_cache=False, only_cache=False) - if result: - for attr, value in result.items(): - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - self.cluster.find_attribute(attr).id, - attr, - value, - ) - - @staticmethod - def get_formatting(formatting: int) -> str: - """Return a formatting string, given the formatting value. 
- - Bits 0 to 2: Number of Digits to the right of the Decimal Point. - Bits 3 to 6: Number of Digits to the left of the Decimal Point. - Bit 7: If set, suppress leading zeros. - """ - r_digits = int(formatting & 0x07) # digits to the right of decimal point - l_digits = (formatting >> 3) & 0x0F # digits to the left of decimal point - if l_digits == 0: - l_digits = 15 - width = r_digits + l_digits + (1 if r_digits > 0 else 0) - - if formatting & 0x80: - # suppress leading 0 - return f"{{:{width}.{r_digits}f}}" - - return f"{{:0{width}.{r_digits}f}}" - - def _formatter_function( - self, selector: FormatSelector, value: int - ) -> int | float | str: - """Return formatted value for display.""" - value_float = value * self.multiplier / self.divisor - if self.unit_of_measurement == 0: - # Zigbee spec power unit is kW, but we show the value in W - value_watt = value_float * 1000 - if value_watt < 100: - return round(value_watt, 1) - return round(value_watt) - if selector == self.FormatSelector.SUMMATION: - assert self._summa_format - return self._summa_format.format(value_float).lstrip() - assert self._format_spec - return self._format_spec.format(value_float).lstrip() - - demand_formatter = partialmethod(_formatter_function, FormatSelector.DEMAND) - summa_formatter = partialmethod(_formatter_function, FormatSelector.SUMMATION) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Prepayment.cluster_id) -class PrepaymentClusterHandler(ClusterHandler): - """Prepayment cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Price.cluster_id) -class PriceClusterHandler(ClusterHandler): - """Price cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Tunneling.cluster_id) -class TunnelingClusterHandler(ClusterHandler): - """Tunneling cluster handler.""" diff --git a/homeassistant/components/zha/core/const.py b/homeassistant/components/zha/core/const.py deleted file mode 100644 index 2359fe0a1c3..00000000000 --- 
a/homeassistant/components/zha/core/const.py +++ /dev/null @@ -1,423 +0,0 @@ -"""All constants related to the ZHA component.""" - -from __future__ import annotations - -import enum -import logging - -import bellows.zigbee.application -import voluptuous as vol -import zigpy.application -import zigpy.types as t -import zigpy_deconz.zigbee.application -import zigpy_xbee.zigbee.application -import zigpy_zigate.zigbee.application -import zigpy_znp.zigbee.application - -from homeassistant.const import Platform -import homeassistant.helpers.config_validation as cv - -ATTR_ACTIVE_COORDINATOR = "active_coordinator" -ATTR_ARGS = "args" -ATTR_ATTRIBUTE = "attribute" -ATTR_ATTRIBUTE_ID = "attribute_id" -ATTR_ATTRIBUTE_NAME = "attribute_name" -ATTR_AVAILABLE = "available" -ATTR_CLUSTER_ID = "cluster_id" -ATTR_CLUSTER_TYPE = "cluster_type" -ATTR_COMMAND_TYPE = "command_type" -ATTR_DEVICE_IEEE = "device_ieee" -ATTR_DEVICE_TYPE = "device_type" -ATTR_ENDPOINTS = "endpoints" -ATTR_ENDPOINT_NAMES = "endpoint_names" -ATTR_ENDPOINT_ID = "endpoint_id" -ATTR_IEEE = "ieee" -ATTR_IN_CLUSTERS = "in_clusters" -ATTR_LAST_SEEN = "last_seen" -ATTR_LEVEL = "level" -ATTR_LQI = "lqi" -ATTR_MANUFACTURER = "manufacturer" -ATTR_MANUFACTURER_CODE = "manufacturer_code" -ATTR_MEMBERS = "members" -ATTR_MODEL = "model" -ATTR_NEIGHBORS = "neighbors" -ATTR_NODE_DESCRIPTOR = "node_descriptor" -ATTR_NWK = "nwk" -ATTR_OUT_CLUSTERS = "out_clusters" -ATTR_PARAMS = "params" -ATTR_POWER_SOURCE = "power_source" -ATTR_PROFILE_ID = "profile_id" -ATTR_QUIRK_APPLIED = "quirk_applied" -ATTR_QUIRK_CLASS = "quirk_class" -ATTR_QUIRK_ID = "quirk_id" -ATTR_ROUTES = "routes" -ATTR_RSSI = "rssi" -ATTR_SIGNATURE = "signature" -ATTR_TYPE = "type" -ATTR_UNIQUE_ID = "unique_id" -ATTR_VALUE = "value" -ATTR_WARNING_DEVICE_DURATION = "duration" -ATTR_WARNING_DEVICE_MODE = "mode" -ATTR_WARNING_DEVICE_STROBE = "strobe" -ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE = "duty_cycle" -ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity" - 
-BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000] -BINDINGS = "bindings" - -CLUSTER_DETAILS = "cluster_details" - -CLUSTER_HANDLER_ACCELEROMETER = "accelerometer" -CLUSTER_HANDLER_BINARY_INPUT = "binary_input" -CLUSTER_HANDLER_ANALOG_INPUT = "analog_input" -CLUSTER_HANDLER_ANALOG_OUTPUT = "analog_output" -CLUSTER_HANDLER_ATTRIBUTE = "attribute" -CLUSTER_HANDLER_BASIC = "basic" -CLUSTER_HANDLER_COLOR = "light_color" -CLUSTER_HANDLER_COVER = "window_covering" -CLUSTER_HANDLER_DEVICE_TEMPERATURE = "device_temperature" -CLUSTER_HANDLER_DOORLOCK = "door_lock" -CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT = "electrical_measurement" -CLUSTER_HANDLER_EVENT_RELAY = "event_relay" -CLUSTER_HANDLER_FAN = "fan" -CLUSTER_HANDLER_HUMIDITY = "humidity" -CLUSTER_HANDLER_HUE_OCCUPANCY = "philips_occupancy" -CLUSTER_HANDLER_SOIL_MOISTURE = "soil_moisture" -CLUSTER_HANDLER_LEAF_WETNESS = "leaf_wetness" -CLUSTER_HANDLER_IAS_ACE = "ias_ace" -CLUSTER_HANDLER_IAS_WD = "ias_wd" -CLUSTER_HANDLER_IDENTIFY = "identify" -CLUSTER_HANDLER_ILLUMINANCE = "illuminance" -CLUSTER_HANDLER_LEVEL = ATTR_LEVEL -CLUSTER_HANDLER_MULTISTATE_INPUT = "multistate_input" -CLUSTER_HANDLER_OCCUPANCY = "occupancy" -CLUSTER_HANDLER_ON_OFF = "on_off" -CLUSTER_HANDLER_OTA = "ota" -CLUSTER_HANDLER_POWER_CONFIGURATION = "power" -CLUSTER_HANDLER_PRESSURE = "pressure" -CLUSTER_HANDLER_SHADE = "shade" -CLUSTER_HANDLER_SMARTENERGY_METERING = "smartenergy_metering" -CLUSTER_HANDLER_TEMPERATURE = "temperature" -CLUSTER_HANDLER_THERMOSTAT = "thermostat" -CLUSTER_HANDLER_ZDO = "zdo" -CLUSTER_HANDLER_ZONE = ZONE = "ias_zone" -CLUSTER_HANDLER_INOVELLI = "inovelli_vzm31sn_cluster" - -CLUSTER_COMMAND_SERVER = "server" -CLUSTER_COMMANDS_CLIENT = "client_commands" -CLUSTER_COMMANDS_SERVER = "server_commands" -CLUSTER_TYPE_IN = "in" -CLUSTER_TYPE_OUT = "out" - -PLATFORMS = ( - Platform.ALARM_CONTROL_PANEL, - Platform.BINARY_SENSOR, - Platform.BUTTON, - Platform.CLIMATE, - Platform.COVER, - 
Platform.DEVICE_TRACKER, - Platform.FAN, - Platform.LIGHT, - Platform.LOCK, - Platform.NUMBER, - Platform.SELECT, - Platform.SENSOR, - Platform.SIREN, - Platform.SWITCH, - Platform.UPDATE, -) - -CONF_ALARM_MASTER_CODE = "alarm_master_code" -CONF_ALARM_FAILED_TRIES = "alarm_failed_tries" -CONF_ALARM_ARM_REQUIRES_CODE = "alarm_arm_requires_code" - -CONF_BAUDRATE = "baudrate" -CONF_FLOW_CONTROL = "flow_control" -CONF_CUSTOM_QUIRKS_PATH = "custom_quirks_path" -CONF_DEFAULT_LIGHT_TRANSITION = "default_light_transition" -CONF_DEVICE_CONFIG = "device_config" -CONF_ENABLE_ENHANCED_LIGHT_TRANSITION = "enhanced_light_transition" -CONF_ENABLE_LIGHT_TRANSITIONING_FLAG = "light_transitioning_flag" -CONF_ALWAYS_PREFER_XY_COLOR_MODE = "always_prefer_xy_color_mode" -CONF_GROUP_MEMBERS_ASSUME_STATE = "group_members_assume_state" -CONF_ENABLE_IDENTIFY_ON_JOIN = "enable_identify_on_join" -CONF_ENABLE_QUIRKS = "enable_quirks" -CONF_RADIO_TYPE = "radio_type" -CONF_USB_PATH = "usb_path" -CONF_USE_THREAD = "use_thread" -CONF_ZIGPY = "zigpy_config" - -CONF_CONSIDER_UNAVAILABLE_MAINS = "consider_unavailable_mains" -CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2 # 2 hours -CONF_CONSIDER_UNAVAILABLE_BATTERY = "consider_unavailable_battery" -CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6 # 6 hours - -CONF_ZHA_OPTIONS_SCHEMA = vol.Schema( - { - vol.Optional(CONF_DEFAULT_LIGHT_TRANSITION, default=0): vol.All( - vol.Coerce(float), vol.Range(min=0, max=2**16 / 10) - ), - vol.Required(CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, default=False): cv.boolean, - vol.Required(CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, default=True): cv.boolean, - vol.Required(CONF_ALWAYS_PREFER_XY_COLOR_MODE, default=True): cv.boolean, - vol.Required(CONF_GROUP_MEMBERS_ASSUME_STATE, default=True): cv.boolean, - vol.Required(CONF_ENABLE_IDENTIFY_ON_JOIN, default=True): cv.boolean, - vol.Optional( - CONF_CONSIDER_UNAVAILABLE_MAINS, - default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, - ): cv.positive_int, - 
vol.Optional( - CONF_CONSIDER_UNAVAILABLE_BATTERY, - default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - ): cv.positive_int, - } -) - -CONF_ZHA_ALARM_SCHEMA = vol.Schema( - { - vol.Required(CONF_ALARM_MASTER_CODE, default="1234"): cv.string, - vol.Required(CONF_ALARM_FAILED_TRIES, default=3): cv.positive_int, - vol.Required(CONF_ALARM_ARM_REQUIRES_CODE, default=False): cv.boolean, - } -) - -CUSTOM_CONFIGURATION = "custom_configuration" - -DATA_DEVICE_CONFIG = "zha_device_config" -DATA_ZHA = "zha" -DATA_ZHA_CONFIG = "config" -DATA_ZHA_CORE_EVENTS = "zha_core_events" -DATA_ZHA_DEVICE_TRIGGER_CACHE = "zha_device_trigger_cache" -DATA_ZHA_GATEWAY = "zha_gateway" - -DEBUG_COMP_BELLOWS = "bellows" -DEBUG_COMP_ZHA = "homeassistant.components.zha" -DEBUG_COMP_ZIGPY = "zigpy" -DEBUG_COMP_ZIGPY_ZNP = "zigpy_znp" -DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz" -DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee" -DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate" -DEBUG_LEVEL_CURRENT = "current" -DEBUG_LEVEL_ORIGINAL = "original" -DEBUG_LEVELS = { - DEBUG_COMP_BELLOWS: logging.DEBUG, - DEBUG_COMP_ZHA: logging.DEBUG, - DEBUG_COMP_ZIGPY: logging.DEBUG, - DEBUG_COMP_ZIGPY_ZNP: logging.DEBUG, - DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG, - DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG, - DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG, -} -DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY] - -DEFAULT_RADIO_TYPE = "ezsp" -DEFAULT_BAUDRATE = 57600 -DEFAULT_DATABASE_NAME = "zigbee.db" - -DEVICE_PAIRING_STATUS = "pairing_status" - -DISCOVERY_KEY = "zha_discovery_info" - -DOMAIN = "zha" - -ENTITY_METADATA = "entity_metadata" - -GROUP_ID = "group_id" -GROUP_IDS = "group_ids" -GROUP_NAME = "group_name" - -MFG_CLUSTER_ID_START = 0xFC00 - -POWER_MAINS_POWERED = "Mains" -POWER_BATTERY_OR_UNKNOWN = "Battery or Unknown" - -PRESET_SCHEDULE = "Schedule" -PRESET_COMPLEX = "Complex" -PRESET_TEMP_MANUAL = "Temporary manual" - -ZCL_INIT_ATTRS = "ZCL_INIT_ATTRS" - -ZHA_ALARM_OPTIONS = "zha_alarm_options" -ZHA_OPTIONS = "zha_options" - 
-ZHA_CONFIG_SCHEMAS = { - ZHA_OPTIONS: CONF_ZHA_OPTIONS_SCHEMA, - ZHA_ALARM_OPTIONS: CONF_ZHA_ALARM_SCHEMA, -} - -type _ControllerClsType = type[zigpy.application.ControllerApplication] - - -class RadioType(enum.Enum): - """Possible options for radio type.""" - - ezsp = ( - "EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, Telegesis", - bellows.zigbee.application.ControllerApplication, - ) - znp = ( - "ZNP = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2", - zigpy_znp.zigbee.application.ControllerApplication, - ) - deconz = ( - "deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II", - zigpy_deconz.zigbee.application.ControllerApplication, - ) - zigate = ( - "ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi", - zigpy_zigate.zigbee.application.ControllerApplication, - ) - xbee = ( - "XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3", - zigpy_xbee.zigbee.application.ControllerApplication, - ) - - @classmethod - def list(cls) -> list[str]: - """Return a list of descriptions.""" - return [e.description for e in RadioType] - - @classmethod - def get_by_description(cls, description: str) -> RadioType: - """Get radio by description.""" - for radio in cls: - if radio.description == description: - return radio - raise ValueError - - def __init__(self, description: str, controller_cls: _ControllerClsType) -> None: - """Init instance.""" - self._desc = description - self._ctrl_cls = controller_cls - - @property - def controller(self) -> _ControllerClsType: - """Return controller class.""" - return self._ctrl_cls - - @property - def description(self) -> str: - """Return radio type description.""" - return self._desc - - -REPORT_CONFIG_ATTR_PER_REQ = 3 -REPORT_CONFIG_MAX_INT = 900 -REPORT_CONFIG_MAX_INT_BATTERY_SAVE = 10800 -REPORT_CONFIG_MIN_INT = 30 -REPORT_CONFIG_MIN_INT_ASAP = 1 -REPORT_CONFIG_MIN_INT_IMMEDIATE = 0 -REPORT_CONFIG_MIN_INT_OP = 5 -REPORT_CONFIG_MIN_INT_BATTERY_SAVE = 3600 
-REPORT_CONFIG_RPT_CHANGE = 1 -REPORT_CONFIG_DEFAULT = ( - REPORT_CONFIG_MIN_INT, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_RPT_CHANGE, -) -REPORT_CONFIG_ASAP = ( - REPORT_CONFIG_MIN_INT_ASAP, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_RPT_CHANGE, -) -REPORT_CONFIG_BATTERY_SAVE = ( - REPORT_CONFIG_MIN_INT_BATTERY_SAVE, - REPORT_CONFIG_MAX_INT_BATTERY_SAVE, - REPORT_CONFIG_RPT_CHANGE, -) -REPORT_CONFIG_IMMEDIATE = ( - REPORT_CONFIG_MIN_INT_IMMEDIATE, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_RPT_CHANGE, -) -REPORT_CONFIG_OP = ( - REPORT_CONFIG_MIN_INT_OP, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_RPT_CHANGE, -) - -SENSOR_ACCELERATION = "acceleration" -SENSOR_BATTERY = "battery" -SENSOR_ELECTRICAL_MEASUREMENT = CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT -SENSOR_GENERIC = "generic" -SENSOR_HUMIDITY = CLUSTER_HANDLER_HUMIDITY -SENSOR_ILLUMINANCE = CLUSTER_HANDLER_ILLUMINANCE -SENSOR_METERING = "metering" -SENSOR_OCCUPANCY = CLUSTER_HANDLER_OCCUPANCY -SENSOR_OPENING = "opening" -SENSOR_PRESSURE = CLUSTER_HANDLER_PRESSURE -SENSOR_TEMPERATURE = CLUSTER_HANDLER_TEMPERATURE -SENSOR_TYPE = "sensor_type" - -SIGNAL_ADD_ENTITIES = "zha_add_new_entities" -SIGNAL_ATTR_UPDATED = "attribute_updated" -SIGNAL_AVAILABLE = "available" -SIGNAL_MOVE_LEVEL = "move_level" -SIGNAL_REMOVE = "remove" -SIGNAL_SET_LEVEL = "set_level" -SIGNAL_STATE_ATTR = "update_state_attribute" -SIGNAL_UPDATE_DEVICE = "{}_zha_update_device" -SIGNAL_GROUP_ENTITY_REMOVED = "group_entity_removed" -SIGNAL_GROUP_MEMBERSHIP_CHANGE = "group_membership_change" - -UNKNOWN = "unknown" -UNKNOWN_MANUFACTURER = "unk_manufacturer" -UNKNOWN_MODEL = "unk_model" - -WARNING_DEVICE_MODE_STOP = 0 -WARNING_DEVICE_MODE_BURGLAR = 1 -WARNING_DEVICE_MODE_FIRE = 2 -WARNING_DEVICE_MODE_EMERGENCY = 3 -WARNING_DEVICE_MODE_POLICE_PANIC = 4 -WARNING_DEVICE_MODE_FIRE_PANIC = 5 -WARNING_DEVICE_MODE_EMERGENCY_PANIC = 6 - -WARNING_DEVICE_STROBE_NO = 0 -WARNING_DEVICE_STROBE_YES = 1 - -WARNING_DEVICE_SOUND_LOW = 0 -WARNING_DEVICE_SOUND_MEDIUM = 1 
-WARNING_DEVICE_SOUND_HIGH = 2 -WARNING_DEVICE_SOUND_VERY_HIGH = 3 - -WARNING_DEVICE_STROBE_LOW = 0x00 -WARNING_DEVICE_STROBE_MEDIUM = 0x01 -WARNING_DEVICE_STROBE_HIGH = 0x02 -WARNING_DEVICE_STROBE_VERY_HIGH = 0x03 - -WARNING_DEVICE_SQUAWK_MODE_ARMED = 0 -WARNING_DEVICE_SQUAWK_MODE_DISARMED = 1 - -ZHA_DISCOVERY_NEW = "zha_discovery_new_{}" -ZHA_CLUSTER_HANDLER_MSG = "zha_channel_message" -ZHA_CLUSTER_HANDLER_MSG_BIND = "zha_channel_bind" -ZHA_CLUSTER_HANDLER_MSG_CFG_RPT = "zha_channel_configure_reporting" -ZHA_CLUSTER_HANDLER_MSG_DATA = "zha_channel_msg_data" -ZHA_CLUSTER_HANDLER_CFG_DONE = "zha_channel_cfg_done" -ZHA_CLUSTER_HANDLER_READS_PER_REQ = 5 -ZHA_EVENT = "zha_event" -ZHA_GW_MSG = "zha_gateway_message" -ZHA_GW_MSG_DEVICE_FULL_INIT = "device_fully_initialized" -ZHA_GW_MSG_DEVICE_INFO = "device_info" -ZHA_GW_MSG_DEVICE_JOINED = "device_joined" -ZHA_GW_MSG_DEVICE_REMOVED = "device_removed" -ZHA_GW_MSG_GROUP_ADDED = "group_added" -ZHA_GW_MSG_GROUP_INFO = "group_info" -ZHA_GW_MSG_GROUP_MEMBER_ADDED = "group_member_added" -ZHA_GW_MSG_GROUP_MEMBER_REMOVED = "group_member_removed" -ZHA_GW_MSG_GROUP_REMOVED = "group_removed" -ZHA_GW_MSG_LOG_ENTRY = "log_entry" -ZHA_GW_MSG_LOG_OUTPUT = "log_output" -ZHA_GW_MSG_RAW_INIT = "raw_device_initialized" - - -class Strobe(t.enum8): - """Strobe enum.""" - - No_Strobe = 0x00 - Strobe = 0x01 - - -EZSP_OVERWRITE_EUI64 = ( - "i_understand_i_can_update_eui64_only_once_and_i_still_want_to_do_it" -) diff --git a/homeassistant/components/zha/core/decorators.py b/homeassistant/components/zha/core/decorators.py deleted file mode 100644 index d20fb7f2a38..00000000000 --- a/homeassistant/components/zha/core/decorators.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Decorators for ZHA core registries.""" - -from __future__ import annotations - -from collections.abc import Callable -from typing import Any - - -class DictRegistry[_TypeT: type[Any]](dict[int | str, _TypeT]): - """Dict Registry of items.""" - - def register(self, name: int | str) -> 
Callable[[_TypeT], _TypeT]: - """Return decorator to register item with a specific name.""" - - def decorator(cluster_handler: _TypeT) -> _TypeT: - """Register decorated cluster handler or item.""" - self[name] = cluster_handler - return cluster_handler - - return decorator - - -class NestedDictRegistry[_TypeT: type[Any]]( - dict[int | str, dict[int | str | None, _TypeT]] -): - """Dict Registry of multiple items per key.""" - - def register( - self, name: int | str, sub_name: int | str | None = None - ) -> Callable[[_TypeT], _TypeT]: - """Return decorator to register item with a specific and a quirk name.""" - - def decorator(cluster_handler: _TypeT) -> _TypeT: - """Register decorated cluster handler or item.""" - if name not in self: - self[name] = {} - self[name][sub_name] = cluster_handler - return cluster_handler - - return decorator - - -class SetRegistry(set[int | str]): - """Set Registry of items.""" - - def register[_TypeT: type[Any]]( - self, name: int | str - ) -> Callable[[_TypeT], _TypeT]: - """Return decorator to register item with a specific name.""" - - def decorator(cluster_handler: _TypeT) -> _TypeT: - """Register decorated cluster handler or item.""" - self.add(name) - return cluster_handler - - return decorator diff --git a/homeassistant/components/zha/core/device.py b/homeassistant/components/zha/core/device.py deleted file mode 100644 index 163674d614c..00000000000 --- a/homeassistant/components/zha/core/device.py +++ /dev/null @@ -1,1010 +0,0 @@ -"""Device for Zigbee Home Automation.""" - -from __future__ import annotations - -import asyncio -from collections.abc import Callable -from datetime import timedelta -from enum import Enum -from functools import cached_property -import logging -import random -import time -from typing import TYPE_CHECKING, Any, Self - -from zigpy import types -from zigpy.device import Device as ZigpyDevice -import zigpy.exceptions -from zigpy.profiles import PROFILES -import zigpy.quirks -from zigpy.quirks.v2 import 
CustomDeviceV2 -from zigpy.types.named import EUI64, NWK -from zigpy.zcl.clusters import Cluster -from zigpy.zcl.clusters.general import Groups, Identify -from zigpy.zcl.foundation import Status as ZclStatus, ZCLCommandDef -import zigpy.zdo.types as zdo_types - -from homeassistant.const import ATTR_COMMAND, ATTR_DEVICE_ID, ATTR_NAME -from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceEntry -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.event import async_track_time_interval - -from . import const, discovery -from .cluster_handlers import ClusterHandler, ZDOClusterHandler -from .const import ( - ATTR_ACTIVE_COORDINATOR, - ATTR_ARGS, - ATTR_ATTRIBUTE, - ATTR_AVAILABLE, - ATTR_CLUSTER_ID, - ATTR_CLUSTER_TYPE, - ATTR_COMMAND_TYPE, - ATTR_DEVICE_TYPE, - ATTR_ENDPOINT_ID, - ATTR_ENDPOINT_NAMES, - ATTR_ENDPOINTS, - ATTR_IEEE, - ATTR_LAST_SEEN, - ATTR_LQI, - ATTR_MANUFACTURER, - ATTR_MANUFACTURER_CODE, - ATTR_MODEL, - ATTR_NEIGHBORS, - ATTR_NODE_DESCRIPTOR, - ATTR_NWK, - ATTR_PARAMS, - ATTR_POWER_SOURCE, - ATTR_QUIRK_APPLIED, - ATTR_QUIRK_CLASS, - ATTR_QUIRK_ID, - ATTR_ROUTES, - ATTR_RSSI, - ATTR_SIGNATURE, - ATTR_VALUE, - CLUSTER_COMMAND_SERVER, - CLUSTER_COMMANDS_CLIENT, - CLUSTER_COMMANDS_SERVER, - CLUSTER_TYPE_IN, - CLUSTER_TYPE_OUT, - CONF_CONSIDER_UNAVAILABLE_BATTERY, - CONF_CONSIDER_UNAVAILABLE_MAINS, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, - CONF_ENABLE_IDENTIFY_ON_JOIN, - POWER_BATTERY_OR_UNKNOWN, - POWER_MAINS_POWERED, - SIGNAL_AVAILABLE, - SIGNAL_UPDATE_DEVICE, - UNKNOWN, - UNKNOWN_MANUFACTURER, - UNKNOWN_MODEL, - ZHA_OPTIONS, -) -from .endpoint import Endpoint -from .helpers import LogMixin, async_get_zha_config_value, convert_to_zcl_values - -if 
TYPE_CHECKING: - from ..websocket_api import ClusterBinding - from .gateway import ZHAGateway - -_LOGGER = logging.getLogger(__name__) -_UPDATE_ALIVE_INTERVAL = (60, 90) -_CHECKIN_GRACE_PERIODS = 2 - - -def get_device_automation_triggers( - device: zigpy.device.Device, -) -> dict[tuple[str, str], dict[str, str]]: - """Get the supported device automation triggers for a zigpy device.""" - return { - ("device_offline", "device_offline"): {"device_event_type": "device_offline"}, - **getattr(device, "device_automation_triggers", {}), - } - - -class DeviceStatus(Enum): - """Status of a device.""" - - CREATED = 1 - INITIALIZED = 2 - - -class ZHADevice(LogMixin): - """ZHA Zigbee device object.""" - - _ha_device_id: str - - def __init__( - self, - hass: HomeAssistant, - zigpy_device: zigpy.device.Device, - zha_gateway: ZHAGateway, - ) -> None: - """Initialize the gateway.""" - self.hass: HomeAssistant = hass - self._zigpy_device: ZigpyDevice = zigpy_device - self._zha_gateway: ZHAGateway = zha_gateway - self._available_signal: str = f"{self.name}_{self.ieee}_{SIGNAL_AVAILABLE}" - self._checkins_missed_count: int = 0 - self.unsubs: list[Callable[[], None]] = [] - self.quirk_applied: bool = isinstance( - self._zigpy_device, zigpy.quirks.CustomDevice - ) - self.quirk_class: str = ( - f"{self._zigpy_device.__class__.__module__}." 
- f"{self._zigpy_device.__class__.__name__}" - ) - self.quirk_id: str | None = getattr(self._zigpy_device, ATTR_QUIRK_ID, None) - - if self.is_mains_powered: - self.consider_unavailable_time: int = async_get_zha_config_value( - self._zha_gateway.config_entry, - ZHA_OPTIONS, - CONF_CONSIDER_UNAVAILABLE_MAINS, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, - ) - else: - self.consider_unavailable_time = async_get_zha_config_value( - self._zha_gateway.config_entry, - ZHA_OPTIONS, - CONF_CONSIDER_UNAVAILABLE_BATTERY, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - ) - self._available: bool = self.is_coordinator or ( - self.last_seen is not None - and time.time() - self.last_seen < self.consider_unavailable_time - ) - self._zdo_handler: ZDOClusterHandler = ZDOClusterHandler(self) - self._power_config_ch: ClusterHandler | None = None - self._identify_ch: ClusterHandler | None = None - self._basic_ch: ClusterHandler | None = None - self.status: DeviceStatus = DeviceStatus.CREATED - - self._endpoints: dict[int, Endpoint] = {} - for ep_id, endpoint in zigpy_device.endpoints.items(): - if ep_id != 0: - self._endpoints[ep_id] = Endpoint.new(endpoint, self) - - if not self.is_coordinator: - keep_alive_interval = random.randint(*_UPDATE_ALIVE_INTERVAL) - self.debug( - "starting availability checks - interval: %s", keep_alive_interval - ) - self.unsubs.append( - async_track_time_interval( - self.hass, - self._check_available, - timedelta(seconds=keep_alive_interval), - ) - ) - - @property - def device_id(self) -> str: - """Return the HA device registry device id.""" - return self._ha_device_id - - def set_device_id(self, device_id: str) -> None: - """Set the HA device registry device id.""" - self._ha_device_id = device_id - - @property - def device(self) -> zigpy.device.Device: - """Return underlying Zigpy device.""" - return self._zigpy_device - - @property - def name(self) -> str: - """Return device name.""" - return f"{self.manufacturer} {self.model}" - - @property - def 
ieee(self) -> EUI64: - """Return ieee address for device.""" - return self._zigpy_device.ieee - - @property - def manufacturer(self) -> str: - """Return manufacturer for device.""" - if self._zigpy_device.manufacturer is None: - return UNKNOWN_MANUFACTURER - return self._zigpy_device.manufacturer - - @property - def model(self) -> str: - """Return model for device.""" - if self._zigpy_device.model is None: - return UNKNOWN_MODEL - return self._zigpy_device.model - - @property - def manufacturer_code(self) -> int | None: - """Return the manufacturer code for the device.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.manufacturer_code - - @property - def nwk(self) -> NWK: - """Return nwk for device.""" - return self._zigpy_device.nwk - - @property - def lqi(self): - """Return lqi for device.""" - return self._zigpy_device.lqi - - @property - def rssi(self): - """Return rssi for device.""" - return self._zigpy_device.rssi - - @property - def last_seen(self) -> float | None: - """Return last_seen for device.""" - return self._zigpy_device.last_seen - - @property - def is_mains_powered(self) -> bool | None: - """Return true if device is mains powered.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.is_mains_powered - - @property - def device_type(self) -> str: - """Return the logical device type for the device.""" - if self._zigpy_device.node_desc is None: - return UNKNOWN - - return self._zigpy_device.node_desc.logical_type.name - - @property - def power_source(self) -> str: - """Return the power source for the device.""" - return ( - POWER_MAINS_POWERED if self.is_mains_powered else POWER_BATTERY_OR_UNKNOWN - ) - - @property - def is_router(self) -> bool | None: - """Return true if this is a routing capable device.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.is_router - - @property - def is_coordinator(self) 
-> bool | None: - """Return true if this device represents a coordinator.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.is_coordinator - - @property - def is_active_coordinator(self) -> bool: - """Return true if this device is the active coordinator.""" - if not self.is_coordinator: - return False - - return self.ieee == self.gateway.state.node_info.ieee - - @property - def is_end_device(self) -> bool | None: - """Return true if this device is an end device.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.is_end_device - - @property - def is_groupable(self) -> bool: - """Return true if this device has a group cluster.""" - return self.is_coordinator or ( - self.available and bool(self.async_get_groupable_endpoints()) - ) - - @property - def skip_configuration(self) -> bool: - """Return true if the device should not issue configuration related commands.""" - return self._zigpy_device.skip_configuration or bool(self.is_coordinator) - - @property - def gateway(self): - """Return the gateway for this device.""" - return self._zha_gateway - - @cached_property - def device_automation_commands(self) -> dict[str, list[tuple[str, str]]]: - """Return the a lookup of commands to etype/sub_type.""" - commands: dict[str, list[tuple[str, str]]] = {} - for etype_subtype, trigger in self.device_automation_triggers.items(): - if command := trigger.get(ATTR_COMMAND): - commands.setdefault(command, []).append(etype_subtype) - return commands - - @cached_property - def device_automation_triggers(self) -> dict[tuple[str, str], dict[str, str]]: - """Return the device automation triggers for this device.""" - return get_device_automation_triggers(self._zigpy_device) - - @property - def available_signal(self) -> str: - """Signal to use to subscribe to device availability changes.""" - return self._available_signal - - @property - def available(self): - """Return True if device is 
available.""" - return self._available - - @available.setter - def available(self, new_availability: bool) -> None: - """Set device availability.""" - self._available = new_availability - - @property - def power_configuration_ch(self) -> ClusterHandler | None: - """Return power configuration cluster handler.""" - return self._power_config_ch - - @power_configuration_ch.setter - def power_configuration_ch(self, cluster_handler: ClusterHandler) -> None: - """Power configuration cluster handler setter.""" - if self._power_config_ch is None: - self._power_config_ch = cluster_handler - - @property - def basic_ch(self) -> ClusterHandler | None: - """Return basic cluster handler.""" - return self._basic_ch - - @basic_ch.setter - def basic_ch(self, cluster_handler: ClusterHandler) -> None: - """Set the basic cluster handler.""" - if self._basic_ch is None: - self._basic_ch = cluster_handler - - @property - def identify_ch(self) -> ClusterHandler | None: - """Return power configuration cluster handler.""" - return self._identify_ch - - @identify_ch.setter - def identify_ch(self, cluster_handler: ClusterHandler) -> None: - """Power configuration cluster handler setter.""" - if self._identify_ch is None: - self._identify_ch = cluster_handler - - @property - def zdo_cluster_handler(self) -> ZDOClusterHandler: - """Return ZDO cluster handler.""" - return self._zdo_handler - - @property - def endpoints(self) -> dict[int, Endpoint]: - """Return the endpoints for this device.""" - return self._endpoints - - @property - def zigbee_signature(self) -> dict[str, Any]: - """Get zigbee signature for this device.""" - return { - ATTR_NODE_DESCRIPTOR: str(self._zigpy_device.node_desc), - ATTR_ENDPOINTS: { - signature[0]: signature[1] - for signature in [ - endpoint.zigbee_signature for endpoint in self._endpoints.values() - ] - }, - ATTR_MANUFACTURER: self.manufacturer, - ATTR_MODEL: self.model, - } - - @property - def sw_version(self) -> str | None: - """Return the software version for 
this device.""" - device_registry = dr.async_get(self.hass) - reg_device: DeviceEntry | None = device_registry.async_get(self.device_id) - if reg_device is None: - return None - return reg_device.sw_version - - @classmethod - def new( - cls, - hass: HomeAssistant, - zigpy_dev: zigpy.device.Device, - gateway: ZHAGateway, - ) -> Self: - """Create new device.""" - zha_dev = cls(hass, zigpy_dev, gateway) - zha_dev.unsubs.append( - async_dispatcher_connect( - hass, - SIGNAL_UPDATE_DEVICE.format(str(zha_dev.ieee)), - zha_dev.async_update_sw_build_id, - ) - ) - discovery.PROBE.discover_device_entities(zha_dev) - return zha_dev - - @callback - def async_update_sw_build_id(self, sw_version: int) -> None: - """Update device sw version.""" - if self.device_id is None: - return - - device_registry = dr.async_get(self.hass) - device_registry.async_update_device( - self.device_id, sw_version=f"0x{sw_version:08x}" - ) - - async def _check_available(self, *_: Any) -> None: - # don't flip the availability state of the coordinator - if self.is_coordinator: - return - if self.last_seen is None: - self.debug("last_seen is None, marking the device unavailable") - self.update_available(False) - return - - difference = time.time() - self.last_seen - if difference < self.consider_unavailable_time: - self.debug( - "Device seen - marking the device available and resetting counter" - ) - self.update_available(True) - self._checkins_missed_count = 0 - return - - if self.hass.data[const.DATA_ZHA].allow_polling: - if ( - self._checkins_missed_count >= _CHECKIN_GRACE_PERIODS - or self.manufacturer == "LUMI" - or not self._endpoints - ): - self.debug( - ( - "last_seen is %s seconds ago and ping attempts have been exhausted," - " marking the device unavailable" - ), - difference, - ) - self.update_available(False) - return - - self._checkins_missed_count += 1 - self.debug( - "Attempting to checkin with device - missed checkins: %s", - self._checkins_missed_count, - ) - if not self.basic_ch: - 
self.debug("does not have a mandatory basic cluster") - self.update_available(False) - return - res = await self.basic_ch.get_attribute_value( - ATTR_MANUFACTURER, from_cache=False - ) - if res is not None: - self._checkins_missed_count = 0 - - def update_available(self, available: bool) -> None: - """Update device availability and signal entities.""" - self.debug( - ( - "Update device availability - device available: %s - new availability:" - " %s - changed: %s" - ), - self.available, - available, - self.available ^ available, - ) - availability_changed = self.available ^ available - self.available = available - if availability_changed and available: - # reinit cluster handlers then signal entities - self.debug( - "Device availability changed and device became available," - " reinitializing cluster handlers" - ) - self.hass.async_create_task(self._async_became_available()) - return - if availability_changed and not available: - self.debug("Device availability changed and device became unavailable") - self.zha_send_event( - { - "device_event_type": "device_offline", - }, - ) - async_dispatcher_send(self.hass, f"{self._available_signal}_entity") - - @callback - def zha_send_event(self, event_data: dict[str, str | int]) -> None: - """Relay events to hass.""" - self.hass.bus.async_fire( - const.ZHA_EVENT, - { - const.ATTR_DEVICE_IEEE: str(self.ieee), - const.ATTR_UNIQUE_ID: str(self.ieee), - ATTR_DEVICE_ID: self.device_id, - **event_data, - }, - ) - - async def _async_became_available(self) -> None: - """Update device availability and signal entities.""" - await self.async_initialize(False) - async_dispatcher_send(self.hass, f"{self._available_signal}_entity") - - @property - def device_info(self) -> dict[str, Any]: - """Return a device description for device.""" - ieee = str(self.ieee) - time_struct = time.localtime(self.last_seen) - update_time = time.strftime("%Y-%m-%dT%H:%M:%S", time_struct) - return { - ATTR_IEEE: ieee, - ATTR_NWK: self.nwk, - ATTR_MANUFACTURER: 
self.manufacturer, - ATTR_MODEL: self.model, - ATTR_NAME: self.name or ieee, - ATTR_QUIRK_APPLIED: self.quirk_applied, - ATTR_QUIRK_CLASS: self.quirk_class, - ATTR_QUIRK_ID: self.quirk_id, - ATTR_MANUFACTURER_CODE: self.manufacturer_code, - ATTR_POWER_SOURCE: self.power_source, - ATTR_LQI: self.lqi, - ATTR_RSSI: self.rssi, - ATTR_LAST_SEEN: update_time, - ATTR_AVAILABLE: self.available, - ATTR_DEVICE_TYPE: self.device_type, - ATTR_SIGNATURE: self.zigbee_signature, - } - - async def async_configure(self) -> None: - """Configure the device.""" - should_identify = async_get_zha_config_value( - self._zha_gateway.config_entry, - ZHA_OPTIONS, - CONF_ENABLE_IDENTIFY_ON_JOIN, - True, - ) - self.debug("started configuration") - await self._zdo_handler.async_configure() - self._zdo_handler.debug("'async_configure' stage succeeded") - await asyncio.gather( - *(endpoint.async_configure() for endpoint in self._endpoints.values()) - ) - if isinstance(self._zigpy_device, CustomDeviceV2): - self.debug("applying quirks v2 custom device configuration") - await self._zigpy_device.apply_custom_configuration() - async_dispatcher_send( - self.hass, - const.ZHA_CLUSTER_HANDLER_MSG, - { - const.ATTR_TYPE: const.ZHA_CLUSTER_HANDLER_CFG_DONE, - }, - ) - self.debug("completed configuration") - - if ( - should_identify - and self.identify_ch is not None - and not self.skip_configuration - ): - await self.identify_ch.trigger_effect( - effect_id=Identify.EffectIdentifier.Okay, - effect_variant=Identify.EffectVariant.Default, - ) - - async def async_initialize(self, from_cache: bool = False) -> None: - """Initialize cluster handlers.""" - self.debug("started initialization") - await self._zdo_handler.async_initialize(from_cache) - self._zdo_handler.debug("'async_initialize' stage succeeded") - - # We intentionally do not use `gather` here! This is so that if, for example, - # three `device.async_initialize()`s are spawned, only three concurrent requests - # will ever be in flight at once. 
Startup concurrency is managed at the device - # level. - for endpoint in self._endpoints.values(): - try: - await endpoint.async_initialize(from_cache) - except Exception: # noqa: BLE001 - self.debug("Failed to initialize endpoint", exc_info=True) - - self.debug("power source: %s", self.power_source) - self.status = DeviceStatus.INITIALIZED - self.debug("completed initialization") - - @callback - def async_cleanup_handles(self) -> None: - """Unsubscribe the dispatchers and timers.""" - for unsubscribe in self.unsubs: - unsubscribe() - - @property - def zha_device_info(self) -> dict[str, Any]: - """Get ZHA device information.""" - device_info: dict[str, Any] = {} - device_info.update(self.device_info) - device_info[ATTR_ACTIVE_COORDINATOR] = self.is_active_coordinator - device_info["entities"] = [ - { - "entity_id": entity_ref.reference_id, - ATTR_NAME: entity_ref.device_info[ATTR_NAME], - } - for entity_ref in self.gateway.device_registry[self.ieee] - ] - - topology = self.gateway.application_controller.topology - device_info[ATTR_NEIGHBORS] = [ - { - "device_type": neighbor.device_type.name, - "rx_on_when_idle": neighbor.rx_on_when_idle.name, - "relationship": neighbor.relationship.name, - "extended_pan_id": str(neighbor.extended_pan_id), - "ieee": str(neighbor.ieee), - "nwk": str(neighbor.nwk), - "permit_joining": neighbor.permit_joining.name, - "depth": str(neighbor.depth), - "lqi": str(neighbor.lqi), - } - for neighbor in topology.neighbors[self.ieee] - ] - - device_info[ATTR_ROUTES] = [ - { - "dest_nwk": str(route.DstNWK), - "route_status": str(route.RouteStatus.name), - "memory_constrained": bool(route.MemoryConstrained), - "many_to_one": bool(route.ManyToOne), - "route_record_required": bool(route.RouteRecordRequired), - "next_hop": str(route.NextHop), - } - for route in topology.routes[self.ieee] - ] - - # Return endpoint device type Names - names: list[dict[str, str]] = [] - for endpoint in (ep for epid, ep in self.device.endpoints.items() if epid): - 
profile = PROFILES.get(endpoint.profile_id) - if profile and endpoint.device_type is not None: - # DeviceType provides undefined enums - names.append({ATTR_NAME: profile.DeviceType(endpoint.device_type).name}) - else: - names.append( - { - ATTR_NAME: ( - f"unknown {endpoint.device_type} device_type " - f"of 0x{(endpoint.profile_id or 0xFFFF):04x} profile id" - ) - } - ) - device_info[ATTR_ENDPOINT_NAMES] = names - - device_registry = dr.async_get(self.hass) - reg_device = device_registry.async_get(self.device_id) - if reg_device is not None: - device_info["user_given_name"] = reg_device.name_by_user - device_info["device_reg_id"] = reg_device.id - device_info["area_id"] = reg_device.area_id - return device_info - - @callback - def async_get_clusters(self) -> dict[int, dict[str, dict[int, Cluster]]]: - """Get all clusters for this device.""" - return { - ep_id: { - CLUSTER_TYPE_IN: endpoint.in_clusters, - CLUSTER_TYPE_OUT: endpoint.out_clusters, - } - for (ep_id, endpoint) in self._zigpy_device.endpoints.items() - if ep_id != 0 - } - - @callback - def async_get_groupable_endpoints(self): - """Get device endpoints that have a group 'in' cluster.""" - return [ - ep_id - for (ep_id, clusters) in self.async_get_clusters().items() - if Groups.cluster_id in clusters[CLUSTER_TYPE_IN] - ] - - @callback - def async_get_std_clusters(self): - """Get ZHA and ZLL clusters for this device.""" - - return { - ep_id: { - CLUSTER_TYPE_IN: endpoint.in_clusters, - CLUSTER_TYPE_OUT: endpoint.out_clusters, - } - for (ep_id, endpoint) in self._zigpy_device.endpoints.items() - if ep_id != 0 and endpoint.profile_id in PROFILES - } - - @callback - def async_get_cluster( - self, endpoint_id: int, cluster_id: int, cluster_type: str = CLUSTER_TYPE_IN - ) -> Cluster: - """Get zigbee cluster from this entity.""" - clusters: dict[int, dict[str, dict[int, Cluster]]] = self.async_get_clusters() - return clusters[endpoint_id][cluster_type][cluster_id] - - @callback - def async_get_cluster_attributes( 
- self, endpoint_id, cluster_id, cluster_type=CLUSTER_TYPE_IN - ): - """Get zigbee attributes for specified cluster.""" - cluster = self.async_get_cluster(endpoint_id, cluster_id, cluster_type) - if cluster is None: - return None - return cluster.attributes - - @callback - def async_get_cluster_commands( - self, endpoint_id, cluster_id, cluster_type=CLUSTER_TYPE_IN - ): - """Get zigbee commands for specified cluster.""" - cluster = self.async_get_cluster(endpoint_id, cluster_id, cluster_type) - if cluster is None: - return None - return { - CLUSTER_COMMANDS_CLIENT: cluster.client_commands, - CLUSTER_COMMANDS_SERVER: cluster.server_commands, - } - - async def write_zigbee_attribute( - self, - endpoint_id, - cluster_id, - attribute, - value, - cluster_type=CLUSTER_TYPE_IN, - manufacturer=None, - ): - """Write a value to a zigbee attribute for a cluster in this entity.""" - try: - cluster: Cluster = self.async_get_cluster( - endpoint_id, cluster_id, cluster_type - ) - except KeyError as exc: - raise ValueError( - f"Cluster {cluster_id} not found on endpoint {endpoint_id} while" - f" writing attribute {attribute} with value {value}" - ) from exc - - try: - response = await cluster.write_attributes( - {attribute: value}, manufacturer=manufacturer - ) - except zigpy.exceptions.ZigbeeException as exc: - raise HomeAssistantError( - f"Failed to set attribute: " - f"{ATTR_VALUE}: {value} " - f"{ATTR_ATTRIBUTE}: {attribute} " - f"{ATTR_CLUSTER_ID}: {cluster_id} " - f"{ATTR_ENDPOINT_ID}: {endpoint_id}" - ) from exc - - self.debug( - "set: %s for attr: %s to cluster: %s for ept: %s - res: %s", - value, - attribute, - cluster_id, - endpoint_id, - response, - ) - return response - - async def issue_cluster_command( - self, - endpoint_id: int, - cluster_id: int, - command: int, - command_type: str, - args: list | None, - params: dict[str, Any] | None, - cluster_type: str = CLUSTER_TYPE_IN, - manufacturer: int | None = None, - ) -> None: - """Issue a command against specified 
zigbee cluster on this device.""" - try: - cluster: Cluster = self.async_get_cluster( - endpoint_id, cluster_id, cluster_type - ) - except KeyError as exc: - raise ValueError( - f"Cluster {cluster_id} not found on endpoint {endpoint_id} while" - f" issuing command {command} with args {args}" - ) from exc - commands: dict[int, ZCLCommandDef] = ( - cluster.server_commands - if command_type == CLUSTER_COMMAND_SERVER - else cluster.client_commands - ) - if args is not None: - self.warning( - ( - "args [%s] are deprecated and should be passed with the params key." - " The parameter names are: %s" - ), - args, - [field.name for field in commands[command].schema.fields], - ) - response = await getattr(cluster, commands[command].name)(*args) - else: - assert params is not None - response = await getattr(cluster, commands[command].name)( - **convert_to_zcl_values(params, commands[command].schema) - ) - self.debug( - "Issued cluster command: %s %s %s %s %s %s %s %s", - f"{ATTR_CLUSTER_ID}: [{cluster_id}]", - f"{ATTR_CLUSTER_TYPE}: [{cluster_type}]", - f"{ATTR_ENDPOINT_ID}: [{endpoint_id}]", - f"{ATTR_COMMAND}: [{command}]", - f"{ATTR_COMMAND_TYPE}: [{command_type}]", - f"{ATTR_ARGS}: [{args}]", - f"{ATTR_PARAMS}: [{params}]", - f"{ATTR_MANUFACTURER}: [{manufacturer}]", - ) - if response is None: - return # client commands don't return a response - if isinstance(response, Exception): - raise HomeAssistantError("Failed to issue cluster command") from response - if response[1] is not ZclStatus.SUCCESS: - raise HomeAssistantError( - f"Failed to issue cluster command with status: {response[1]}" - ) - - async def async_add_to_group(self, group_id: int) -> None: - """Add this device to the provided zigbee group.""" - try: - # A group name is required. However, the spec also explicitly states that - # the group name can be ignored by the receiving device if a device cannot - # store it, so we cannot rely on it existing after being written. 
This is - # only done to make the ZCL command valid. - await self._zigpy_device.add_to_group(group_id, name=f"0x{group_id:04X}") - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "Failed to add device '%s' to group: 0x%04x ex: %s", - self._zigpy_device.ieee, - group_id, - str(ex), - ) - - async def async_remove_from_group(self, group_id: int) -> None: - """Remove this device from the provided zigbee group.""" - try: - await self._zigpy_device.remove_from_group(group_id) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "Failed to remove device '%s' from group: 0x%04x ex: %s", - self._zigpy_device.ieee, - group_id, - str(ex), - ) - - async def async_add_endpoint_to_group( - self, endpoint_id: int, group_id: int - ) -> None: - """Add the device endpoint to the provided zigbee group.""" - try: - await self._zigpy_device.endpoints[endpoint_id].add_to_group( - group_id, name=f"0x{group_id:04X}" - ) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "Failed to add endpoint: %s for device: '%s' to group: 0x%04x ex: %s", - endpoint_id, - self._zigpy_device.ieee, - group_id, - str(ex), - ) - - async def async_remove_endpoint_from_group( - self, endpoint_id: int, group_id: int - ) -> None: - """Remove the device endpoint from the provided zigbee group.""" - try: - await self._zigpy_device.endpoints[endpoint_id].remove_from_group(group_id) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - ( - "Failed to remove endpoint: %s for device '%s' from group: 0x%04x" - " ex: %s" - ), - endpoint_id, - self._zigpy_device.ieee, - group_id, - str(ex), - ) - - async def async_bind_to_group( - self, group_id: int, cluster_bindings: list[ClusterBinding] - ) -> None: - """Directly bind this device to a group for the given clusters.""" - await self._async_group_binding_operation( - group_id, zdo_types.ZDOCmd.Bind_req, cluster_bindings - ) - - async def 
async_unbind_from_group( - self, group_id: int, cluster_bindings: list[ClusterBinding] - ) -> None: - """Unbind this device from a group for the given clusters.""" - await self._async_group_binding_operation( - group_id, zdo_types.ZDOCmd.Unbind_req, cluster_bindings - ) - - async def _async_group_binding_operation( - self, - group_id: int, - operation: zdo_types.ZDOCmd, - cluster_bindings: list[ClusterBinding], - ) -> None: - """Create or remove a direct zigbee binding between a device and a group.""" - - zdo = self._zigpy_device.zdo - op_msg = "0x%04x: %s %s, ep: %s, cluster: %s to group: 0x%04x" - destination_address = zdo_types.MultiAddress() - destination_address.addrmode = types.uint8_t(1) - destination_address.nwk = types.uint16_t(group_id) - - tasks = [] - - for cluster_binding in cluster_bindings: - if cluster_binding.endpoint_id == 0: - continue - if ( - cluster_binding.id - in self._zigpy_device.endpoints[ - cluster_binding.endpoint_id - ].out_clusters - ): - op_params = ( - self.nwk, - operation.name, - str(self.ieee), - cluster_binding.endpoint_id, - cluster_binding.id, - group_id, - ) - zdo.debug(f"processing {op_msg}", *op_params) - tasks.append( - ( - zdo.request( - operation, - self.ieee, - cluster_binding.endpoint_id, - cluster_binding.id, - destination_address, - ), - op_msg, - op_params, - ) - ) - res = await asyncio.gather(*(t[0] for t in tasks), return_exceptions=True) - for outcome, log_msg in zip(res, tasks, strict=False): - if isinstance(outcome, Exception): - fmt = f"{log_msg[1]} failed: %s" - else: - fmt = f"{log_msg[1]} completed: %s" - zdo.debug(fmt, *(log_msg[2] + (outcome,))) - - def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None: - """Log a message.""" - msg = f"[%s](%s): {msg}" - args = (self.nwk, self.model, *args) - _LOGGER.log(level, msg, *args, **kwargs) diff --git a/homeassistant/components/zha/core/discovery.py b/homeassistant/components/zha/core/discovery.py deleted file mode 100644 index 
3c342d14060..00000000000 --- a/homeassistant/components/zha/core/discovery.py +++ /dev/null @@ -1,661 +0,0 @@ -"""Device discovery functions for Zigbee Home Automation.""" - -from __future__ import annotations - -from collections import Counter -from collections.abc import Callable -import logging -from typing import TYPE_CHECKING, Any, cast - -from slugify import slugify -from zigpy.quirks.v2 import ( - BinarySensorMetadata, - CustomDeviceV2, - EntityType, - NumberMetadata, - SwitchMetadata, - WriteAttributeButtonMetadata, - ZCLCommandButtonMetadata, - ZCLEnumMetadata, - ZCLSensorMetadata, -) -from zigpy.state import State -from zigpy.zcl import ClusterType -from zigpy.zcl.clusters.general import Ota - -from homeassistant.const import CONF_TYPE, Platform -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.entity_registry import async_entries_for_device -from homeassistant.helpers.typing import ConfigType - -from .. import ( # noqa: F401 - alarm_control_panel, - binary_sensor, - button, - climate, - cover, - device_tracker, - fan, - light, - lock, - number, - select, - sensor, - siren, - switch, - update, -) -from . 
import const as zha_const, registries as zha_regs - -# importing cluster handlers updates registries -from .cluster_handlers import ( # noqa: F401 - ClusterHandler, - closures, - general, - homeautomation, - hvac, - lighting, - lightlink, - manufacturerspecific, - measurement, - protocol, - security, - smartenergy, -) -from .helpers import get_zha_data, get_zha_gateway - -if TYPE_CHECKING: - from ..entity import ZhaEntity - from .device import ZHADevice - from .endpoint import Endpoint - from .group import ZHAGroup - -_LOGGER = logging.getLogger(__name__) - - -QUIRKS_ENTITY_META_TO_ENTITY_CLASS = { - ( - Platform.BUTTON, - WriteAttributeButtonMetadata, - EntityType.CONFIG, - ): button.ZHAAttributeButton, - ( - Platform.BUTTON, - WriteAttributeButtonMetadata, - EntityType.STANDARD, - ): button.ZHAAttributeButton, - (Platform.BUTTON, ZCLCommandButtonMetadata, EntityType.CONFIG): button.ZHAButton, - ( - Platform.BUTTON, - ZCLCommandButtonMetadata, - EntityType.DIAGNOSTIC, - ): button.ZHAButton, - (Platform.BUTTON, ZCLCommandButtonMetadata, EntityType.STANDARD): button.ZHAButton, - ( - Platform.BINARY_SENSOR, - BinarySensorMetadata, - EntityType.CONFIG, - ): binary_sensor.BinarySensor, - ( - Platform.BINARY_SENSOR, - BinarySensorMetadata, - EntityType.DIAGNOSTIC, - ): binary_sensor.BinarySensor, - ( - Platform.BINARY_SENSOR, - BinarySensorMetadata, - EntityType.STANDARD, - ): binary_sensor.BinarySensor, - (Platform.SENSOR, ZCLEnumMetadata, EntityType.DIAGNOSTIC): sensor.EnumSensor, - (Platform.SENSOR, ZCLEnumMetadata, EntityType.STANDARD): sensor.EnumSensor, - (Platform.SENSOR, ZCLSensorMetadata, EntityType.DIAGNOSTIC): sensor.Sensor, - (Platform.SENSOR, ZCLSensorMetadata, EntityType.STANDARD): sensor.Sensor, - (Platform.SELECT, ZCLEnumMetadata, EntityType.CONFIG): select.ZCLEnumSelectEntity, - (Platform.SELECT, ZCLEnumMetadata, EntityType.STANDARD): select.ZCLEnumSelectEntity, - ( - Platform.SELECT, - ZCLEnumMetadata, - EntityType.DIAGNOSTIC, - ): 
select.ZCLEnumSelectEntity, - ( - Platform.NUMBER, - NumberMetadata, - EntityType.CONFIG, - ): number.ZHANumberConfigurationEntity, - (Platform.NUMBER, NumberMetadata, EntityType.DIAGNOSTIC): number.ZhaNumber, - (Platform.NUMBER, NumberMetadata, EntityType.STANDARD): number.ZhaNumber, - ( - Platform.SWITCH, - SwitchMetadata, - EntityType.CONFIG, - ): switch.ZHASwitchConfigurationEntity, - (Platform.SWITCH, SwitchMetadata, EntityType.STANDARD): switch.Switch, -} - - -@callback -async def async_add_entities( - _async_add_entities: AddEntitiesCallback, - entities: list[ - tuple[ - type[ZhaEntity], - tuple[str, ZHADevice, list[ClusterHandler]], - dict[str, Any], - ] - ], - **kwargs, -) -> None: - """Add entities helper.""" - if not entities: - return - - to_add = [ - ent_cls.create_entity(*args, **{**kwargs, **kw_args}) - for ent_cls, args, kw_args in entities - ] - entities_to_add = [entity for entity in to_add if entity is not None] - _async_add_entities(entities_to_add, update_before_add=False) - entities.clear() - - -class ProbeEndpoint: - """All discovered cluster handlers and entities of an endpoint.""" - - def __init__(self) -> None: - """Initialize instance.""" - self._device_configs: ConfigType = {} - - @callback - def discover_entities(self, endpoint: Endpoint) -> None: - """Process an endpoint on a zigpy device.""" - _LOGGER.debug( - "Discovering entities for endpoint: %s-%s", - str(endpoint.device.ieee), - endpoint.id, - ) - self.discover_by_device_type(endpoint) - self.discover_multi_entities(endpoint) - self.discover_by_cluster_id(endpoint) - self.discover_multi_entities(endpoint, config_diagnostic_entities=True) - zha_regs.ZHA_ENTITIES.clean_up() - - @callback - def discover_device_entities(self, device: ZHADevice) -> None: - """Discover entities for a ZHA device.""" - _LOGGER.debug( - "Discovering entities for device: %s-%s", - str(device.ieee), - device.name, - ) - - if device.is_coordinator: - self.discover_coordinator_device_entities(device) - return 
- - self.discover_quirks_v2_entities(device) - zha_regs.ZHA_ENTITIES.clean_up() - - @callback - def discover_quirks_v2_entities(self, device: ZHADevice) -> None: - """Discover entities for a ZHA device exposed by quirks v2.""" - _LOGGER.debug( - "Attempting to discover quirks v2 entities for device: %s-%s", - str(device.ieee), - device.name, - ) - - if not isinstance(device.device, CustomDeviceV2): - _LOGGER.debug( - "Device: %s-%s is not a quirks v2 device - skipping " - "discover_quirks_v2_entities", - str(device.ieee), - device.name, - ) - return - - zigpy_device: CustomDeviceV2 = device.device - - if not zigpy_device.exposes_metadata: - _LOGGER.debug( - "Device: %s-%s does not expose any quirks v2 entities", - str(device.ieee), - device.name, - ) - return - - for ( - cluster_details, - entity_metadata_list, - ) in zigpy_device.exposes_metadata.items(): - endpoint_id, cluster_id, cluster_type = cluster_details - - if endpoint_id not in device.endpoints: - _LOGGER.warning( - "Device: %s-%s does not have an endpoint with id: %s - unable to " - "create entity with cluster details: %s", - str(device.ieee), - device.name, - endpoint_id, - cluster_details, - ) - continue - - endpoint: Endpoint = device.endpoints[endpoint_id] - cluster = ( - endpoint.zigpy_endpoint.in_clusters.get(cluster_id) - if cluster_type is ClusterType.Server - else endpoint.zigpy_endpoint.out_clusters.get(cluster_id) - ) - - if cluster is None: - _LOGGER.warning( - "Device: %s-%s does not have a cluster with id: %s - " - "unable to create entity with cluster details: %s", - str(device.ieee), - device.name, - cluster_id, - cluster_details, - ) - continue - - cluster_handler_id = f"{endpoint.id}:0x{cluster.cluster_id:04x}" - cluster_handler = ( - endpoint.all_cluster_handlers.get(cluster_handler_id) - if cluster_type is ClusterType.Server - else endpoint.client_cluster_handlers.get(cluster_handler_id) - ) - assert cluster_handler - - for entity_metadata in entity_metadata_list: - platform = 
Platform(entity_metadata.entity_platform.value) - metadata_type = type(entity_metadata) - entity_class = QUIRKS_ENTITY_META_TO_ENTITY_CLASS.get( - (platform, metadata_type, entity_metadata.entity_type) - ) - - if entity_class is None: - _LOGGER.warning( - "Device: %s-%s has an entity with details: %s that does not" - " have an entity class mapping - unable to create entity", - str(device.ieee), - device.name, - { - zha_const.CLUSTER_DETAILS: cluster_details, - zha_const.ENTITY_METADATA: entity_metadata, - }, - ) - continue - - # automatically add the attribute to ZCL_INIT_ATTRS for the cluster - # handler if it is not already in the list - if ( - hasattr(entity_metadata, "attribute_name") - and entity_metadata.attribute_name - not in cluster_handler.ZCL_INIT_ATTRS - ): - init_attrs = cluster_handler.ZCL_INIT_ATTRS.copy() - init_attrs[entity_metadata.attribute_name] = ( - entity_metadata.attribute_initialized_from_cache - ) - cluster_handler.__dict__[zha_const.ZCL_INIT_ATTRS] = init_attrs - - endpoint.async_new_entity( - platform, - entity_class, - endpoint.unique_id, - [cluster_handler], - entity_metadata=entity_metadata, - ) - - _LOGGER.debug( - "'%s' platform -> '%s' using %s", - platform, - entity_class.__name__, - [cluster_handler.name], - ) - - @callback - def discover_coordinator_device_entities(self, device: ZHADevice) -> None: - """Discover entities for the coordinator device.""" - _LOGGER.debug( - "Discovering entities for coordinator device: %s-%s", - str(device.ieee), - device.name, - ) - state: State = device.gateway.application_controller.state - platforms: dict[Platform, list] = get_zha_data(device.hass).platforms - - @callback - def process_counters(counter_groups: str) -> None: - for counter_group, counters in getattr(state, counter_groups).items(): - for counter in counters: - platforms[Platform.SENSOR].append( - ( - sensor.DeviceCounterSensor, - ( - f"{slugify(str(device.ieee))}_{counter_groups}_{counter_group}_{counter}", - device, - 
counter_groups, - counter_group, - counter, - ), - {}, - ) - ) - _LOGGER.debug( - "'%s' platform -> '%s' using %s", - Platform.SENSOR, - sensor.DeviceCounterSensor.__name__, - f"counter groups[{counter_groups}] counter group[{counter_group}] counter[{counter}]", - ) - - process_counters("counters") - process_counters("broadcast_counters") - process_counters("device_counters") - process_counters("group_counters") - - @callback - def discover_by_device_type(self, endpoint: Endpoint) -> None: - """Process an endpoint on a zigpy device.""" - - unique_id = endpoint.unique_id - - platform: str | None = self._device_configs.get(unique_id, {}).get(CONF_TYPE) - if platform is None: - ep_profile_id = endpoint.zigpy_endpoint.profile_id - ep_device_type = endpoint.zigpy_endpoint.device_type - platform = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type) - - if platform and platform in zha_const.PLATFORMS: - platform = cast(Platform, platform) - - cluster_handlers = endpoint.unclaimed_cluster_handlers() - platform_entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity( - platform, - endpoint.device.manufacturer, - endpoint.device.model, - cluster_handlers, - endpoint.device.quirk_id, - ) - if platform_entity_class is None: - return - endpoint.claim_cluster_handlers(claimed) - endpoint.async_new_entity( - platform, platform_entity_class, unique_id, claimed - ) - - @callback - def discover_by_cluster_id(self, endpoint: Endpoint) -> None: - """Process an endpoint on a zigpy device.""" - - items = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.items() - single_input_clusters = { - cluster_class: match - for cluster_class, match in items - if not isinstance(cluster_class, int) - } - remaining_cluster_handlers = endpoint.unclaimed_cluster_handlers() - for cluster_handler in remaining_cluster_handlers: - if ( - cluster_handler.cluster.cluster_id - in zha_regs.CLUSTER_HANDLER_ONLY_CLUSTERS - ): - endpoint.claim_cluster_handlers([cluster_handler]) - continue - - platform = 
zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.get( - cluster_handler.cluster.cluster_id - ) - if platform is None: - for cluster_class, match in single_input_clusters.items(): - if isinstance(cluster_handler.cluster, cluster_class): - platform = match - break - - self.probe_single_cluster(platform, cluster_handler, endpoint) - - # until we can get rid of registries - self.handle_on_off_output_cluster_exception(endpoint) - - @staticmethod - def probe_single_cluster( - platform: Platform | None, - cluster_handler: ClusterHandler, - endpoint: Endpoint, - ) -> None: - """Probe specified cluster for specific component.""" - if platform is None or platform not in zha_const.PLATFORMS: - return - cluster_handler_list = [cluster_handler] - unique_id = f"{endpoint.unique_id}-{cluster_handler.cluster.cluster_id}" - - entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity( - platform, - endpoint.device.manufacturer, - endpoint.device.model, - cluster_handler_list, - endpoint.device.quirk_id, - ) - if entity_class is None: - return - endpoint.claim_cluster_handlers(claimed) - endpoint.async_new_entity(platform, entity_class, unique_id, claimed) - - def handle_on_off_output_cluster_exception(self, endpoint: Endpoint) -> None: - """Process output clusters of the endpoint.""" - - profile_id = endpoint.zigpy_endpoint.profile_id - device_type = endpoint.zigpy_endpoint.device_type - if device_type in zha_regs.REMOTE_DEVICE_TYPES.get(profile_id, []): - return - - for cluster_id, cluster in endpoint.zigpy_endpoint.out_clusters.items(): - platform = zha_regs.SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS.get( - cluster.cluster_id - ) - if platform is None: - continue - - cluster_handler_classes = zha_regs.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id, {None: ClusterHandler} - ) - - quirk_id = ( - endpoint.device.quirk_id - if endpoint.device.quirk_id in cluster_handler_classes - else None - ) - - cluster_handler_class = cluster_handler_classes.get( - quirk_id, ClusterHandler - ) - - 
cluster_handler = cluster_handler_class(cluster, endpoint) - self.probe_single_cluster(platform, cluster_handler, endpoint) - - @staticmethod - @callback - def discover_multi_entities( - endpoint: Endpoint, - config_diagnostic_entities: bool = False, - ) -> None: - """Process an endpoint on and discover multiple entities.""" - - ep_profile_id = endpoint.zigpy_endpoint.profile_id - ep_device_type = endpoint.zigpy_endpoint.device_type - cmpt_by_dev_type = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type) - - if config_diagnostic_entities: - cluster_handlers = list(endpoint.all_cluster_handlers.values()) - ota_handler_id = f"{endpoint.id}:0x{Ota.cluster_id:04x}" - if ota_handler_id in endpoint.client_cluster_handlers: - cluster_handlers.append( - endpoint.client_cluster_handlers[ota_handler_id] - ) - matches, claimed = zha_regs.ZHA_ENTITIES.get_config_diagnostic_entity( - endpoint.device.manufacturer, - endpoint.device.model, - cluster_handlers, - endpoint.device.quirk_id, - ) - else: - matches, claimed = zha_regs.ZHA_ENTITIES.get_multi_entity( - endpoint.device.manufacturer, - endpoint.device.model, - endpoint.unclaimed_cluster_handlers(), - endpoint.device.quirk_id, - ) - - endpoint.claim_cluster_handlers(claimed) - for platform, ent_n_handler_list in matches.items(): - for entity_and_handler in ent_n_handler_list: - _LOGGER.debug( - "'%s' platform -> '%s' using %s", - platform, - entity_and_handler.entity_class.__name__, - [ch.name for ch in entity_and_handler.claimed_cluster_handlers], - ) - for platform, ent_n_handler_list in matches.items(): - for entity_and_handler in ent_n_handler_list: - if platform == cmpt_by_dev_type: - # for well known device types, - # like thermostats we'll take only 1st class - endpoint.async_new_entity( - platform, - entity_and_handler.entity_class, - endpoint.unique_id, - entity_and_handler.claimed_cluster_handlers, - ) - break - first_ch = entity_and_handler.claimed_cluster_handlers[0] - endpoint.async_new_entity( - platform, 
- entity_and_handler.entity_class, - f"{endpoint.unique_id}-{first_ch.cluster.cluster_id}", - entity_and_handler.claimed_cluster_handlers, - ) - - def initialize(self, hass: HomeAssistant) -> None: - """Update device overrides config.""" - zha_config = get_zha_data(hass).yaml_config - if overrides := zha_config.get(zha_const.CONF_DEVICE_CONFIG): - self._device_configs.update(overrides) - - -class GroupProbe: - """Determine the appropriate component for a group.""" - - _hass: HomeAssistant - - def __init__(self) -> None: - """Initialize instance.""" - self._unsubs: list[Callable[[], None]] = [] - - def initialize(self, hass: HomeAssistant) -> None: - """Initialize the group probe.""" - self._hass = hass - self._unsubs.append( - async_dispatcher_connect( - hass, zha_const.SIGNAL_GROUP_ENTITY_REMOVED, self._reprobe_group - ) - ) - - def cleanup(self) -> None: - """Clean up on when ZHA shuts down.""" - for unsub in self._unsubs[:]: - unsub() - self._unsubs.remove(unsub) - - @callback - def _reprobe_group(self, group_id: int) -> None: - """Reprobe a group for entities after its members change.""" - zha_gateway = get_zha_gateway(self._hass) - if (zha_group := zha_gateway.groups.get(group_id)) is None: - return - self.discover_group_entities(zha_group) - - @callback - def discover_group_entities(self, group: ZHAGroup) -> None: - """Process a group and create any entities that are needed.""" - # only create a group entity if there are 2 or more members in a group - if len(group.members) < 2: - _LOGGER.debug( - "Group: %s:0x%04x has less than 2 members - skipping entity discovery", - group.name, - group.group_id, - ) - return - - entity_domains = GroupProbe.determine_entity_domains(self._hass, group) - - if not entity_domains: - return - - zha_data = get_zha_data(self._hass) - zha_gateway = get_zha_gateway(self._hass) - - for domain in entity_domains: - entity_class = zha_regs.ZHA_ENTITIES.get_group_entity(domain) - if entity_class is None: - continue - 
zha_data.platforms[domain].append( - ( - entity_class, - ( - group.get_domain_entity_ids(domain), - f"{domain}_zha_group_0x{group.group_id:04x}", - group.group_id, - zha_gateway.coordinator_zha_device, - ), - {}, - ) - ) - async_dispatcher_send(self._hass, zha_const.SIGNAL_ADD_ENTITIES) - - @staticmethod - def determine_entity_domains( - hass: HomeAssistant, group: ZHAGroup - ) -> list[Platform]: - """Determine the entity domains for this group.""" - entity_registry = er.async_get(hass) - - entity_domains: list[Platform] = [] - all_domain_occurrences: list[Platform] = [] - - for member in group.members: - if member.device.is_coordinator: - continue - entities = async_entries_for_device( - entity_registry, - member.device.device_id, - include_disabled_entities=True, - ) - all_domain_occurrences.extend( - [ - cast(Platform, entity.domain) - for entity in entities - if entity.domain in zha_regs.GROUP_ENTITY_DOMAINS - ] - ) - if not all_domain_occurrences: - return entity_domains - # get all domains we care about if there are more than 2 entities of this domain - counts = Counter(all_domain_occurrences) - entity_domains = [domain[0] for domain in counts.items() if domain[1] >= 2] - _LOGGER.debug( - "The entity domains are: %s for group: %s:0x%04x", - entity_domains, - group.name, - group.group_id, - ) - return entity_domains - - -PROBE = ProbeEndpoint() -GROUP_PROBE = GroupProbe() diff --git a/homeassistant/components/zha/core/endpoint.py b/homeassistant/components/zha/core/endpoint.py deleted file mode 100644 index 32483a3bc53..00000000000 --- a/homeassistant/components/zha/core/endpoint.py +++ /dev/null @@ -1,253 +0,0 @@ -"""Representation of a Zigbee endpoint for zha.""" - -from __future__ import annotations - -import asyncio -from collections.abc import Awaitable, Callable -import functools -import logging -from typing import TYPE_CHECKING, Any, Final - -from homeassistant.const import Platform -from homeassistant.core import callback -from 
homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.util.async_ import gather_with_limited_concurrency - -from . import const, discovery, registries -from .cluster_handlers import ClusterHandler -from .helpers import get_zha_data - -if TYPE_CHECKING: - from zigpy import Endpoint as ZigpyEndpoint - - from .cluster_handlers import ClientClusterHandler - from .device import ZHADevice - -ATTR_DEVICE_TYPE: Final[str] = "device_type" -ATTR_PROFILE_ID: Final[str] = "profile_id" -ATTR_IN_CLUSTERS: Final[str] = "input_clusters" -ATTR_OUT_CLUSTERS: Final[str] = "output_clusters" - -_LOGGER = logging.getLogger(__name__) - - -class Endpoint: - """Endpoint for a zha device.""" - - def __init__(self, zigpy_endpoint: ZigpyEndpoint, device: ZHADevice) -> None: - """Initialize instance.""" - assert zigpy_endpoint is not None - assert device is not None - self._zigpy_endpoint: ZigpyEndpoint = zigpy_endpoint - self._device: ZHADevice = device - self._all_cluster_handlers: dict[str, ClusterHandler] = {} - self._claimed_cluster_handlers: dict[str, ClusterHandler] = {} - self._client_cluster_handlers: dict[str, ClientClusterHandler] = {} - self._unique_id: str = f"{device.ieee!s}-{zigpy_endpoint.endpoint_id}" - - @property - def device(self) -> ZHADevice: - """Return the device this endpoint belongs to.""" - return self._device - - @property - def all_cluster_handlers(self) -> dict[str, ClusterHandler]: - """All server cluster handlers of an endpoint.""" - return self._all_cluster_handlers - - @property - def claimed_cluster_handlers(self) -> dict[str, ClusterHandler]: - """Cluster handlers in use.""" - return self._claimed_cluster_handlers - - @property - def client_cluster_handlers(self) -> dict[str, ClientClusterHandler]: - """Return a dict of client cluster handlers.""" - return self._client_cluster_handlers - - @property - def zigpy_endpoint(self) -> ZigpyEndpoint: - """Return endpoint of zigpy device.""" - return self._zigpy_endpoint - - @property - 
def id(self) -> int: - """Return endpoint id.""" - return self._zigpy_endpoint.endpoint_id - - @property - def unique_id(self) -> str: - """Return the unique id for this endpoint.""" - return self._unique_id - - @property - def zigbee_signature(self) -> tuple[int, dict[str, Any]]: - """Get the zigbee signature for the endpoint this pool represents.""" - return ( - self.id, - { - ATTR_PROFILE_ID: f"0x{self._zigpy_endpoint.profile_id:04x}" - if self._zigpy_endpoint.profile_id is not None - else "", - ATTR_DEVICE_TYPE: f"0x{self._zigpy_endpoint.device_type:04x}" - if self._zigpy_endpoint.device_type is not None - else "", - ATTR_IN_CLUSTERS: [ - f"0x{cluster_id:04x}" - for cluster_id in sorted(self._zigpy_endpoint.in_clusters) - ], - ATTR_OUT_CLUSTERS: [ - f"0x{cluster_id:04x}" - for cluster_id in sorted(self._zigpy_endpoint.out_clusters) - ], - }, - ) - - @classmethod - def new(cls, zigpy_endpoint: ZigpyEndpoint, device: ZHADevice) -> Endpoint: - """Create new endpoint and populate cluster handlers.""" - endpoint = cls(zigpy_endpoint, device) - endpoint.add_all_cluster_handlers() - endpoint.add_client_cluster_handlers() - if not device.is_coordinator: - discovery.PROBE.discover_entities(endpoint) - return endpoint - - def add_all_cluster_handlers(self) -> None: - """Create and add cluster handlers for all input clusters.""" - for cluster_id, cluster in self.zigpy_endpoint.in_clusters.items(): - cluster_handler_classes = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id, {None: ClusterHandler} - ) - quirk_id = ( - self.device.quirk_id - if self.device.quirk_id in cluster_handler_classes - else None - ) - cluster_handler_class = cluster_handler_classes.get( - quirk_id, ClusterHandler - ) - - # Allow cluster handler to filter out bad matches - if not cluster_handler_class.matches(cluster, self): - cluster_handler_class = ClusterHandler - - _LOGGER.debug( - "Creating cluster handler for cluster id: %s class: %s", - cluster_id, - cluster_handler_class, - ) - - 
try: - cluster_handler = cluster_handler_class(cluster, self) - except KeyError as err: - _LOGGER.warning( - "Cluster handler %s for cluster %s on endpoint %s is invalid: %s", - cluster_handler_class, - cluster, - self, - err, - ) - continue - - if cluster_handler.name == const.CLUSTER_HANDLER_POWER_CONFIGURATION: - self._device.power_configuration_ch = cluster_handler - elif cluster_handler.name == const.CLUSTER_HANDLER_IDENTIFY: - self._device.identify_ch = cluster_handler - elif cluster_handler.name == const.CLUSTER_HANDLER_BASIC: - self._device.basic_ch = cluster_handler - self._all_cluster_handlers[cluster_handler.id] = cluster_handler - - def add_client_cluster_handlers(self) -> None: - """Create client cluster handlers for all output clusters if in the registry.""" - for ( - cluster_id, - cluster_handler_class, - ) in registries.CLIENT_CLUSTER_HANDLER_REGISTRY.items(): - cluster = self.zigpy_endpoint.out_clusters.get(cluster_id) - if cluster is not None: - cluster_handler = cluster_handler_class(cluster, self) - self.client_cluster_handlers[cluster_handler.id] = cluster_handler - - async def async_initialize(self, from_cache: bool = False) -> None: - """Initialize claimed cluster handlers.""" - await self._execute_handler_tasks( - "async_initialize", from_cache, max_concurrency=1 - ) - - async def async_configure(self) -> None: - """Configure claimed cluster handlers.""" - await self._execute_handler_tasks("async_configure") - - async def _execute_handler_tasks( - self, func_name: str, *args: Any, max_concurrency: int | None = None - ) -> None: - """Add a throttled cluster handler task and swallow exceptions.""" - cluster_handlers = [ - *self.claimed_cluster_handlers.values(), - *self.client_cluster_handlers.values(), - ] - tasks = [getattr(ch, func_name)(*args) for ch in cluster_handlers] - - gather: Callable[..., Awaitable] - - if max_concurrency is None: - gather = asyncio.gather - else: - gather = functools.partial(gather_with_limited_concurrency, 
max_concurrency) - - results = await gather(*tasks, return_exceptions=True) - for cluster_handler, outcome in zip(cluster_handlers, results, strict=False): - if isinstance(outcome, Exception): - cluster_handler.debug( - "'%s' stage failed: %s", func_name, str(outcome), exc_info=outcome - ) - else: - cluster_handler.debug("'%s' stage succeeded", func_name) - - def async_new_entity( - self, - platform: Platform, - entity_class: type, - unique_id: str, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Create a new entity.""" - from .device import DeviceStatus # pylint: disable=import-outside-toplevel - - if self.device.status == DeviceStatus.INITIALIZED: - return - - zha_data = get_zha_data(self.device.hass) - zha_data.platforms[platform].append( - (entity_class, (unique_id, self.device, cluster_handlers), kwargs or {}) - ) - - @callback - def async_send_signal(self, signal: str, *args: Any) -> None: - """Send a signal through hass dispatcher.""" - async_dispatcher_send(self.device.hass, signal, *args) - - def send_event(self, signal: dict[str, Any]) -> None: - """Broadcast an event from this endpoint.""" - self.device.zha_send_event( - { - const.ATTR_UNIQUE_ID: self.unique_id, - const.ATTR_ENDPOINT_ID: self.id, - **signal, - } - ) - - def claim_cluster_handlers(self, cluster_handlers: list[ClusterHandler]) -> None: - """Claim cluster handlers.""" - self.claimed_cluster_handlers.update({ch.id: ch for ch in cluster_handlers}) - - def unclaimed_cluster_handlers(self) -> list[ClusterHandler]: - """Return a list of available (unclaimed) cluster handlers.""" - claimed = set(self.claimed_cluster_handlers) - available = set(self.all_cluster_handlers) - return [ - self.all_cluster_handlers[cluster_id] - for cluster_id in (available - claimed) - ] diff --git a/homeassistant/components/zha/core/gateway.py b/homeassistant/components/zha/core/gateway.py deleted file mode 100644 index 8b8826e2648..00000000000 --- 
a/homeassistant/components/zha/core/gateway.py +++ /dev/null @@ -1,882 +0,0 @@ -"""Virtual gateway for Zigbee Home Automation.""" - -from __future__ import annotations - -import asyncio -import collections -from collections.abc import Callable -from contextlib import suppress -from datetime import timedelta -from enum import Enum -import itertools -import logging -import re -import time -from typing import TYPE_CHECKING, Any, NamedTuple, Self, cast - -from zigpy.application import ControllerApplication -from zigpy.config import ( - CONF_DATABASE, - CONF_DEVICE, - CONF_DEVICE_PATH, - CONF_NWK, - CONF_NWK_CHANNEL, - CONF_NWK_VALIDATE_SETTINGS, -) -import zigpy.device -import zigpy.endpoint -import zigpy.group -from zigpy.state import State -from zigpy.types.named import EUI64 - -from homeassistant import __path__ as HOMEASSISTANT_PATH -from homeassistant.components.system_log import LogEntry -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.typing import ConfigType -from homeassistant.util.async_ import gather_with_limited_concurrency - -from . 
import discovery -from .const import ( - ATTR_IEEE, - ATTR_MANUFACTURER, - ATTR_MODEL, - ATTR_NWK, - ATTR_SIGNATURE, - ATTR_TYPE, - CONF_RADIO_TYPE, - CONF_USE_THREAD, - CONF_ZIGPY, - DATA_ZHA, - DEBUG_COMP_BELLOWS, - DEBUG_COMP_ZHA, - DEBUG_COMP_ZIGPY, - DEBUG_COMP_ZIGPY_DECONZ, - DEBUG_COMP_ZIGPY_XBEE, - DEBUG_COMP_ZIGPY_ZIGATE, - DEBUG_COMP_ZIGPY_ZNP, - DEBUG_LEVEL_CURRENT, - DEBUG_LEVEL_ORIGINAL, - DEBUG_LEVELS, - DEBUG_RELAY_LOGGERS, - DEFAULT_DATABASE_NAME, - DEVICE_PAIRING_STATUS, - DOMAIN, - SIGNAL_ADD_ENTITIES, - SIGNAL_GROUP_MEMBERSHIP_CHANGE, - SIGNAL_REMOVE, - UNKNOWN_MANUFACTURER, - UNKNOWN_MODEL, - ZHA_GW_MSG, - ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO, - ZHA_GW_MSG_DEVICE_JOINED, - ZHA_GW_MSG_DEVICE_REMOVED, - ZHA_GW_MSG_GROUP_ADDED, - ZHA_GW_MSG_GROUP_INFO, - ZHA_GW_MSG_GROUP_MEMBER_ADDED, - ZHA_GW_MSG_GROUP_MEMBER_REMOVED, - ZHA_GW_MSG_GROUP_REMOVED, - ZHA_GW_MSG_LOG_ENTRY, - ZHA_GW_MSG_LOG_OUTPUT, - ZHA_GW_MSG_RAW_INIT, - RadioType, -) -from .device import DeviceStatus, ZHADevice -from .group import GroupMember, ZHAGroup -from .helpers import get_zha_data -from .registries import GROUP_ENTITY_DOMAINS - -if TYPE_CHECKING: - from logging import Filter, LogRecord - - from ..entity import ZhaEntity - from .cluster_handlers import ClusterHandler - - type _LogFilterType = Filter | Callable[[LogRecord], bool] - -_LOGGER = logging.getLogger(__name__) - - -class EntityReference(NamedTuple): - """Describes an entity reference.""" - - reference_id: str - zha_device: ZHADevice - cluster_handlers: dict[str, ClusterHandler] - device_info: DeviceInfo - remove_future: asyncio.Future[Any] - - -class DevicePairingStatus(Enum): - """Status of a device.""" - - PAIRED = 1 - INTERVIEW_COMPLETE = 2 - CONFIGURED = 3 - INITIALIZED = 4 - - -class ZHAGateway: - """Gateway that handles events that happen on the ZHA Zigbee network.""" - - def __init__( - self, hass: HomeAssistant, config: ConfigType, config_entry: ConfigEntry - ) -> None: - """Initialize the 
gateway.""" - self.hass = hass - self._config = config - self._devices: dict[EUI64, ZHADevice] = {} - self._groups: dict[int, ZHAGroup] = {} - self.application_controller: ControllerApplication = None - self.coordinator_zha_device: ZHADevice = None # type: ignore[assignment] - self._device_registry: collections.defaultdict[EUI64, list[EntityReference]] = ( - collections.defaultdict(list) - ) - self._log_levels: dict[str, dict[str, int]] = { - DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(), - DEBUG_LEVEL_CURRENT: async_capture_log_levels(), - } - self.debug_enabled = False - self._log_relay_handler = LogRelayHandler(hass, self) - self.config_entry = config_entry - self._unsubs: list[Callable[[], None]] = [] - - self.shutting_down = False - self._reload_task: asyncio.Task | None = None - - def get_application_controller_data(self) -> tuple[ControllerApplication, dict]: - """Get an uninitialized instance of a zigpy `ControllerApplication`.""" - radio_type = RadioType[self.config_entry.data[CONF_RADIO_TYPE]] - - app_config = self._config.get(CONF_ZIGPY, {}) - database = self._config.get( - CONF_DATABASE, - self.hass.config.path(DEFAULT_DATABASE_NAME), - ) - app_config[CONF_DATABASE] = database - app_config[CONF_DEVICE] = self.config_entry.data[CONF_DEVICE] - - if CONF_NWK_VALIDATE_SETTINGS not in app_config: - app_config[CONF_NWK_VALIDATE_SETTINGS] = True - - # The bellows UART thread sometimes propagates a cancellation into the main Core - # event loop, when a connection to a TCP coordinator fails in a specific way - if ( - CONF_USE_THREAD not in app_config - and radio_type is RadioType.ezsp - and app_config[CONF_DEVICE][CONF_DEVICE_PATH].startswith("socket://") - ): - app_config[CONF_USE_THREAD] = False - - # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( - is_multiprotocol_url, - ) - - # Until we have a way to coordinate channels 
with the Thread half of multi-PAN, - # stick to the old zigpy default of channel 15 instead of dynamically scanning - if ( - is_multiprotocol_url(app_config[CONF_DEVICE][CONF_DEVICE_PATH]) - and app_config.get(CONF_NWK, {}).get(CONF_NWK_CHANNEL) is None - ): - app_config.setdefault(CONF_NWK, {})[CONF_NWK_CHANNEL] = 15 - - return radio_type.controller, radio_type.controller.SCHEMA(app_config) - - @classmethod - async def async_from_config( - cls, hass: HomeAssistant, config: ConfigType, config_entry: ConfigEntry - ) -> Self: - """Create an instance of a gateway from config objects.""" - instance = cls(hass, config, config_entry) - await instance.async_initialize() - return instance - - async def async_initialize(self) -> None: - """Initialize controller and connect radio.""" - discovery.PROBE.initialize(self.hass) - discovery.GROUP_PROBE.initialize(self.hass) - - self.shutting_down = False - - app_controller_cls, app_config = self.get_application_controller_data() - app = await app_controller_cls.new( - config=app_config, - auto_form=False, - start_radio=False, - ) - - try: - await app.startup(auto_form=True) - except Exception: - # Explicitly shut down the controller application on failure - await app.shutdown() - raise - - self.application_controller = app - - zha_data = get_zha_data(self.hass) - zha_data.gateway = self - - self.coordinator_zha_device = self._async_get_or_create_device( - self._find_coordinator_device() - ) - - self.async_load_devices() - self.async_load_groups() - - self.application_controller.add_listener(self) - self.application_controller.groups.add_listener(self) - - def connection_lost(self, exc: Exception) -> None: - """Handle connection lost event.""" - _LOGGER.debug("Connection to the radio was lost: %r", exc) - - if self.shutting_down: - return - - # Ensure we do not queue up multiple resets - if self._reload_task is not None: - _LOGGER.debug("Ignoring reset, one is already running") - return - - self._reload_task = 
self.hass.async_create_task( - self.hass.config_entries.async_reload(self.config_entry.entry_id) - ) - - def _find_coordinator_device(self) -> zigpy.device.Device: - zigpy_coordinator = self.application_controller.get_device(nwk=0x0000) - - if last_backup := self.application_controller.backups.most_recent_backup(): - with suppress(KeyError): - zigpy_coordinator = self.application_controller.get_device( - ieee=last_backup.node_info.ieee - ) - - return zigpy_coordinator - - @callback - def async_load_devices(self) -> None: - """Restore ZHA devices from zigpy application state.""" - - for zigpy_device in self.application_controller.devices.values(): - zha_device = self._async_get_or_create_device(zigpy_device) - delta_msg = "not known" - if zha_device.last_seen is not None: - delta = round(time.time() - zha_device.last_seen) - delta_msg = f"{timedelta(seconds=delta)!s} ago" - _LOGGER.debug( - ( - "[%s](%s) restored as '%s', last seen: %s," - " consider_unavailable_time: %s seconds" - ), - zha_device.nwk, - zha_device.name, - "available" if zha_device.available else "unavailable", - delta_msg, - zha_device.consider_unavailable_time, - ) - - @callback - def async_load_groups(self) -> None: - """Initialize ZHA groups.""" - - for group_id in self.application_controller.groups: - group = self.application_controller.groups[group_id] - zha_group = self._async_get_or_create_group(group) - # we can do this here because the entities are in the - # entity registry tied to the devices - discovery.GROUP_PROBE.discover_group_entities(zha_group) - - @property - def radio_concurrency(self) -> int: - """Maximum configured radio concurrency.""" - return self.application_controller._concurrent_requests_semaphore.max_value # noqa: SLF001 - - async def async_fetch_updated_state_mains(self) -> None: - """Fetch updated state for mains powered devices.""" - _LOGGER.debug("Fetching current state for mains powered devices") - - now = time.time() - - # Only delay startup to poll mains-powered 
devices that are online - online_devices = [ - dev - for dev in self.devices.values() - if dev.is_mains_powered - and dev.last_seen is not None - and (now - dev.last_seen) < dev.consider_unavailable_time - ] - - # Prioritize devices that have recently been contacted - online_devices.sort(key=lambda dev: cast(float, dev.last_seen), reverse=True) - - # Make sure that we always leave slots for non-startup requests - max_poll_concurrency = max(1, self.radio_concurrency - 4) - - await gather_with_limited_concurrency( - max_poll_concurrency, - *(dev.async_initialize(from_cache=False) for dev in online_devices), - ) - - _LOGGER.debug("completed fetching current state for mains powered devices") - - async def async_initialize_devices_and_entities(self) -> None: - """Initialize devices and load entities.""" - - _LOGGER.debug("Initializing all devices from Zigpy cache") - await asyncio.gather( - *(dev.async_initialize(from_cache=True) for dev in self.devices.values()) - ) - - async def fetch_updated_state() -> None: - """Fetch updated state for mains powered devices.""" - await self.async_fetch_updated_state_mains() - _LOGGER.debug("Allowing polled requests") - self.hass.data[DATA_ZHA].allow_polling = True - - # background the fetching of state for mains powered devices - self.config_entry.async_create_background_task( - self.hass, fetch_updated_state(), "zha.gateway-fetch_updated_state" - ) - - def device_joined(self, device: zigpy.device.Device) -> None: - """Handle device joined. 
- - At this point, no information about the device is known other than its - address - """ - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_JOINED, - ZHA_GW_MSG_DEVICE_INFO: { - ATTR_NWK: device.nwk, - ATTR_IEEE: str(device.ieee), - DEVICE_PAIRING_STATUS: DevicePairingStatus.PAIRED.name, - }, - }, - ) - - def raw_device_initialized(self, device: zigpy.device.Device) -> None: - """Handle a device initialization without quirks loaded.""" - manuf = device.manufacturer - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_RAW_INIT, - ZHA_GW_MSG_DEVICE_INFO: { - ATTR_NWK: device.nwk, - ATTR_IEEE: str(device.ieee), - DEVICE_PAIRING_STATUS: DevicePairingStatus.INTERVIEW_COMPLETE.name, - ATTR_MODEL: device.model if device.model else UNKNOWN_MODEL, - ATTR_MANUFACTURER: manuf if manuf else UNKNOWN_MANUFACTURER, - ATTR_SIGNATURE: device.get_signature(), - }, - }, - ) - - def device_initialized(self, device: zigpy.device.Device) -> None: - """Handle device joined and basic information discovered.""" - self.hass.async_create_task(self.async_device_initialized(device)) - - def device_left(self, device: zigpy.device.Device) -> None: - """Handle device leaving the network.""" - self.async_update_device(device, False) - - def group_member_removed( - self, zigpy_group: zigpy.group.Group, endpoint: zigpy.endpoint.Endpoint - ) -> None: - """Handle zigpy group member removed event.""" - # need to handle endpoint correctly on groups - zha_group = self._async_get_or_create_group(zigpy_group) - zha_group.info("group_member_removed - endpoint: %s", endpoint) - self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_MEMBER_REMOVED) - async_dispatcher_send( - self.hass, f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{zigpy_group.group_id:04x}" - ) - - def group_member_added( - self, zigpy_group: zigpy.group.Group, endpoint: zigpy.endpoint.Endpoint - ) -> None: - """Handle zigpy group member added event.""" - # need to handle 
endpoint correctly on groups - zha_group = self._async_get_or_create_group(zigpy_group) - zha_group.info("group_member_added - endpoint: %s", endpoint) - self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_MEMBER_ADDED) - async_dispatcher_send( - self.hass, f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{zigpy_group.group_id:04x}" - ) - if len(zha_group.members) == 2: - # we need to do this because there wasn't already - # a group entity to remove and re-add - discovery.GROUP_PROBE.discover_group_entities(zha_group) - - def group_added(self, zigpy_group: zigpy.group.Group) -> None: - """Handle zigpy group added event.""" - zha_group = self._async_get_or_create_group(zigpy_group) - zha_group.info("group_added") - # need to dispatch for entity creation here - self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_ADDED) - - def group_removed(self, zigpy_group: zigpy.group.Group) -> None: - """Handle zigpy group removed event.""" - self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_REMOVED) - zha_group = self._groups.pop(zigpy_group.group_id) - zha_group.info("group_removed") - self._cleanup_group_entity_registry_entries(zigpy_group) - - def _send_group_gateway_message( - self, zigpy_group: zigpy.group.Group, gateway_message_type: str - ) -> None: - """Send the gateway event for a zigpy group event.""" - zha_group = self._groups.get(zigpy_group.group_id) - if zha_group is not None: - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: gateway_message_type, - ZHA_GW_MSG_GROUP_INFO: zha_group.group_info, - }, - ) - - async def _async_remove_device( - self, device: ZHADevice, entity_refs: list[EntityReference] | None - ) -> None: - if entity_refs is not None: - remove_tasks: list[asyncio.Future[Any]] = [ - entity_ref.remove_future for entity_ref in entity_refs - ] - if remove_tasks: - await asyncio.wait(remove_tasks) - - device_registry = dr.async_get(self.hass) - reg_device = device_registry.async_get(device.device_id) - if reg_device 
is not None: - device_registry.async_remove_device(reg_device.id) - - def device_removed(self, device: zigpy.device.Device) -> None: - """Handle device being removed from the network.""" - zha_device = self._devices.pop(device.ieee, None) - entity_refs = self._device_registry.pop(device.ieee, None) - if zha_device is not None: - device_info = zha_device.zha_device_info - zha_device.async_cleanup_handles() - async_dispatcher_send(self.hass, f"{SIGNAL_REMOVE}_{zha_device.ieee!s}") - self.hass.async_create_task( - self._async_remove_device(zha_device, entity_refs), - "ZHAGateway._async_remove_device", - ) - if device_info is not None: - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_REMOVED, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - - def get_device(self, ieee: EUI64) -> ZHADevice | None: - """Return ZHADevice for given ieee.""" - return self._devices.get(ieee) - - def get_group(self, group_id: int) -> ZHAGroup | None: - """Return Group for given group id.""" - return self.groups.get(group_id) - - @callback - def async_get_group_by_name(self, group_name: str) -> ZHAGroup | None: - """Get ZHA group by name.""" - for group in self.groups.values(): - if group.name == group_name: - return group - return None - - def get_entity_reference(self, entity_id: str) -> EntityReference | None: - """Return entity reference for given entity_id if found.""" - for entity_reference in itertools.chain.from_iterable( - self.device_registry.values() - ): - if entity_id == entity_reference.reference_id: - return entity_reference - return None - - def remove_entity_reference(self, entity: ZhaEntity) -> None: - """Remove entity reference for given entity_id if found.""" - if entity.zha_device.ieee in self.device_registry: - entity_refs = self.device_registry.get(entity.zha_device.ieee) - self.device_registry[entity.zha_device.ieee] = [ - e - for e in entity_refs # type: ignore[union-attr] - if e.reference_id != entity.entity_id - ] - - def 
_cleanup_group_entity_registry_entries( - self, zigpy_group: zigpy.group.Group - ) -> None: - """Remove entity registry entries for group entities when the groups are removed from HA.""" - # first we collect the potential unique ids for entities that could be created from this group - possible_entity_unique_ids = [ - f"{domain}_zha_group_0x{zigpy_group.group_id:04x}" - for domain in GROUP_ENTITY_DOMAINS - ] - - # then we get all group entity entries tied to the coordinator - entity_registry = er.async_get(self.hass) - assert self.coordinator_zha_device - all_group_entity_entries = er.async_entries_for_device( - entity_registry, - self.coordinator_zha_device.device_id, - include_disabled_entities=True, - ) - - # then we get the entity entries for this specific group - # by getting the entries that match - entries_to_remove = [ - entry - for entry in all_group_entity_entries - if entry.unique_id in possible_entity_unique_ids - ] - - # then we remove the entries from the entity registry - for entry in entries_to_remove: - _LOGGER.debug( - "cleaning up entity registry entry for entity: %s", entry.entity_id - ) - entity_registry.async_remove(entry.entity_id) - - @property - def state(self) -> State: - """Return the active coordinator's network state.""" - return self.application_controller.state - - @property - def devices(self) -> dict[EUI64, ZHADevice]: - """Return devices.""" - return self._devices - - @property - def groups(self) -> dict[int, ZHAGroup]: - """Return groups.""" - return self._groups - - @property - def device_registry(self) -> collections.defaultdict[EUI64, list[EntityReference]]: - """Return entities by ieee.""" - return self._device_registry - - def register_entity_reference( - self, - ieee: EUI64, - reference_id: str, - zha_device: ZHADevice, - cluster_handlers: dict[str, ClusterHandler], - device_info: DeviceInfo, - remove_future: asyncio.Future[Any], - ): - """Record the creation of a hass entity associated with ieee.""" - 
self._device_registry[ieee].append( - EntityReference( - reference_id=reference_id, - zha_device=zha_device, - cluster_handlers=cluster_handlers, - device_info=device_info, - remove_future=remove_future, - ) - ) - - @callback - def async_enable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: - """Enable debug mode for ZHA.""" - self._log_levels[DEBUG_LEVEL_ORIGINAL] = async_capture_log_levels() - async_set_logger_levels(DEBUG_LEVELS) - self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() - - if filterer: - self._log_relay_handler.addFilter(filterer) - - for logger_name in DEBUG_RELAY_LOGGERS: - logging.getLogger(logger_name).addHandler(self._log_relay_handler) - - self.debug_enabled = True - - @callback - def async_disable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: - """Disable debug mode for ZHA.""" - async_set_logger_levels(self._log_levels[DEBUG_LEVEL_ORIGINAL]) - self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() - for logger_name in DEBUG_RELAY_LOGGERS: - logging.getLogger(logger_name).removeHandler(self._log_relay_handler) - if filterer: - self._log_relay_handler.removeFilter(filterer) - self.debug_enabled = False - - @callback - def _async_get_or_create_device( - self, zigpy_device: zigpy.device.Device - ) -> ZHADevice: - """Get or create a ZHA device.""" - if (zha_device := self._devices.get(zigpy_device.ieee)) is None: - zha_device = ZHADevice.new(self.hass, zigpy_device, self) - self._devices[zigpy_device.ieee] = zha_device - - device_registry = dr.async_get(self.hass) - device_registry_device = device_registry.async_get_or_create( - config_entry_id=self.config_entry.entry_id, - connections={(dr.CONNECTION_ZIGBEE, str(zha_device.ieee))}, - identifiers={(DOMAIN, str(zha_device.ieee))}, - name=zha_device.name, - manufacturer=zha_device.manufacturer, - model=zha_device.model, - ) - zha_device.set_device_id(device_registry_device.id) - return zha_device - - @callback - def 
_async_get_or_create_group(self, zigpy_group: zigpy.group.Group) -> ZHAGroup: - """Get or create a ZHA group.""" - zha_group = self._groups.get(zigpy_group.group_id) - if zha_group is None: - zha_group = ZHAGroup(self.hass, self, zigpy_group) - self._groups[zigpy_group.group_id] = zha_group - return zha_group - - @callback - def async_update_device( - self, sender: zigpy.device.Device, available: bool = True - ) -> None: - """Update device that has just become available.""" - if sender.ieee in self.devices: - device = self.devices[sender.ieee] - # avoid a race condition during new joins - if device.status is DeviceStatus.INITIALIZED: - device.update_available(available) - - async def async_device_initialized(self, device: zigpy.device.Device) -> None: - """Handle device joined and basic information discovered (async).""" - zha_device = self._async_get_or_create_device(device) - _LOGGER.debug( - "device - %s:%s entering async_device_initialized - is_new_join: %s", - device.nwk, - device.ieee, - zha_device.status is not DeviceStatus.INITIALIZED, - ) - - if zha_device.status is DeviceStatus.INITIALIZED: - # ZHA already has an initialized device so either the device was assigned a - # new nwk or device was physically reset and added again without being removed - _LOGGER.debug( - "device - %s:%s has been reset and re-added or its nwk address changed", - device.nwk, - device.ieee, - ) - await self._async_device_rejoined(zha_device) - else: - _LOGGER.debug( - "device - %s:%s has joined the ZHA zigbee network", - device.nwk, - device.ieee, - ) - await self._async_device_joined(zha_device) - - device_info = zha_device.zha_device_info - device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.INITIALIZED.name - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - - async def _async_device_joined(self, zha_device: ZHADevice) -> None: - zha_device.available = True - device_info = 
zha_device.device_info - await zha_device.async_configure() - device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.CONFIGURED.name - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - await zha_device.async_initialize(from_cache=False) - async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) - - async def _async_device_rejoined(self, zha_device: ZHADevice) -> None: - _LOGGER.debug( - "skipping discovery for previously discovered device - %s:%s", - zha_device.nwk, - zha_device.ieee, - ) - # we don't have to do this on a nwk swap - # but we don't have a way to tell currently - await zha_device.async_configure() - device_info = zha_device.device_info - device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.CONFIGURED.name - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - # force async_initialize() to fire so don't explicitly call it - zha_device.available = False - zha_device.update_available(True) - - async def async_create_zigpy_group( - self, - name: str, - members: list[GroupMember] | None, - group_id: int | None = None, - ) -> ZHAGroup | None: - """Create a new Zigpy Zigbee group.""" - - # we start with two to fill any gaps from a user removing existing groups - - if group_id is None: - group_id = 2 - while group_id in self.groups: - group_id += 1 - - # guard against group already existing - if self.async_get_group_by_name(name) is None: - self.application_controller.groups.add_group(group_id, name) - if members is not None: - tasks = [] - for member in members: - _LOGGER.debug( - ( - "Adding member with IEEE: %s and endpoint ID: %s to group:" - " %s:0x%04x" - ), - member.ieee, - member.endpoint_id, - name, - group_id, - ) - tasks.append( - self.devices[member.ieee].async_add_endpoint_to_group( - member.endpoint_id, group_id - ) - ) - await asyncio.gather(*tasks) - 
return self.groups.get(group_id) - - async def async_remove_zigpy_group(self, group_id: int) -> None: - """Remove a Zigbee group from Zigpy.""" - if not (group := self.groups.get(group_id)): - _LOGGER.debug("Group: 0x%04x could not be found", group_id) - return - if group.members: - tasks = [member.async_remove_from_group() for member in group.members] - if tasks: - await asyncio.gather(*tasks) - self.application_controller.groups.pop(group_id) - - async def shutdown(self) -> None: - """Stop ZHA Controller Application.""" - if self.shutting_down: - _LOGGER.debug("Ignoring duplicate shutdown event") - return - - _LOGGER.debug("Shutting down ZHA ControllerApplication") - self.shutting_down = True - - for unsubscribe in self._unsubs: - unsubscribe() - for device in self.devices.values(): - device.async_cleanup_handles() - await self.application_controller.shutdown() - - def handle_message( - self, - sender: zigpy.device.Device, - profile: int, - cluster: int, - src_ep: int, - dst_ep: int, - message: bytes, - ) -> None: - """Handle message from a device Event handler.""" - if sender.ieee in self.devices and not self.devices[sender.ieee].available: - self.async_update_device(sender, available=True) - - -@callback -def async_capture_log_levels() -> dict[str, int]: - """Capture current logger levels for ZHA.""" - return { - DEBUG_COMP_BELLOWS: logging.getLogger(DEBUG_COMP_BELLOWS).getEffectiveLevel(), - DEBUG_COMP_ZHA: logging.getLogger(DEBUG_COMP_ZHA).getEffectiveLevel(), - DEBUG_COMP_ZIGPY: logging.getLogger(DEBUG_COMP_ZIGPY).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_ZNP: logging.getLogger( - DEBUG_COMP_ZIGPY_ZNP - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_DECONZ: logging.getLogger( - DEBUG_COMP_ZIGPY_DECONZ - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_XBEE: logging.getLogger( - DEBUG_COMP_ZIGPY_XBEE - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_ZIGATE: logging.getLogger( - DEBUG_COMP_ZIGPY_ZIGATE - ).getEffectiveLevel(), - } - - -@callback -def 
async_set_logger_levels(levels: dict[str, int]) -> None: - """Set logger levels for ZHA.""" - logging.getLogger(DEBUG_COMP_BELLOWS).setLevel(levels[DEBUG_COMP_BELLOWS]) - logging.getLogger(DEBUG_COMP_ZHA).setLevel(levels[DEBUG_COMP_ZHA]) - logging.getLogger(DEBUG_COMP_ZIGPY).setLevel(levels[DEBUG_COMP_ZIGPY]) - logging.getLogger(DEBUG_COMP_ZIGPY_ZNP).setLevel(levels[DEBUG_COMP_ZIGPY_ZNP]) - logging.getLogger(DEBUG_COMP_ZIGPY_DECONZ).setLevel(levels[DEBUG_COMP_ZIGPY_DECONZ]) - logging.getLogger(DEBUG_COMP_ZIGPY_XBEE).setLevel(levels[DEBUG_COMP_ZIGPY_XBEE]) - logging.getLogger(DEBUG_COMP_ZIGPY_ZIGATE).setLevel(levels[DEBUG_COMP_ZIGPY_ZIGATE]) - - -class LogRelayHandler(logging.Handler): - """Log handler for error messages.""" - - def __init__(self, hass: HomeAssistant, gateway: ZHAGateway) -> None: - """Initialize a new LogErrorHandler.""" - super().__init__() - self.hass = hass - self.gateway = gateway - hass_path: str = HOMEASSISTANT_PATH[0] - config_dir = self.hass.config.config_dir - self.paths_re = re.compile( - r"(?:{})/(.*)".format( - "|".join([re.escape(x) for x in (hass_path, config_dir)]) - ) - ) - - def emit(self, record: LogRecord) -> None: - """Relay log message via dispatcher.""" - entry = LogEntry( - record, - self.paths_re, - formatter=self.formatter, - figure_out_source=record.levelno >= logging.WARNING, - ) - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - {ATTR_TYPE: ZHA_GW_MSG_LOG_OUTPUT, ZHA_GW_MSG_LOG_ENTRY: entry.to_dict()}, - ) diff --git a/homeassistant/components/zha/core/group.py b/homeassistant/components/zha/core/group.py deleted file mode 100644 index a6156ab63b7..00000000000 --- a/homeassistant/components/zha/core/group.py +++ /dev/null @@ -1,246 +0,0 @@ -"""Group for Zigbee Home Automation.""" - -from __future__ import annotations - -import asyncio -import logging -from typing import TYPE_CHECKING, Any, NamedTuple - -import zigpy.endpoint -import zigpy.exceptions -import zigpy.group -from zigpy.types.named import EUI64 - -from 
homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_registry import async_entries_for_device - -from .helpers import LogMixin - -if TYPE_CHECKING: - from .device import ZHADevice - from .gateway import ZHAGateway - -_LOGGER = logging.getLogger(__name__) - - -class GroupMember(NamedTuple): - """Describes a group member.""" - - ieee: EUI64 - endpoint_id: int - - -class GroupEntityReference(NamedTuple): - """Reference to a group entity.""" - - name: str | None - original_name: str | None - entity_id: int - - -class ZHAGroupMember(LogMixin): - """Composite object that represents a device endpoint in a Zigbee group.""" - - def __init__( - self, zha_group: ZHAGroup, zha_device: ZHADevice, endpoint_id: int - ) -> None: - """Initialize the group member.""" - self._zha_group = zha_group - self._zha_device = zha_device - self._endpoint_id = endpoint_id - - @property - def group(self) -> ZHAGroup: - """Return the group this member belongs to.""" - return self._zha_group - - @property - def endpoint_id(self) -> int: - """Return the endpoint id for this group member.""" - return self._endpoint_id - - @property - def endpoint(self) -> zigpy.endpoint.Endpoint: - """Return the endpoint for this group member.""" - return self._zha_device.device.endpoints.get(self.endpoint_id) - - @property - def device(self) -> ZHADevice: - """Return the ZHA device for this group member.""" - return self._zha_device - - @property - def member_info(self) -> dict[str, Any]: - """Get ZHA group info.""" - member_info: dict[str, Any] = {} - member_info["endpoint_id"] = self.endpoint_id - member_info["device"] = self.device.zha_device_info - member_info["entities"] = self.associated_entities - return member_info - - @property - def associated_entities(self) -> list[dict[str, Any]]: - """Return the list of entities that were derived from this endpoint.""" - entity_registry = er.async_get(self._zha_device.hass) - 
zha_device_registry = self.device.gateway.device_registry - - entity_info = [] - - for entity_ref in zha_device_registry.get(self.device.ieee): - # We have device entities now that don't leverage cluster handlers - if not entity_ref.cluster_handlers: - continue - entity = entity_registry.async_get(entity_ref.reference_id) - handler = list(entity_ref.cluster_handlers.values())[0] - - if ( - entity is None - or handler.cluster.endpoint.endpoint_id != self.endpoint_id - ): - continue - - entity_info.append( - GroupEntityReference( - name=entity.name, - original_name=entity.original_name, - entity_id=entity_ref.reference_id, - )._asdict() - ) - - return entity_info - - async def async_remove_from_group(self) -> None: - """Remove the device endpoint from the provided zigbee group.""" - try: - await self._zha_device.device.endpoints[ - self._endpoint_id - ].remove_from_group(self._zha_group.group_id) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - ( - "Failed to remove endpoint: %s for device '%s' from group: 0x%04x" - " ex: %s" - ), - self._endpoint_id, - self._zha_device.ieee, - self._zha_group.group_id, - str(ex), - ) - - def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: - """Log a message.""" - msg = f"[%s](%s): {msg}" - args = (f"0x{self._zha_group.group_id:04x}", self.endpoint_id, *args) - _LOGGER.log(level, msg, *args, **kwargs) - - -class ZHAGroup(LogMixin): - """ZHA Zigbee group object.""" - - def __init__( - self, - hass: HomeAssistant, - zha_gateway: ZHAGateway, - zigpy_group: zigpy.group.Group, - ) -> None: - """Initialize the group.""" - self.hass = hass - self._zha_gateway = zha_gateway - self._zigpy_group = zigpy_group - - @property - def name(self) -> str: - """Return group name.""" - return self._zigpy_group.name - - @property - def group_id(self) -> int: - """Return group name.""" - return self._zigpy_group.group_id - - @property - def endpoint(self) -> zigpy.endpoint.Endpoint: - """Return the endpoint 
for this group.""" - return self._zigpy_group.endpoint - - @property - def members(self) -> list[ZHAGroupMember]: - """Return the ZHA devices that are members of this group.""" - return [ - ZHAGroupMember(self, self._zha_gateway.devices[member_ieee], endpoint_id) - for (member_ieee, endpoint_id) in self._zigpy_group.members - if member_ieee in self._zha_gateway.devices - ] - - async def async_add_members(self, members: list[GroupMember]) -> None: - """Add members to this group.""" - if len(members) > 1: - tasks = [ - self._zha_gateway.devices[member.ieee].async_add_endpoint_to_group( - member.endpoint_id, self.group_id - ) - for member in members - ] - await asyncio.gather(*tasks) - else: - await self._zha_gateway.devices[ - members[0].ieee - ].async_add_endpoint_to_group(members[0].endpoint_id, self.group_id) - - async def async_remove_members(self, members: list[GroupMember]) -> None: - """Remove members from this group.""" - if len(members) > 1: - tasks = [ - self._zha_gateway.devices[member.ieee].async_remove_endpoint_from_group( - member.endpoint_id, self.group_id - ) - for member in members - ] - await asyncio.gather(*tasks) - else: - await self._zha_gateway.devices[ - members[0].ieee - ].async_remove_endpoint_from_group(members[0].endpoint_id, self.group_id) - - @property - def member_entity_ids(self) -> list[str]: - """Return the ZHA entity ids for all entities for the members of this group.""" - return [ - entity_reference["entity_id"] - for member in self.members - for entity_reference in member.associated_entities - ] - - def get_domain_entity_ids(self, domain: str) -> list[str]: - """Return entity ids from the entity domain for this group.""" - entity_registry = er.async_get(self.hass) - domain_entity_ids: list[str] = [] - - for member in self.members: - if member.device.is_coordinator: - continue - entities = async_entries_for_device( - entity_registry, - member.device.device_id, - include_disabled_entities=True, - ) - domain_entity_ids.extend( - 
[entity.entity_id for entity in entities if entity.domain == domain] - ) - return domain_entity_ids - - @property - def group_info(self) -> dict[str, Any]: - """Get ZHA group info.""" - group_info: dict[str, Any] = {} - group_info["group_id"] = self.group_id - group_info["name"] = self.name - group_info["members"] = [member.member_info for member in self.members] - return group_info - - def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: - """Log a message.""" - msg = f"[%s](%s): {msg}" - args = (self.name, self.group_id, *args) - _LOGGER.log(level, msg, *args, **kwargs) diff --git a/homeassistant/components/zha/core/helpers.py b/homeassistant/components/zha/core/helpers.py deleted file mode 100644 index 2508dd34fd4..00000000000 --- a/homeassistant/components/zha/core/helpers.py +++ /dev/null @@ -1,523 +0,0 @@ -"""Helpers for Zigbee Home Automation. - -For more details about this component, please refer to the documentation at -https://home-assistant.io/integrations/zha/ -""" - -from __future__ import annotations - -import binascii -import collections -from collections.abc import Callable, Iterator -import dataclasses -from dataclasses import dataclass -import enum -import logging -import re -from typing import TYPE_CHECKING, Any, overload - -import voluptuous as vol -import zigpy.exceptions -import zigpy.types -import zigpy.util -import zigpy.zcl -from zigpy.zcl.foundation import CommandSchema -import zigpy.zdo.types as zdo_types - -from homeassistant.components.binary_sensor import BinarySensorDeviceClass -from homeassistant.components.number import NumberDeviceClass -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - Platform, - UnitOfApparentPower, - UnitOfDataRate, - UnitOfElectricCurrent, - UnitOfElectricPotential, - UnitOfEnergy, - UnitOfFrequency, - UnitOfInformation, - UnitOfIrradiance, - UnitOfLength, - UnitOfMass, - UnitOfPower, - 
UnitOfPrecipitationDepth, - UnitOfPressure, - UnitOfSoundPressure, - UnitOfSpeed, - UnitOfTemperature, - UnitOfTime, - UnitOfVolume, - UnitOfVolumeFlowRate, - UnitOfVolumetricFlux, -) -from homeassistant.core import HomeAssistant, State, callback -from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.typing import ConfigType - -from .const import CLUSTER_TYPE_IN, CLUSTER_TYPE_OUT, CUSTOM_CONFIGURATION, DATA_ZHA -from .registries import BINDABLE_CLUSTERS - -if TYPE_CHECKING: - from .device import ZHADevice - from .gateway import ZHAGateway - -_LOGGER = logging.getLogger(__name__) - - -@dataclass -class BindingPair: - """Information for binding.""" - - source_cluster: zigpy.zcl.Cluster - target_ieee: zigpy.types.EUI64 - target_ep_id: int - - @property - def destination_address(self) -> zdo_types.MultiAddress: - """Return a ZDO multi address instance.""" - return zdo_types.MultiAddress( - addrmode=3, ieee=self.target_ieee, endpoint=self.target_ep_id - ) - - -async def safe_read( - cluster, attributes, allow_cache=True, only_cache=False, manufacturer=None -): - """Swallow all exceptions from network read. - - If we throw during initialization, setup fails. Rather have an entity that - exists, but is in a maybe wrong state, than no entity. This method should - probably only be used during initialization. 
- """ - try: - result, _ = await cluster.read_attributes( - attributes, - allow_cache=allow_cache, - only_cache=only_cache, - manufacturer=manufacturer, - ) - except Exception: # noqa: BLE001 - return {} - return result - - -async def get_matched_clusters( - source_zha_device: ZHADevice, target_zha_device: ZHADevice -) -> list[BindingPair]: - """Get matched input/output cluster pairs for 2 devices.""" - source_clusters = source_zha_device.async_get_std_clusters() - target_clusters = target_zha_device.async_get_std_clusters() - clusters_to_bind = [] - - for endpoint_id in source_clusters: - for cluster_id in source_clusters[endpoint_id][CLUSTER_TYPE_OUT]: - if cluster_id not in BINDABLE_CLUSTERS: - continue - if target_zha_device.nwk == 0x0000: - cluster_pair = BindingPair( - source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][ - cluster_id - ], - target_ieee=target_zha_device.ieee, - target_ep_id=target_zha_device.device.application.get_endpoint_id( - cluster_id, is_server_cluster=True - ), - ) - clusters_to_bind.append(cluster_pair) - continue - for t_endpoint_id in target_clusters: - if cluster_id in target_clusters[t_endpoint_id][CLUSTER_TYPE_IN]: - cluster_pair = BindingPair( - source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][ - cluster_id - ], - target_ieee=target_zha_device.ieee, - target_ep_id=t_endpoint_id, - ) - clusters_to_bind.append(cluster_pair) - return clusters_to_bind - - -def cluster_command_schema_to_vol_schema(schema: CommandSchema) -> vol.Schema: - """Convert a cluster command schema to a voluptuous schema.""" - return vol.Schema( - { - vol.Optional(field.name) - if field.optional - else vol.Required(field.name): schema_type_to_vol(field.type) - for field in schema.fields - } - ) - - -def schema_type_to_vol(field_type: Any) -> Any: - """Convert a schema type to a voluptuous type.""" - if issubclass(field_type, enum.Flag) and field_type.__members__: - return cv.multi_select( - [key.replace("_", " ") for key in 
field_type.__members__] - ) - if issubclass(field_type, enum.Enum) and field_type.__members__: - return vol.In([key.replace("_", " ") for key in field_type.__members__]) - if ( - issubclass(field_type, zigpy.types.FixedIntType) - or issubclass(field_type, enum.Flag) - or issubclass(field_type, enum.Enum) - ): - return vol.All( - vol.Coerce(int), vol.Range(field_type.min_value, field_type.max_value) - ) - return str - - -def convert_to_zcl_values( - fields: dict[str, Any], schema: CommandSchema -) -> dict[str, Any]: - """Convert user input to ZCL values.""" - converted_fields: dict[str, Any] = {} - for field in schema.fields: - if field.name not in fields: - continue - value = fields[field.name] - if issubclass(field.type, enum.Flag) and isinstance(value, list): - new_value = 0 - - for flag in value: - if isinstance(flag, str): - new_value |= field.type[flag.replace(" ", "_")] - else: - new_value |= flag - - value = field.type(new_value) - elif issubclass(field.type, enum.Enum): - value = ( - field.type[value.replace(" ", "_")] - if isinstance(value, str) - else field.type(value) - ) - else: - value = field.type(value) - _LOGGER.debug( - "Converted ZCL schema field(%s) value from: %s to: %s", - field.name, - fields[field.name], - value, - ) - converted_fields[field.name] = value - return converted_fields - - -@callback -def async_is_bindable_target(source_zha_device, target_zha_device): - """Determine if target is bindable to source.""" - if target_zha_device.nwk == 0x0000: - return True - - source_clusters = source_zha_device.async_get_std_clusters() - target_clusters = target_zha_device.async_get_std_clusters() - - for endpoint_id in source_clusters: - for t_endpoint_id in target_clusters: - matches = set( - source_clusters[endpoint_id][CLUSTER_TYPE_OUT].keys() - ).intersection(target_clusters[t_endpoint_id][CLUSTER_TYPE_IN].keys()) - if any(bindable in BINDABLE_CLUSTERS for bindable in matches): - return True - return False - - -@callback -def 
async_get_zha_config_value[_T]( - config_entry: ConfigEntry, section: str, config_key: str, default: _T -) -> _T: - """Get the value for the specified configuration from the ZHA config entry.""" - return ( - config_entry.options.get(CUSTOM_CONFIGURATION, {}) - .get(section, {}) - .get(config_key, default) - ) - - -def async_cluster_exists(hass: HomeAssistant, cluster_id, skip_coordinator=True): - """Determine if a device containing the specified in cluster is paired.""" - zha_gateway = get_zha_gateway(hass) - zha_devices = zha_gateway.devices.values() - for zha_device in zha_devices: - if skip_coordinator and zha_device.is_coordinator: - continue - clusters_by_endpoint = zha_device.async_get_clusters() - for clusters in clusters_by_endpoint.values(): - if ( - cluster_id in clusters[CLUSTER_TYPE_IN] - or cluster_id in clusters[CLUSTER_TYPE_OUT] - ): - return True - return False - - -@callback -def async_get_zha_device(hass: HomeAssistant, device_id: str) -> ZHADevice: - """Get a ZHA device for the given device registry id.""" - device_registry = dr.async_get(hass) - registry_device = device_registry.async_get(device_id) - if not registry_device: - _LOGGER.error("Device id `%s` not found in registry", device_id) - raise KeyError(f"Device id `{device_id}` not found in registry.") - zha_gateway = get_zha_gateway(hass) - try: - ieee_address = list(registry_device.identifiers)[0][1] - ieee = zigpy.types.EUI64.convert(ieee_address) - except (IndexError, ValueError) as ex: - _LOGGER.error( - "Unable to determine device IEEE for device with device id `%s`", device_id - ) - raise KeyError( - f"Unable to determine device IEEE for device with device id `{device_id}`." 
- ) from ex - return zha_gateway.devices[ieee] - - -def find_state_attributes(states: list[State], key: str) -> Iterator[Any]: - """Find attributes with matching key from states.""" - for state in states: - if (value := state.attributes.get(key)) is not None: - yield value - - -def mean_int(*args): - """Return the mean of the supplied values.""" - return int(sum(args) / len(args)) - - -def mean_tuple(*args): - """Return the mean values along the columns of the supplied values.""" - return tuple(sum(x) / len(x) for x in zip(*args, strict=False)) - - -def reduce_attribute( - states: list[State], - key: str, - default: Any | None = None, - reduce: Callable[..., Any] = mean_int, -) -> Any: - """Find the first attribute matching key from states. - - If none are found, return default. - """ - attrs = list(find_state_attributes(states, key)) - - if not attrs: - return default - - if len(attrs) == 1: - return attrs[0] - - return reduce(*attrs) - - -class LogMixin: - """Log helper.""" - - def log(self, level, msg, *args, **kwargs): - """Log with level.""" - raise NotImplementedError - - def debug(self, msg, *args, **kwargs): - """Debug level log.""" - return self.log(logging.DEBUG, msg, *args, **kwargs) - - def info(self, msg, *args, **kwargs): - """Info level log.""" - return self.log(logging.INFO, msg, *args, **kwargs) - - def warning(self, msg, *args, **kwargs): - """Warning method log.""" - return self.log(logging.WARNING, msg, *args, **kwargs) - - def error(self, msg, *args, **kwargs): - """Error level log.""" - return self.log(logging.ERROR, msg, *args, **kwargs) - - -def convert_install_code(value: str) -> zigpy.types.KeyData: - """Convert string to install code bytes and validate length.""" - - try: - code = binascii.unhexlify(value.replace("-", "").lower()) - except binascii.Error as exc: - raise vol.Invalid(f"invalid hex string: {value}") from exc - - if len(code) != 18: # 16 byte code + 2 crc bytes - raise vol.Invalid("invalid length of the install code") - - 
link_key = zigpy.util.convert_install_code(code) - if link_key is None: - raise vol.Invalid("invalid install code") - - return link_key - - -QR_CODES = ( - # Consciot - r"^([\da-fA-F]{16})\|([\da-fA-F]{36})$", - # Enbrighten - r""" - ^Z: - ([0-9a-fA-F]{16}) # IEEE address - \$I: - ([0-9a-fA-F]{36}) # install code - $ - """, - # Aqara - r""" - \$A: - ([0-9a-fA-F]{16}) # IEEE address - \$I: - ([0-9a-fA-F]{36}) # install code - $ - """, - # Bosch - r""" - ^RB01SG - [0-9a-fA-F]{34} - ([0-9a-fA-F]{16}) # IEEE address - DLK - ([0-9a-fA-F]{36}|[0-9a-fA-F]{32}) # install code / link key - $ - """, -) - - -def qr_to_install_code(qr_code: str) -> tuple[zigpy.types.EUI64, zigpy.types.KeyData]: - """Try to parse the QR code. - - if successful, return a tuple of a EUI64 address and install code. - """ - - for code_pattern in QR_CODES: - match = re.search(code_pattern, qr_code, re.VERBOSE) - if match is None: - continue - - ieee_hex = binascii.unhexlify(match[1]) - ieee = zigpy.types.EUI64(ieee_hex[::-1]) - - # Bosch supplies (A) device specific link key (DSLK) or (A) install code + crc - if "RB01SG" in code_pattern and len(match[2]) == 32: - link_key_hex = binascii.unhexlify(match[2]) - link_key = zigpy.types.KeyData(link_key_hex) - return ieee, link_key - install_code = match[2] - # install_code sanity check - link_key = convert_install_code(install_code) - return ieee, link_key - - raise vol.Invalid(f"couldn't convert qr code: {qr_code}") - - -@dataclasses.dataclass(kw_only=True, slots=True) -class ZHAData: - """ZHA component data stored in `hass.data`.""" - - yaml_config: ConfigType = dataclasses.field(default_factory=dict) - platforms: collections.defaultdict[Platform, list] = dataclasses.field( - default_factory=lambda: collections.defaultdict(list) - ) - gateway: ZHAGateway | None = dataclasses.field(default=None) - device_trigger_cache: dict[str, tuple[str, dict]] = dataclasses.field( - default_factory=dict - ) - allow_polling: bool = dataclasses.field(default=False) - - 
-def get_zha_data(hass: HomeAssistant) -> ZHAData: - """Get the global ZHA data object.""" - if DATA_ZHA not in hass.data: - hass.data[DATA_ZHA] = ZHAData() - - return hass.data[DATA_ZHA] - - -def get_zha_gateway(hass: HomeAssistant) -> ZHAGateway: - """Get the ZHA gateway object.""" - if (zha_gateway := get_zha_data(hass).gateway) is None: - raise ValueError("No gateway object exists") - - return zha_gateway - - -UNITS_OF_MEASURE = { - UnitOfApparentPower.__name__: UnitOfApparentPower, - UnitOfPower.__name__: UnitOfPower, - UnitOfEnergy.__name__: UnitOfEnergy, - UnitOfElectricCurrent.__name__: UnitOfElectricCurrent, - UnitOfElectricPotential.__name__: UnitOfElectricPotential, - UnitOfTemperature.__name__: UnitOfTemperature, - UnitOfTime.__name__: UnitOfTime, - UnitOfLength.__name__: UnitOfLength, - UnitOfFrequency.__name__: UnitOfFrequency, - UnitOfPressure.__name__: UnitOfPressure, - UnitOfSoundPressure.__name__: UnitOfSoundPressure, - UnitOfVolume.__name__: UnitOfVolume, - UnitOfVolumeFlowRate.__name__: UnitOfVolumeFlowRate, - UnitOfMass.__name__: UnitOfMass, - UnitOfIrradiance.__name__: UnitOfIrradiance, - UnitOfVolumetricFlux.__name__: UnitOfVolumetricFlux, - UnitOfPrecipitationDepth.__name__: UnitOfPrecipitationDepth, - UnitOfSpeed.__name__: UnitOfSpeed, - UnitOfInformation.__name__: UnitOfInformation, - UnitOfDataRate.__name__: UnitOfDataRate, -} - - -def validate_unit(quirks_unit: enum.Enum) -> enum.Enum: - """Validate and return a unit of measure.""" - return UNITS_OF_MEASURE[type(quirks_unit).__name__](quirks_unit.value) - - -@overload -def validate_device_class( - device_class_enum: type[BinarySensorDeviceClass], - metadata_value, - platform: str, - logger: logging.Logger, -) -> BinarySensorDeviceClass | None: ... - - -@overload -def validate_device_class( - device_class_enum: type[SensorDeviceClass], - metadata_value, - platform: str, - logger: logging.Logger, -) -> SensorDeviceClass | None: ... 
- - -@overload -def validate_device_class( - device_class_enum: type[NumberDeviceClass], - metadata_value, - platform: str, - logger: logging.Logger, -) -> NumberDeviceClass | None: ... - - -def validate_device_class( - device_class_enum: type[ - BinarySensorDeviceClass | SensorDeviceClass | NumberDeviceClass - ], - metadata_value: enum.Enum, - platform: str, - logger: logging.Logger, -) -> BinarySensorDeviceClass | SensorDeviceClass | NumberDeviceClass | None: - """Validate and return a device class.""" - try: - return device_class_enum(metadata_value.value) - except ValueError as ex: - logger.warning( - "Quirks provided an invalid device class: %s for platform %s: %s", - metadata_value, - platform, - ex, - ) - return None diff --git a/homeassistant/components/zha/core/registries.py b/homeassistant/components/zha/core/registries.py deleted file mode 100644 index 9d23b77efaa..00000000000 --- a/homeassistant/components/zha/core/registries.py +++ /dev/null @@ -1,516 +0,0 @@ -"""Mapping registries for Zigbee Home Automation.""" - -from __future__ import annotations - -import collections -from collections.abc import Callable -import dataclasses -from operator import attrgetter -from typing import TYPE_CHECKING - -import attr -from zigpy import zcl -import zigpy.profiles.zha -import zigpy.profiles.zll -from zigpy.types.named import EUI64 - -from homeassistant.const import Platform - -from .decorators import DictRegistry, NestedDictRegistry, SetRegistry - -if TYPE_CHECKING: - from ..entity import ZhaEntity, ZhaGroupEntity - from .cluster_handlers import ClientClusterHandler, ClusterHandler - - -GROUP_ENTITY_DOMAINS = [Platform.LIGHT, Platform.SWITCH, Platform.FAN] - -IKEA_AIR_PURIFIER_CLUSTER = 0xFC7D -PHILLIPS_REMOTE_CLUSTER = 0xFC00 -SMARTTHINGS_ACCELERATION_CLUSTER = 0xFC02 -SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE = 0x8000 -SMARTTHINGS_HUMIDITY_CLUSTER = 0xFC45 -TUYA_MANUFACTURER_CLUSTER = 0xEF00 -VOC_LEVEL_CLUSTER = 0x042E - -REMOTE_DEVICE_TYPES = { - 
zigpy.profiles.zha.PROFILE_ID: [ - zigpy.profiles.zha.DeviceType.COLOR_CONTROLLER, - zigpy.profiles.zha.DeviceType.COLOR_DIMMER_SWITCH, - zigpy.profiles.zha.DeviceType.COLOR_SCENE_CONTROLLER, - zigpy.profiles.zha.DeviceType.DIMMER_SWITCH, - zigpy.profiles.zha.DeviceType.LEVEL_CONTROL_SWITCH, - zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, - zigpy.profiles.zha.DeviceType.NON_COLOR_SCENE_CONTROLLER, - zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT_SWITCH, - zigpy.profiles.zha.DeviceType.REMOTE_CONTROL, - zigpy.profiles.zha.DeviceType.SCENE_SELECTOR, - ], - zigpy.profiles.zll.PROFILE_ID: [ - zigpy.profiles.zll.DeviceType.COLOR_CONTROLLER, - zigpy.profiles.zll.DeviceType.COLOR_SCENE_CONTROLLER, - zigpy.profiles.zll.DeviceType.CONTROL_BRIDGE, - zigpy.profiles.zll.DeviceType.CONTROLLER, - zigpy.profiles.zll.DeviceType.SCENE_CONTROLLER, - ], -} -REMOTE_DEVICE_TYPES = collections.defaultdict(list, REMOTE_DEVICE_TYPES) - -SINGLE_INPUT_CLUSTER_DEVICE_CLASS = { - # this works for now but if we hit conflicts we can break it out to - # a different dict that is keyed by manufacturer - zcl.clusters.general.AnalogOutput.cluster_id: Platform.NUMBER, - zcl.clusters.general.MultistateInput.cluster_id: Platform.SENSOR, - zcl.clusters.general.OnOff.cluster_id: Platform.SWITCH, - zcl.clusters.hvac.Fan.cluster_id: Platform.FAN, -} - -SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS = { - zcl.clusters.general.OnOff.cluster_id: Platform.BINARY_SENSOR, - zcl.clusters.security.IasAce.cluster_id: Platform.ALARM_CONTROL_PANEL, -} - -BINDABLE_CLUSTERS = SetRegistry() - -DEVICE_CLASS = { - zigpy.profiles.zha.PROFILE_ID: { - SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE: Platform.DEVICE_TRACKER, - zigpy.profiles.zha.DeviceType.THERMOSTAT: Platform.CLIMATE, - zigpy.profiles.zha.DeviceType.COLOR_DIMMABLE_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.COLOR_TEMPERATURE_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.DIMMABLE_BALLAST: Platform.LIGHT, - 
zigpy.profiles.zha.DeviceType.DIMMABLE_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.DIMMABLE_PLUG_IN_UNIT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.EXTENDED_COLOR_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.LEVEL_CONTROLLABLE_OUTPUT: Platform.COVER, - zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST: Platform.SWITCH, - zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.ON_OFF_PLUG_IN_UNIT: Platform.SWITCH, - zigpy.profiles.zha.DeviceType.SHADE: Platform.COVER, - zigpy.profiles.zha.DeviceType.SMART_PLUG: Platform.SWITCH, - zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL: Platform.ALARM_CONTROL_PANEL, - zigpy.profiles.zha.DeviceType.IAS_WARNING_DEVICE: Platform.SIREN, - }, - zigpy.profiles.zll.PROFILE_ID: { - zigpy.profiles.zll.DeviceType.COLOR_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.COLOR_TEMPERATURE_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.DIMMABLE_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.DIMMABLE_PLUGIN_UNIT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.EXTENDED_COLOR_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.ON_OFF_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.ON_OFF_PLUGIN_UNIT: Platform.SWITCH, - }, -} -DEVICE_CLASS = collections.defaultdict(dict, DEVICE_CLASS) - -CLUSTER_HANDLER_ONLY_CLUSTERS = SetRegistry() -CLIENT_CLUSTER_HANDLER_REGISTRY: DictRegistry[type[ClientClusterHandler]] = ( - DictRegistry() -) -ZIGBEE_CLUSTER_HANDLER_REGISTRY: NestedDictRegistry[type[ClusterHandler]] = ( - NestedDictRegistry() -) - -WEIGHT_ATTR = attrgetter("weight") - - -def set_or_callable(value) -> frozenset[str] | Callable: - """Convert single str or None to a set. 
Pass through callables and sets.""" - if value is None: - return frozenset() - if callable(value): - return value - if isinstance(value, (frozenset, set, list)): - return frozenset(value) - return frozenset([str(value)]) - - -def _get_empty_frozenset() -> frozenset[str]: - return frozenset() - - -@attr.s(frozen=True) -class MatchRule: - """Match a ZHA Entity to a cluster handler name or generic id.""" - - cluster_handler_names: frozenset[str] = attr.ib( - factory=frozenset, converter=set_or_callable - ) - generic_ids: frozenset[str] = attr.ib(factory=frozenset, converter=set_or_callable) - manufacturers: frozenset[str] | Callable = attr.ib( - factory=_get_empty_frozenset, converter=set_or_callable - ) - models: frozenset[str] | Callable = attr.ib( - factory=_get_empty_frozenset, converter=set_or_callable - ) - aux_cluster_handlers: frozenset[str] | Callable = attr.ib( - factory=_get_empty_frozenset, converter=set_or_callable - ) - quirk_ids: frozenset[str] | Callable = attr.ib( - factory=_get_empty_frozenset, converter=set_or_callable - ) - - @property - def weight(self) -> int: - """Return the weight of the matching rule. - - More specific matches should be preferred over less specific. Quirk class - matching rules have priority over model matching rules - and have a priority over manufacturer matching rules and rules matching a - single model/manufacturer get a better priority over rules matching multiple - models/manufacturers. And any model or manufacturers matching rules get better - priority over rules matching only cluster handlers. - But in case of a cluster handler name/cluster handler id matching, we give rules matching - multiple cluster handlers a better priority over rules matching a single cluster handler. 
- """ - weight = 0 - if self.quirk_ids: - weight += 501 - (1 if callable(self.quirk_ids) else len(self.quirk_ids)) - - if self.models: - weight += 401 - (1 if callable(self.models) else len(self.models)) - - if self.manufacturers: - weight += 301 - ( - 1 if callable(self.manufacturers) else len(self.manufacturers) - ) - - weight += 10 * len(self.cluster_handler_names) - weight += 5 * len(self.generic_ids) - if isinstance(self.aux_cluster_handlers, frozenset): - weight += 1 * len(self.aux_cluster_handlers) - return weight - - def claim_cluster_handlers( - self, cluster_handlers: list[ClusterHandler] - ) -> list[ClusterHandler]: - """Return a list of cluster handlers this rule matches + aux cluster handlers.""" - claimed = [] - if isinstance(self.cluster_handler_names, frozenset): - claimed.extend( - [ch for ch in cluster_handlers if ch.name in self.cluster_handler_names] - ) - if isinstance(self.generic_ids, frozenset): - claimed.extend( - [ch for ch in cluster_handlers if ch.generic_id in self.generic_ids] - ) - if isinstance(self.aux_cluster_handlers, frozenset): - claimed.extend( - [ch for ch in cluster_handlers if ch.name in self.aux_cluster_handlers] - ) - return claimed - - def strict_matched( - self, - manufacturer: str, - model: str, - cluster_handlers: list, - quirk_id: str | None, - ) -> bool: - """Return True if this device matches the criteria.""" - return all(self._matched(manufacturer, model, cluster_handlers, quirk_id)) - - def loose_matched( - self, - manufacturer: str, - model: str, - cluster_handlers: list, - quirk_id: str | None, - ) -> bool: - """Return True if this device matches the criteria.""" - return any(self._matched(manufacturer, model, cluster_handlers, quirk_id)) - - def _matched( - self, - manufacturer: str, - model: str, - cluster_handlers: list, - quirk_id: str | None, - ) -> list: - """Return a list of field matches.""" - if not any(attr.asdict(self).values()): - return [False] - - matches = [] - if self.cluster_handler_names: - 
cluster_handler_names = {ch.name for ch in cluster_handlers} - matches.append(self.cluster_handler_names.issubset(cluster_handler_names)) - - if self.generic_ids: - all_generic_ids = {ch.generic_id for ch in cluster_handlers} - matches.append(self.generic_ids.issubset(all_generic_ids)) - - if self.manufacturers: - if callable(self.manufacturers): - matches.append(self.manufacturers(manufacturer)) - else: - matches.append(manufacturer in self.manufacturers) - - if self.models: - if callable(self.models): - matches.append(self.models(model)) - else: - matches.append(model in self.models) - - if self.quirk_ids: - if callable(self.quirk_ids): - matches.append(self.quirk_ids(quirk_id)) - else: - matches.append(quirk_id in self.quirk_ids) - - return matches - - -@dataclasses.dataclass -class EntityClassAndClusterHandlers: - """Container for entity class and corresponding cluster handlers.""" - - entity_class: type[ZhaEntity] - claimed_cluster_handlers: list[ClusterHandler] - - -class ZHAEntityRegistry: - """Cluster handler to ZHA Entity mapping.""" - - def __init__(self) -> None: - """Initialize Registry instance.""" - self._strict_registry: dict[Platform, dict[MatchRule, type[ZhaEntity]]] = ( - collections.defaultdict(dict) - ) - self._multi_entity_registry: dict[ - Platform, dict[int | str | None, dict[MatchRule, list[type[ZhaEntity]]]] - ] = collections.defaultdict( - lambda: collections.defaultdict(lambda: collections.defaultdict(list)) - ) - self._config_diagnostic_entity_registry: dict[ - Platform, dict[int | str | None, dict[MatchRule, list[type[ZhaEntity]]]] - ] = collections.defaultdict( - lambda: collections.defaultdict(lambda: collections.defaultdict(list)) - ) - self._group_registry: dict[str, type[ZhaGroupEntity]] = {} - self.single_device_matches: dict[Platform, dict[EUI64, list[str]]] = ( - collections.defaultdict(lambda: collections.defaultdict(list)) - ) - - def get_entity( - self, - component: Platform, - manufacturer: str, - model: str, - 
cluster_handlers: list[ClusterHandler], - quirk_id: str | None, - default: type[ZhaEntity] | None = None, - ) -> tuple[type[ZhaEntity] | None, list[ClusterHandler]]: - """Match a ZHA ClusterHandler to a ZHA Entity class.""" - matches = self._strict_registry[component] - for match in sorted(matches, key=WEIGHT_ATTR, reverse=True): - if match.strict_matched(manufacturer, model, cluster_handlers, quirk_id): - claimed = match.claim_cluster_handlers(cluster_handlers) - return self._strict_registry[component][match], claimed - - return default, [] - - def get_multi_entity( - self, - manufacturer: str, - model: str, - cluster_handlers: list[ClusterHandler], - quirk_id: str | None, - ) -> tuple[ - dict[Platform, list[EntityClassAndClusterHandlers]], list[ClusterHandler] - ]: - """Match ZHA cluster handlers to potentially multiple ZHA Entity classes.""" - result: dict[Platform, list[EntityClassAndClusterHandlers]] = ( - collections.defaultdict(list) - ) - all_claimed: set[ClusterHandler] = set() - for component, stop_match_groups in self._multi_entity_registry.items(): - for stop_match_grp, matches in stop_match_groups.items(): - sorted_matches = sorted(matches, key=WEIGHT_ATTR, reverse=True) - for match in sorted_matches: - if match.strict_matched( - manufacturer, model, cluster_handlers, quirk_id - ): - claimed = match.claim_cluster_handlers(cluster_handlers) - for ent_class in stop_match_groups[stop_match_grp][match]: - ent_n_cluster_handlers = EntityClassAndClusterHandlers( - ent_class, claimed - ) - result[component].append(ent_n_cluster_handlers) - all_claimed |= set(claimed) - if stop_match_grp: - break - - return result, list(all_claimed) - - def get_config_diagnostic_entity( - self, - manufacturer: str, - model: str, - cluster_handlers: list[ClusterHandler], - quirk_id: str | None, - ) -> tuple[ - dict[Platform, list[EntityClassAndClusterHandlers]], list[ClusterHandler] - ]: - """Match ZHA cluster handlers to potentially multiple ZHA Entity classes.""" - result: 
dict[Platform, list[EntityClassAndClusterHandlers]] = ( - collections.defaultdict(list) - ) - all_claimed: set[ClusterHandler] = set() - for ( - component, - stop_match_groups, - ) in self._config_diagnostic_entity_registry.items(): - for stop_match_grp, matches in stop_match_groups.items(): - sorted_matches = sorted(matches, key=WEIGHT_ATTR, reverse=True) - for match in sorted_matches: - if match.strict_matched( - manufacturer, model, cluster_handlers, quirk_id - ): - claimed = match.claim_cluster_handlers(cluster_handlers) - for ent_class in stop_match_groups[stop_match_grp][match]: - ent_n_cluster_handlers = EntityClassAndClusterHandlers( - ent_class, claimed - ) - result[component].append(ent_n_cluster_handlers) - all_claimed |= set(claimed) - if stop_match_grp: - break - - return result, list(all_claimed) - - def get_group_entity(self, component: str) -> type[ZhaGroupEntity] | None: - """Match a ZHA group to a ZHA Entity class.""" - return self._group_registry.get(component) - - def strict_match[_ZhaEntityT: type[ZhaEntity]]( - self, - component: Platform, - cluster_handler_names: set[str] | str | None = None, - generic_ids: set[str] | str | None = None, - manufacturers: Callable | set[str] | str | None = None, - models: Callable | set[str] | str | None = None, - aux_cluster_handlers: Callable | set[str] | str | None = None, - quirk_ids: set[str] | str | None = None, - ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: - """Decorate a strict match rule.""" - - rule = MatchRule( - cluster_handler_names, - generic_ids, - manufacturers, - models, - aux_cluster_handlers, - quirk_ids, - ) - - def decorator(zha_ent: _ZhaEntityT) -> _ZhaEntityT: - """Register a strict match rule. - - All non-empty fields of a match rule must match. 
- """ - self._strict_registry[component][rule] = zha_ent - return zha_ent - - return decorator - - def multipass_match[_ZhaEntityT: type[ZhaEntity]]( - self, - component: Platform, - cluster_handler_names: set[str] | str | None = None, - generic_ids: set[str] | str | None = None, - manufacturers: Callable | set[str] | str | None = None, - models: Callable | set[str] | str | None = None, - aux_cluster_handlers: Callable | set[str] | str | None = None, - stop_on_match_group: int | str | None = None, - quirk_ids: set[str] | str | None = None, - ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: - """Decorate a loose match rule.""" - - rule = MatchRule( - cluster_handler_names, - generic_ids, - manufacturers, - models, - aux_cluster_handlers, - quirk_ids, - ) - - def decorator(zha_entity: _ZhaEntityT) -> _ZhaEntityT: - """Register a loose match rule. - - All non empty fields of a match rule must match. - """ - # group the rules by cluster handlers - self._multi_entity_registry[component][stop_on_match_group][rule].append( - zha_entity - ) - return zha_entity - - return decorator - - def config_diagnostic_match[_ZhaEntityT: type[ZhaEntity]]( - self, - component: Platform, - cluster_handler_names: set[str] | str | None = None, - generic_ids: set[str] | str | None = None, - manufacturers: Callable | set[str] | str | None = None, - models: Callable | set[str] | str | None = None, - aux_cluster_handlers: Callable | set[str] | str | None = None, - stop_on_match_group: int | str | None = None, - quirk_ids: set[str] | str | None = None, - ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: - """Decorate a loose match rule.""" - - rule = MatchRule( - cluster_handler_names, - generic_ids, - manufacturers, - models, - aux_cluster_handlers, - quirk_ids, - ) - - def decorator(zha_entity: _ZhaEntityT) -> _ZhaEntityT: - """Register a loose match rule. - - All non-empty fields of a match rule must match. 
- """ - # group the rules by cluster handlers - self._config_diagnostic_entity_registry[component][stop_on_match_group][ - rule - ].append(zha_entity) - return zha_entity - - return decorator - - def group_match[_ZhaGroupEntityT: type[ZhaGroupEntity]]( - self, component: Platform - ) -> Callable[[_ZhaGroupEntityT], _ZhaGroupEntityT]: - """Decorate a group match rule.""" - - def decorator(zha_ent: _ZhaGroupEntityT) -> _ZhaGroupEntityT: - """Register a group match rule.""" - self._group_registry[component] = zha_ent - return zha_ent - - return decorator - - def prevent_entity_creation(self, platform: Platform, ieee: EUI64, key: str): - """Return True if the entity should not be created.""" - platform_restrictions = self.single_device_matches[platform] - device_restrictions = platform_restrictions[ieee] - if key in device_restrictions: - return True - device_restrictions.append(key) - return False - - def clean_up(self) -> None: - """Clean up post discovery.""" - self.single_device_matches = collections.defaultdict( - lambda: collections.defaultdict(list) - ) - - -ZHA_ENTITIES = ZHAEntityRegistry() diff --git a/homeassistant/components/zha/cover.py b/homeassistant/components/zha/cover.py index 718b6fed3a2..0d6be2dbb35 100644 --- a/homeassistant/components/zha/cover.py +++ b/homeassistant/components/zha/cover.py @@ -2,16 +2,17 @@ from __future__ import annotations -import asyncio +from collections.abc import Mapping import functools import logging -from typing import TYPE_CHECKING, Any, cast +from typing import Any -from zigpy.zcl.clusters.closures import WindowCovering as WindowCoveringCluster -from zigpy.zcl.foundation import Status +from zha.application.platforms.cover import Shade as ZhaShade +from zha.application.platforms.cover.const import ( + CoverEntityFeature as ZHACoverEntityFeature, +) from homeassistant.components.cover import ( - ATTR_CURRENT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, CoverDeviceClass, @@ -19,41 +20,22 @@ from 
homeassistant.components.cover import ( CoverEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) -from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.cluster_handlers.closures import WindowCoveringClusterHandler -from .core.const import ( - CLUSTER_HANDLER_COVER, - CLUSTER_HANDLER_LEVEL, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_SHADE, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, - SIGNAL_SET_LEVEL, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.COVER) - async def async_setup_entry( hass: HomeAssistant, @@ -68,421 +50,143 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, ZhaCover, entities_to_create ), ) config_entry.async_on_unload(unsub) -WCAttrs = WindowCoveringCluster.AttributeDefs -WCT = WindowCoveringCluster.WindowCoveringType -WCCS = WindowCoveringCluster.ConfigStatus - -ZCL_TO_COVER_DEVICE_CLASS = { - WCT.Awning: CoverDeviceClass.AWNING, - WCT.Drapery: CoverDeviceClass.CURTAIN, - 
WCT.Projector_screen: CoverDeviceClass.SHADE, - WCT.Rollershade: CoverDeviceClass.SHADE, - WCT.Rollershade_two_motors: CoverDeviceClass.SHADE, - WCT.Rollershade_exterior: CoverDeviceClass.SHADE, - WCT.Rollershade_exterior_two_motors: CoverDeviceClass.SHADE, - WCT.Shutter: CoverDeviceClass.SHUTTER, - WCT.Tilt_blind_tilt_only: CoverDeviceClass.BLIND, - WCT.Tilt_blind_tilt_and_lift: CoverDeviceClass.BLIND, -} - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) -class ZhaCover(ZhaEntity, CoverEntity): +class ZhaCover(ZHAEntity, CoverEntity): """Representation of a ZHA cover.""" - _attr_translation_key: str = "cover" + def __init__(self, entity_data: EntityData) -> None: + """Initialize the ZHA cover.""" + super().__init__(entity_data) - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this cover.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_COVER) - assert cluster_handler - self._cover_cluster_handler: WindowCoveringClusterHandler = cast( - WindowCoveringClusterHandler, cluster_handler - ) - if self._cover_cluster_handler.window_covering_type: - self._attr_device_class: CoverDeviceClass | None = ( - ZCL_TO_COVER_DEVICE_CLASS.get( - self._cover_cluster_handler.window_covering_type - ) + if self.entity_data.entity.info_object.device_class is not None: + self._attr_device_class = CoverDeviceClass( + self.entity_data.entity.info_object.device_class ) - self._attr_supported_features: CoverEntityFeature = ( - self._determine_supported_features() - ) - self._target_lift_position: int | None = None - self._target_tilt_position: int | None = None - self._determine_initial_state() - def _determine_supported_features(self) -> CoverEntityFeature: - """Determine the supported cover features.""" - supported_features: CoverEntityFeature = ( - CoverEntityFeature.OPEN - | 
CoverEntityFeature.CLOSE - | CoverEntityFeature.STOP - | CoverEntityFeature.SET_POSITION - ) - if ( - self._cover_cluster_handler.window_covering_type - and self._cover_cluster_handler.window_covering_type - in ( - WCT.Shutter, - WCT.Tilt_blind_tilt_only, - WCT.Tilt_blind_tilt_and_lift, - ) - ): - supported_features |= CoverEntityFeature.SET_TILT_POSITION - supported_features |= CoverEntityFeature.OPEN_TILT - supported_features |= CoverEntityFeature.CLOSE_TILT - supported_features |= CoverEntityFeature.STOP_TILT - return supported_features + features = CoverEntityFeature(0) + zha_features: ZHACoverEntityFeature = self.entity_data.entity.supported_features - def _determine_initial_state(self) -> None: - """Determine the initial state of the cover.""" - if ( - self._cover_cluster_handler.window_covering_type - and self._cover_cluster_handler.window_covering_type - in ( - WCT.Shutter, - WCT.Tilt_blind_tilt_only, - WCT.Tilt_blind_tilt_and_lift, - ) - ): - self._determine_state( - self.current_cover_tilt_position, is_lift_update=False - ) - if ( - self._cover_cluster_handler.window_covering_type - == WCT.Tilt_blind_tilt_and_lift - ): - state = self._state - self._determine_state(self.current_cover_position) - if state == STATE_OPEN and self._state == STATE_CLOSED: - # let the tilt state override the lift state - self._state = STATE_OPEN - else: - self._determine_state(self.current_cover_position) + if ZHACoverEntityFeature.OPEN in zha_features: + features |= CoverEntityFeature.OPEN + if ZHACoverEntityFeature.CLOSE in zha_features: + features |= CoverEntityFeature.CLOSE + if ZHACoverEntityFeature.SET_POSITION in zha_features: + features |= CoverEntityFeature.SET_POSITION + if ZHACoverEntityFeature.STOP in zha_features: + features |= CoverEntityFeature.STOP + if ZHACoverEntityFeature.OPEN_TILT in zha_features: + features |= CoverEntityFeature.OPEN_TILT + if ZHACoverEntityFeature.CLOSE_TILT in zha_features: + features |= CoverEntityFeature.CLOSE_TILT + if 
ZHACoverEntityFeature.STOP_TILT in zha_features: + features |= CoverEntityFeature.STOP_TILT + if ZHACoverEntityFeature.SET_TILT_POSITION in zha_features: + features |= CoverEntityFeature.SET_TILT_POSITION - def _determine_state(self, position_or_tilt, is_lift_update=True) -> None: - """Determine the state of the cover. + self._attr_supported_features = features - In HA None is unknown, 0 is closed, 100 is fully open. - In ZCL 0 is fully open, 100 is fully closed. - Keep in mind the values have already been flipped to match HA - in the WindowCovering cluster handler - """ - if is_lift_update: - target = self._target_lift_position - current = self.current_cover_position - else: - target = self._target_tilt_position - current = self.current_cover_tilt_position - - if position_or_tilt == 100: - self._state = STATE_CLOSED - return - if target is not None and target != current: - # we are mid transition and shouldn't update the state - return - self._state = STATE_OPEN - - async def async_added_to_hass(self) -> None: - """Run when the cover entity is about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cover_cluster_handler, SIGNAL_ATTR_UPDATED, self.zcl_attribute_updated - ) + @property + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return entity specific state attributes.""" + state = self.entity_data.entity.state + return { + "target_lift_position": state.get("target_lift_position"), + "target_tilt_position": state.get("target_tilt_position"), + } @property def is_closed(self) -> bool | None: - """Return True if the cover is closed. - - In HA None is unknown, 0 is closed, 100 is fully open. - In ZCL 0 is fully open, 100 is fully closed. 
- Keep in mind the values have already been flipped to match HA - in the WindowCovering cluster handler - """ - if self.current_cover_position is None: - return None - return self.current_cover_position == 0 + """Return True if the cover is closed.""" + return self.entity_data.entity.is_closed @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._state == STATE_OPENING + return self.entity_data.entity.is_opening @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._state == STATE_CLOSING + return self.entity_data.entity.is_closing @property def current_cover_position(self) -> int | None: - """Return the current position of ZHA cover. - - In HA None is unknown, 0 is closed, 100 is fully open. - In ZCL 0 is fully open, 100 is fully closed. - Keep in mind the values have already been flipped to match HA - in the WindowCovering cluster handler - """ - return self._cover_cluster_handler.current_position_lift_percentage + """Return the current position of ZHA cover.""" + return self.entity_data.entity.current_cover_position @property def current_cover_tilt_position(self) -> int | None: """Return the current tilt position of the cover.""" - return self._cover_cluster_handler.current_position_tilt_percentage - - @callback - def zcl_attribute_updated(self, attr_id, attr_name, value): - """Handle position update from cluster handler.""" - if attr_id in ( - WCAttrs.current_position_lift_percentage.id, - WCAttrs.current_position_tilt_percentage.id, - ): - value = ( - self.current_cover_position - if attr_id == WCAttrs.current_position_lift_percentage.id - else self.current_cover_tilt_position - ) - self._determine_state( - value, - is_lift_update=attr_id == WCAttrs.current_position_lift_percentage.id, - ) - self.async_write_ha_state() - - @callback - def async_update_state(self, state): - """Handle state update from HA operations below.""" - _LOGGER.debug("async_update_state=%s", state) 
- self._state = state - self.async_write_ha_state() + return self.entity_data.entity.current_cover_tilt_position + @convert_zha_error_to_ha_error async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" - res = await self._cover_cluster_handler.up_open() - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to open cover: {res[1]}") - self.async_update_state(STATE_OPENING) + await self.entity_data.entity.async_open_cover() + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_open_cover_tilt(self, **kwargs: Any) -> None: """Open the cover tilt.""" - # 0 is open in ZCL - res = await self._cover_cluster_handler.go_to_tilt_percentage(0) - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to open cover tilt: {res[1]}") - self.async_update_state(STATE_OPENING) + await self.entity_data.entity.async_open_cover_tilt() + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" - res = await self._cover_cluster_handler.down_close() - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to close cover: {res[1]}") - self.async_update_state(STATE_CLOSING) + await self.entity_data.entity.async_close_cover() + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_close_cover_tilt(self, **kwargs: Any) -> None: """Close the cover tilt.""" - # 100 is closed in ZCL - res = await self._cover_cluster_handler.go_to_tilt_percentage(100) - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to close cover tilt: {res[1]}") - self.async_update_state(STATE_CLOSING) + await self.entity_data.entity.async_close_cover_tilt() + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - self._target_lift_position = kwargs[ATTR_POSITION] - assert 
self._target_lift_position is not None - assert self.current_cover_position is not None - # the 100 - value is because we need to invert the value before giving it to ZCL - res = await self._cover_cluster_handler.go_to_lift_percentage( - 100 - self._target_lift_position - ) - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to set cover position: {res[1]}") - self.async_update_state( - STATE_CLOSING - if self._target_lift_position < self.current_cover_position - else STATE_OPENING + await self.entity_data.entity.async_set_cover_position( + position=kwargs.get(ATTR_POSITION) ) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: """Move the cover tilt to a specific position.""" - self._target_tilt_position = kwargs[ATTR_TILT_POSITION] - assert self._target_tilt_position is not None - assert self.current_cover_tilt_position is not None - # the 100 - value is because we need to invert the value before giving it to ZCL - res = await self._cover_cluster_handler.go_to_tilt_percentage( - 100 - self._target_tilt_position + await self.entity_data.entity.async_set_cover_tilt_position( + tilt_position=kwargs.get(ATTR_TILT_POSITION) ) - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to set cover tilt position: {res[1]}") - self.async_update_state( - STATE_CLOSING - if self._target_tilt_position < self.current_cover_tilt_position - else STATE_OPENING - ) - - async def async_stop_cover(self, **kwargs: Any) -> None: - """Stop the cover.""" - res = await self._cover_cluster_handler.stop() - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to stop cover: {res[1]}") - self._target_lift_position = self.current_cover_position - self._determine_state(self.current_cover_position) self.async_write_ha_state() + @convert_zha_error_to_ha_error + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await 
self.entity_data.entity.async_stop_cover() + self.async_write_ha_state() + + @convert_zha_error_to_ha_error async def async_stop_cover_tilt(self, **kwargs: Any) -> None: """Stop the cover tilt.""" - res = await self._cover_cluster_handler.stop() - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to stop cover: {res[1]}") - self._target_tilt_position = self.current_cover_tilt_position - self._determine_state(self.current_cover_tilt_position, is_lift_update=False) - self.async_write_ha_state() - - -@MULTI_MATCH( - cluster_handler_names={ - CLUSTER_HANDLER_LEVEL, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_SHADE, - } -) -class Shade(ZhaEntity, CoverEntity): - """ZHA Shade.""" - - _attr_device_class = CoverDeviceClass.SHADE - _attr_translation_key: str = "shade" - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs, - ) -> None: - """Initialize the ZHA light.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] - self._level_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_LEVEL] - self._position: int | None = None - self._is_open: bool | None = None - - @property - def current_cover_position(self) -> int | None: - """Return current position of cover. - - None is unknown, 0 is closed, 100 is fully open. 
- """ - return self._position - - @property - def is_closed(self) -> bool | None: - """Return True if shade is closed.""" - if self._is_open is None: - return None - return not self._is_open - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._on_off_cluster_handler, - SIGNAL_ATTR_UPDATED, - self.async_set_open_closed, - ) - self.async_accept_signal( - self._level_cluster_handler, SIGNAL_SET_LEVEL, self.async_set_level - ) - - @callback - def async_restore_last_state(self, last_state): - """Restore previous state.""" - self._is_open = last_state.state == STATE_OPEN - if ATTR_CURRENT_POSITION in last_state.attributes: - self._position = last_state.attributes[ATTR_CURRENT_POSITION] - - @callback - def async_set_open_closed(self, attr_id: int, attr_name: str, value: bool) -> None: - """Set open/closed state.""" - self._is_open = bool(value) + await self.entity_data.entity.async_stop_cover_tilt() self.async_write_ha_state() @callback - def async_set_level(self, value: int) -> None: - """Set the reported position.""" - value = max(0, min(255, value)) - self._position = int(value * 100 / 255) - self.async_write_ha_state() + def restore_external_state_attributes(self, state: State) -> None: + """Restore entity state.""" - async def async_open_cover(self, **kwargs: Any) -> None: - """Open the window cover.""" - res = await self._on_off_cluster_handler.on() - if res[1] != Status.SUCCESS: - raise HomeAssistantError(f"Failed to open cover: {res[1]}") + # Shades are a subtype of cover that do not need external state restored + if isinstance(self.entity_data.entity, ZhaShade): + return - self._is_open = True - self.async_write_ha_state() - - async def async_close_cover(self, **kwargs: Any) -> None: - """Close the window cover.""" - res = await self._on_off_cluster_handler.off() - if res[1] != Status.SUCCESS: - raise HomeAssistantError(f"Failed to close cover: 
{res[1]}") - - self._is_open = False - self.async_write_ha_state() - - async def async_set_cover_position(self, **kwargs: Any) -> None: - """Move the roller shutter to a specific position.""" - new_pos = kwargs[ATTR_POSITION] - res = await self._level_cluster_handler.move_to_level_with_on_off( - new_pos * 255 / 100, 1 + # Same as `light`, some entity state is not derived from ZCL attributes + self.entity_data.entity.restore_external_state_attributes( + state=state.state, + target_lift_position=state.attributes.get("target_lift_position"), + target_tilt_position=state.attributes.get("target_tilt_position"), ) - - if res[1] != Status.SUCCESS: - raise HomeAssistantError(f"Failed to set cover position: {res[1]}") - - self._position = new_pos - self.async_write_ha_state() - - async def async_stop_cover(self, **kwargs: Any) -> None: - """Stop the cover.""" - res = await self._level_cluster_handler.stop() - if res[1] != Status.SUCCESS: - raise HomeAssistantError(f"Failed to stop cover: {res[1]}") - - -@MULTI_MATCH( - cluster_handler_names={CLUSTER_HANDLER_LEVEL, CLUSTER_HANDLER_ON_OFF}, - manufacturers="Keen Home Inc", -) -class KeenVent(Shade): - """Keen vent cover.""" - - _attr_device_class = CoverDeviceClass.DAMPER - _attr_translation_key: str = "keen_vent" - - async def async_open_cover(self, **kwargs: Any) -> None: - """Open the cover.""" - position = self._position or 100 - await asyncio.gather( - self._level_cluster_handler.move_to_level_with_on_off( - position * 255 / 100, 1 - ), - self._on_off_cluster_handler.on(), - ) - - self._is_open = True - self._position = position - self.async_write_ha_state() diff --git a/homeassistant/components/zha/device_action.py b/homeassistant/components/zha/device_action.py index 8f5a03a7fe5..b4b40880734 100644 --- a/homeassistant/components/zha/device_action.py +++ b/homeassistant/components/zha/device_action.py @@ -5,20 +5,25 @@ from __future__ import annotations from typing import Any import voluptuous as vol +from 
zha.exceptions import ZHAException +from zha.zigbee.cluster_handlers.const import ( + CLUSTER_HANDLER_IAS_WD, + CLUSTER_HANDLER_INOVELLI, +) +from zha.zigbee.cluster_handlers.manufacturerspecific import ( + AllLEDEffectType, + SingleLEDEffectType, +) from homeassistant.components.device_automation import InvalidDeviceAutomationConfig from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_TYPE from homeassistant.core import Context, HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN -from .core.cluster_handlers.manufacturerspecific import ( - AllLEDEffectType, - SingleLEDEffectType, -) -from .core.const import CLUSTER_HANDLER_IAS_WD, CLUSTER_HANDLER_INOVELLI -from .core.helpers import async_get_zha_device +from .const import DOMAIN +from .helpers import async_get_zha_device_proxy from .websocket_api import SERVICE_WARNING_DEVICE_SQUAWK, SERVICE_WARNING_DEVICE_WARN # mypy: disallow-any-generics @@ -144,7 +149,7 @@ async def async_get_actions( ) -> list[dict[str, str]]: """List device actions.""" try: - zha_device = async_get_zha_device(hass, device_id) + zha_device = async_get_zha_device_proxy(hass, device_id).device except (KeyError, AttributeError): return [] cluster_handlers = [ @@ -167,8 +172,9 @@ async def async_get_action_capabilities( hass: HomeAssistant, config: ConfigType ) -> dict[str, vol.Schema]: """List action capabilities.""" - - return {"extra_fields": DEVICE_ACTION_SCHEMAS.get(config[CONF_TYPE], {})} + if (fields := DEVICE_ACTION_SCHEMAS.get(config[CONF_TYPE])) is None: + return {} + return {"extra_fields": fields} async def _execute_service_based_action( @@ -180,7 +186,7 @@ async def _execute_service_based_action( action_type = config[CONF_TYPE] service_name = SERVICE_NAMES[action_type] try: - zha_device = async_get_zha_device(hass, config[CONF_DEVICE_ID]) + zha_device = 
async_get_zha_device_proxy(hass, config[CONF_DEVICE_ID]).device except (KeyError, AttributeError): return @@ -200,7 +206,7 @@ async def _execute_cluster_handler_command_based_action( action_type = config[CONF_TYPE] cluster_handler_name = CLUSTER_HANDLER_MAPPINGS[action_type] try: - zha_device = async_get_zha_device(hass, config[CONF_DEVICE_ID]) + zha_device = async_get_zha_device_proxy(hass, config[CONF_DEVICE_ID]).device except (KeyError, AttributeError): return @@ -223,7 +229,10 @@ async def _execute_cluster_handler_command_based_action( f" {action_type}" ) - await getattr(action_cluster_handler, action_type)(**config) + try: + await getattr(action_cluster_handler, action_type)(**config) + except ZHAException as err: + raise HomeAssistantError(err) from err ZHA_ACTION_TYPES = { diff --git a/homeassistant/components/zha/device_tracker.py b/homeassistant/components/zha/device_tracker.py index 9c96fd0e346..247219777f4 100644 --- a/homeassistant/components/zha/device_tracker.py +++ b/homeassistant/components/zha/device_tracker.py @@ -3,28 +3,21 @@ from __future__ import annotations import functools -import time from homeassistant.components.device_tracker import ScannerEntity, SourceType from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_POWER_CONFIGURATION, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + async_add_entities as zha_async_add_entities, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity -from .sensor import 
Battery - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.DEVICE_TRACKER) async def async_setup_entry( @@ -40,92 +33,48 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, + async_add_entities, + ZHADeviceScannerEntity, + entities_to_create, ), ) config_entry.async_on_unload(unsub) -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_POWER_CONFIGURATION) -class ZHADeviceScannerEntity(ScannerEntity, ZhaEntity): +class ZHADeviceScannerEntity(ScannerEntity, ZHAEntity): """Represent a tracked device.""" _attr_should_poll = True # BaseZhaEntity defaults to False _attr_name: str = "Device scanner" - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize the ZHA device tracker.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._battery_cluster_handler = self.cluster_handlers.get( - CLUSTER_HANDLER_POWER_CONFIGURATION - ) - self._connected = False - self._keepalive_interval = 60 - self._battery_level = None - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - if self._battery_cluster_handler: - self.async_accept_signal( - self._battery_cluster_handler, - SIGNAL_ATTR_UPDATED, - self.async_battery_percentage_remaining_updated, - ) - - async def async_update(self) -> None: - """Handle polling.""" - if self.zha_device.last_seen is None: - self._connected = False - else: - difference = time.time() - self.zha_device.last_seen - if difference > self._keepalive_interval: - self._connected = False - else: - self._connected = True - @property - def is_connected(self): + def is_connected(self) -> bool: """Return true if the device is connected to the network.""" - return self._connected + return self.entity_data.entity.is_connected @property def source_type(self) -> SourceType: """Return the source 
type, eg gps or router, of the device.""" return SourceType.ROUTER - @callback - def async_battery_percentage_remaining_updated(self, attr_id, attr_name, value): - """Handle tracking.""" - if attr_name != "battery_percentage_remaining": - return - self.debug("battery_percentage_remaining updated: %s", value) - self._connected = True - self._battery_level = Battery.formatter(value) - self.async_write_ha_state() - @property - def battery_level(self): + def battery_level(self) -> int | None: """Return the battery level of the device. Percentage from 0-100. """ - return self._battery_level + return self.entity_data.entity.battery_level - @property # type: ignore[misc] - def device_info( - self, - ) -> DeviceInfo: + @property # type: ignore[explicit-override, misc] + def device_info(self) -> DeviceInfo: """Return device info.""" # We opt ZHA device tracker back into overriding this method because # it doesn't track IP-based devices. - # Call Super because ScannerEntity overrode it. - # mypy doesn't know about fget: https://github.com/python/mypy/issues/6185 - return ZhaEntity.device_info.fget(self) # type: ignore[attr-defined] + return ZHAEntity.device_info.__get__(self) @property def unique_id(self) -> str: """Return unique ID.""" # Call Super because ScannerEntity overrode it. 
- # mypy doesn't know about fget: https://github.com/python/mypy/issues/6185 - return ZhaEntity.unique_id.fget(self) # type: ignore[attr-defined] + return ZHAEntity.unique_id.__get__(self) diff --git a/homeassistant/components/zha/device_trigger.py b/homeassistant/components/zha/device_trigger.py index a2ae734b8fc..a134d2aa59b 100644 --- a/homeassistant/components/zha/device_trigger.py +++ b/homeassistant/components/zha/device_trigger.py @@ -1,6 +1,7 @@ """Provides device automations for ZHA devices that emit events.""" import voluptuous as vol +from zha.application.const import ZHA_EVENT from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA from homeassistant.components.device_automation.exceptions import ( @@ -13,9 +14,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN as ZHA_DOMAIN -from .core.const import ZHA_EVENT -from .core.helpers import async_get_zha_device, get_zha_data +from .const import DOMAIN as ZHA_DOMAIN +from .helpers import async_get_zha_device_proxy, get_zha_data CONF_SUBTYPE = "subtype" DEVICE = "device" @@ -31,7 +31,7 @@ def _get_device_trigger_data(hass: HomeAssistant, device_id: str) -> tuple[str, # First, try checking to see if the device itself is accessible try: - zha_device = async_get_zha_device(hass, device_id) + zha_device = async_get_zha_device_proxy(hass, device_id).device except ValueError: pass else: diff --git a/homeassistant/components/zha/diagnostics.py b/homeassistant/components/zha/diagnostics.py index fff816777c0..f276630dfee 100644 --- a/homeassistant/components/zha/diagnostics.py +++ b/homeassistant/components/zha/diagnostics.py @@ -6,6 +6,18 @@ import dataclasses from importlib.metadata import version from typing import Any +from zha.application.const import ( + ATTR_ATTRIBUTE, + ATTR_DEVICE_TYPE, + ATTR_IEEE, + ATTR_IN_CLUSTERS, + 
ATTR_OUT_CLUSTERS, + ATTR_PROFILE_ID, + ATTR_VALUE, + UNKNOWN, +) +from zha.application.gateway import Gateway +from zha.zigbee.device import Device from zigpy.config import CONF_NWK_EXTENDED_PAN_ID from zigpy.profiles import PROFILES from zigpy.types import Channels @@ -17,20 +29,13 @@ from homeassistant.const import CONF_ID, CONF_NAME, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .core.const import ( - ATTR_ATTRIBUTE_NAME, - ATTR_DEVICE_TYPE, - ATTR_IEEE, - ATTR_IN_CLUSTERS, - ATTR_OUT_CLUSTERS, - ATTR_PROFILE_ID, - ATTR_VALUE, - CONF_ALARM_MASTER_CODE, - UNKNOWN, +from .const import CONF_ALARM_MASTER_CODE +from .helpers import ( + ZHADeviceProxy, + async_get_zha_device_proxy, + get_zha_data, + get_zha_gateway, ) -from .core.device import ZHADevice -from .core.gateway import ZHAGateway -from .core.helpers import async_get_zha_device, get_zha_data, get_zha_gateway KEYS_TO_REDACT = { ATTR_IEEE, @@ -65,7 +70,7 @@ async def async_get_config_entry_diagnostics( ) -> dict[str, Any]: """Return diagnostics for a config entry.""" zha_data = get_zha_data(hass) - gateway: ZHAGateway = get_zha_gateway(hass) + gateway: Gateway = get_zha_gateway(hass) app = gateway.application_controller energy_scan = await app.energy_scan( @@ -88,6 +93,7 @@ async def async_get_config_entry_diagnostics( "zigpy_znp": version("zigpy_znp"), "zigpy_zigate": version("zigpy-zigate"), "zhaquirks": version("zha-quirks"), + "zha": version("zha"), }, "devices": [ { @@ -106,13 +112,15 @@ async def async_get_device_diagnostics( hass: HomeAssistant, config_entry: ConfigEntry, device: dr.DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device.""" - zha_device: ZHADevice = async_get_zha_device(hass, device.id) - device_info: dict[str, Any] = zha_device.zha_device_info - device_info[CLUSTER_DETAILS] = get_endpoint_cluster_attr_data(zha_device) + zha_device_proxy: ZHADeviceProxy = async_get_zha_device_proxy(hass, device.id) 
+ device_info: dict[str, Any] = zha_device_proxy.zha_device_info + device_info[CLUSTER_DETAILS] = get_endpoint_cluster_attr_data( + zha_device_proxy.device + ) return async_redact_data(device_info, KEYS_TO_REDACT) -def get_endpoint_cluster_attr_data(zha_device: ZHADevice) -> dict: +def get_endpoint_cluster_attr_data(zha_device: Device) -> dict: """Return endpoint cluster attribute data.""" cluster_details = {} for ep_id, endpoint in zha_device.device.endpoints.items(): @@ -150,27 +158,15 @@ def get_endpoint_cluster_attr_data(zha_device: ZHADevice) -> dict: def get_cluster_attr_data(cluster: Cluster) -> dict: """Return cluster attribute data.""" - unsupported_attributes = {} - for u_attr in cluster.unsupported_attributes: - try: - u_attr_def = cluster.find_attribute(u_attr) - unsupported_attributes[f"0x{u_attr_def.id:04x}"] = { - ATTR_ATTRIBUTE_NAME: u_attr_def.name - } - except KeyError: - if isinstance(u_attr, int): - unsupported_attributes[f"0x{u_attr:04x}"] = {} - else: - unsupported_attributes[u_attr] = {} - return { ATTRIBUTES: { f"0x{attr_id:04x}": { - ATTR_ATTRIBUTE_NAME: attr_def.name, - ATTR_VALUE: attr_value, + ATTR_ATTRIBUTE: repr(attr_def), + ATTR_VALUE: cluster.get(attr_def.name), } for attr_id, attr_def in cluster.attributes.items() - if (attr_value := cluster.get(attr_def.name)) is not None }, - UNSUPPORTED_ATTRIBUTES: unsupported_attributes, + UNSUPPORTED_ATTRIBUTES: sorted( + cluster.unsupported_attributes, key=lambda v: (isinstance(v, str), v) + ), } diff --git a/homeassistant/components/zha/entity.py b/homeassistant/components/zha/entity.py index f10e377dc46..348e545f1c4 100644 --- a/homeassistant/components/zha/entity.py +++ b/homeassistant/components/zha/entity.py @@ -6,84 +6,70 @@ import asyncio from collections.abc import Callable import functools import logging -from typing import TYPE_CHECKING, Any, Self +from typing import Any -from zigpy.quirks.v2 import EntityMetadata, EntityType +from zha.mixins import LogMixin -from homeassistant.const 
import ATTR_NAME, EntityCategory -from homeassistant.core import CALLBACK_TYPE, Event, EventStateChangedData, callback -from homeassistant.helpers import entity -from homeassistant.helpers.debounce import Debouncer +from homeassistant.const import ATTR_MANUFACTURER, ATTR_MODEL, ATTR_NAME, EntityCategory +from homeassistant.core import State, callback from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE, DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.event import async_track_state_change_event +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity from homeassistant.helpers.restore_state import RestoreEntity -from .core.const import ( - ATTR_MANUFACTURER, - ATTR_MODEL, - DOMAIN, - SIGNAL_GROUP_ENTITY_REMOVED, - SIGNAL_GROUP_MEMBERSHIP_CHANGE, - SIGNAL_REMOVE, -) -from .core.helpers import LogMixin, get_zha_gateway - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice +from .const import DOMAIN +from .helpers import SIGNAL_REMOVE_ENTITIES, EntityData, convert_zha_error_to_ha_error _LOGGER = logging.getLogger(__name__) -ENTITY_SUFFIX = "entity_suffix" -DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY = 0.5 - -class BaseZhaEntity(LogMixin, entity.Entity): - """A base class for ZHA entities.""" - - _unique_id_suffix: str | None = None - """suffix to add to the unique_id of the entity. 
Used for multi - entities using the same cluster handler/cluster id for the entity.""" +class ZHAEntity(LogMixin, RestoreEntity, Entity): + """ZHA eitity.""" _attr_has_entity_name = True _attr_should_poll = False + remove_future: asyncio.Future[Any] - def __init__(self, unique_id: str, zha_device: ZHADevice, **kwargs: Any) -> None: + def __init__(self, entity_data: EntityData, *args, **kwargs) -> None: """Init ZHA entity.""" - self._unique_id: str = unique_id - if self._unique_id_suffix: - self._unique_id += f"-{self._unique_id_suffix}" - self._state: Any = None - self._extra_state_attributes: dict[str, Any] = {} - self._zha_device = zha_device + super().__init__(*args, **kwargs) + self.entity_data: EntityData = entity_data self._unsubs: list[Callable[[], None]] = [] - @property - def unique_id(self) -> str: - """Return a unique ID.""" - return self._unique_id + if self.entity_data.entity.icon is not None: + # Only custom quirks will realistically set an icon + self._attr_icon = self.entity_data.entity.icon + + meta = self.entity_data.entity.info_object + self._attr_unique_id = meta.unique_id + + if meta.translation_key is not None: + self._attr_translation_key = meta.translation_key + elif meta.fallback_name is not None: + # Only custom quirks will create entities with just a fallback name! + # + # This is to allow local development and to register niche devices, since + # their translation_key will probably never be added to `zha/strings.json`. 
+ self._attr_name = meta.fallback_name + + if meta.entity_category is not None: + self._attr_entity_category = EntityCategory(meta.entity_category) + + self._attr_entity_registry_enabled_default = ( + meta.entity_registry_enabled_default + ) @property - def zha_device(self) -> ZHADevice: - """Return the ZHA device this entity is attached to.""" - return self._zha_device - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device specific state attributes.""" - return self._extra_state_attributes + def available(self) -> bool: + """Return entity availability.""" + return self.entity_data.entity.available @property def device_info(self) -> DeviceInfo: """Return a device description for device registry.""" - zha_device_info = self._zha_device.device_info + zha_device_info = self.entity_data.device_proxy.device_info ieee = zha_device_info["ieee"] - - zha_gateway = get_zha_gateway(self.hass) + zha_gateway = self.entity_data.device_proxy.gateway_proxy.gateway return DeviceInfo( connections={(CONNECTION_ZIGBEE, ieee)}, @@ -95,265 +81,67 @@ class BaseZhaEntity(LogMixin, entity.Entity): ) @callback - def async_state_changed(self) -> None: + def _handle_entity_events(self, event: Any) -> None: """Entity state changed.""" + self.debug("Handling event from entity: %s", event) self.async_write_ha_state() - @callback - def async_update_state_attribute(self, key: str, value: Any) -> None: - """Update a single device state attribute.""" - self._extra_state_attributes.update({key: value}) - self.async_write_ha_state() + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + self.remove_future = self.hass.loop.create_future() + self._unsubs.append( + self.entity_data.entity.on_all_events(self._handle_entity_events) + ) + remove_signal = ( + f"{SIGNAL_REMOVE_ENTITIES}_group_{self.entity_data.group_proxy.group.group_id}" + if self.entity_data.is_group_entity + and self.entity_data.group_proxy is not None + else 
f"{SIGNAL_REMOVE_ENTITIES}_{self.entity_data.device_proxy.device.ieee}" + ) + self._unsubs.append( + async_dispatcher_connect( + self.hass, + remove_signal, + functools.partial(self.async_remove, force_remove=True), + ) + ) + self.entity_data.device_proxy.gateway_proxy.register_entity_reference( + self.entity_id, + self.entity_data, + self.device_info, + self.remove_future, + ) + + if (state := await self.async_get_last_state()) is None: + return + + self.restore_external_state_attributes(state) @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any) -> None: - """Set the entity state.""" + def restore_external_state_attributes(self, state: State) -> None: + """Restore ephemeral external state from Home Assistant back into ZHA.""" + + # Some operations rely on extra state that is not maintained in the ZCL + # attribute cache. Until ZHA is able to maintain its own persistent state (or + # provides a more generic hook to utilize HA to do this), we directly restore + # them. 
async def async_will_remove_from_hass(self) -> None: """Disconnect entity object when removed.""" for unsub in self._unsubs[:]: unsub() self._unsubs.remove(unsub) + await super().async_will_remove_from_hass() + self.remove_future.set_result(True) - @callback - def async_accept_signal( - self, - cluster_handler: ClusterHandler | None, - signal: str, - func: Callable[..., Any], - signal_override=False, - ): - """Accept a signal from a cluster handler.""" - unsub = None - if signal_override: - unsub = async_dispatcher_connect(self.hass, signal, func) - else: - assert cluster_handler - unsub = async_dispatcher_connect( - self.hass, f"{cluster_handler.unique_id}_{signal}", func - ) - self._unsubs.append(unsub) + @convert_zha_error_to_ha_error + async def async_update(self) -> None: + """Update the entity.""" + await self.entity_data.entity.async_update() + self.async_write_ha_state() def log(self, level: int, msg: str, *args, **kwargs): """Log a message.""" msg = f"%s: {msg}" args = (self.entity_id, *args) _LOGGER.log(level, msg, *args, **kwargs) - - -class ZhaEntity(BaseZhaEntity, RestoreEntity): - """A base class for non group ZHA entities.""" - - remove_future: asyncio.Future[Any] - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init ZHA entity.""" - super().__init__(unique_id, zha_device, **kwargs) - - self.cluster_handlers: dict[str, ClusterHandler] = {} - for cluster_handler in cluster_handlers: - self.cluster_handlers[cluster_handler.name] = cluster_handler - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None: - """Init this entity from the quirks metadata.""" - if entity_metadata.initially_disabled: - self._attr_entity_registry_enabled_default = False - - has_device_class = hasattr(entity_metadata, "device_class") - has_attribute_name = hasattr(entity_metadata, "attribute_name") - has_command_name = hasattr(entity_metadata, "command_name") - if not has_device_class or ( - has_device_class and entity_metadata.device_class is None - ): - if entity_metadata.translation_key: - self._attr_translation_key = entity_metadata.translation_key - elif has_attribute_name: - self._attr_translation_key = entity_metadata.attribute_name - elif has_command_name: - self._attr_translation_key = entity_metadata.command_name - if has_attribute_name: - self._unique_id_suffix = entity_metadata.attribute_name - elif has_command_name: - self._unique_id_suffix = entity_metadata.command_name - if entity_metadata.entity_type is EntityType.CONFIG: - self._attr_entity_category = EntityCategory.CONFIG - elif entity_metadata.entity_type is EntityType.DIAGNOSTIC: - self._attr_entity_category = EntityCategory.DIAGNOSTIC - else: - self._attr_entity_category = None - - @property - def available(self) -> bool: - """Return entity availability.""" - return self._zha_device.available - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - self.remove_future = self.hass.loop.create_future() - self.async_accept_signal( - None, - f"{SIGNAL_REMOVE}_{self.zha_device.ieee}", - functools.partial(self.async_remove, force_remove=True), - signal_override=True, - ) - - if last_state := await self.async_get_last_state(): - self.async_restore_last_state(last_state) - - self.async_accept_signal( - None, - f"{self.zha_device.available_signal}_entity", - 
self.async_state_changed, - signal_override=True, - ) - self._zha_device.gateway.register_entity_reference( - self._zha_device.ieee, - self.entity_id, - self._zha_device, - self.cluster_handlers, - self.device_info, - self.remove_future, - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - await super().async_will_remove_from_hass() - self.zha_device.gateway.remove_entity_reference(self) - self.remove_future.set_result(True) - - @callback - def async_restore_last_state(self, last_state) -> None: - """Restore previous state.""" - - async def async_update(self) -> None: - """Retrieve latest state.""" - tasks = [ - cluster_handler.async_update() - for cluster_handler in self.cluster_handlers.values() - if hasattr(cluster_handler, "async_update") - ] - if tasks: - await asyncio.gather(*tasks) - - -class ZhaGroupEntity(BaseZhaEntity): - """A base class for ZHA group entities.""" - - # The group name is set in the initializer - _attr_name: str - - def __init__( - self, - entity_ids: list[str], - unique_id: str, - group_id: int, - zha_device: ZHADevice, - **kwargs: Any, - ) -> None: - """Initialize a ZHA group.""" - super().__init__(unique_id, zha_device, **kwargs) - self._available = False - self._group = zha_device.gateway.groups.get(group_id) - self._group_id: int = group_id - self._entity_ids: list[str] = entity_ids - self._async_unsub_state_changed: CALLBACK_TYPE | None = None - self._handled_group_membership = False - self._change_listener_debouncer: Debouncer | None = None - self._update_group_from_child_delay = DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY - - self._attr_name = self._group.name - - @property - def available(self) -> bool: - """Return entity availability.""" - return self._available - - @classmethod - def create_entity( - cls, - entity_ids: list[str], - unique_id: str, - group_id: int, - zha_device: ZHADevice, - **kwargs: Any, - ) -> Self | None: - """Group Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - return cls(entity_ids, unique_id, group_id, zha_device, **kwargs) - - async def _handle_group_membership_changed(self): - """Handle group membership changed.""" - # Make sure we don't call remove twice as members are removed - if self._handled_group_membership: - return - - self._handled_group_membership = True - await self.async_remove(force_remove=True) - if len(self._group.members) >= 2: - async_dispatcher_send( - self.hass, SIGNAL_GROUP_ENTITY_REMOVED, self._group_id - ) - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - await super().async_added_to_hass() - await self.async_update() - - self.async_accept_signal( - None, - f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{self._group_id:04x}", - self._handle_group_membership_changed, - signal_override=True, - ) - - if self._change_listener_debouncer is None: - self._change_listener_debouncer = Debouncer( - self.hass, - _LOGGER, - cooldown=self._update_group_from_child_delay, - immediate=False, - function=functools.partial(self.async_update_ha_state, True), - ) - self.async_on_remove(self._change_listener_debouncer.async_cancel) - self._async_unsub_state_changed = async_track_state_change_event( - self.hass, self._entity_ids, self.async_state_changed_listener - ) - - @callback - def async_state_changed_listener(self, event: Event[EventStateChangedData]) -> None: - """Handle child updates.""" - # Delay to ensure that we get updates from all members before updating the group - assert self._change_listener_debouncer - self._change_listener_debouncer.async_schedule_call() - - async def async_will_remove_from_hass(self) -> None: - """Handle removal from Home Assistant.""" - await super().async_will_remove_from_hass() - if self._async_unsub_state_changed is not None: - self._async_unsub_state_changed() - self._async_unsub_state_changed = None - - async def async_update(self) -> None: - """Update the state of the group 
entity.""" diff --git a/homeassistant/components/zha/fan.py b/homeassistant/components/zha/fan.py index 3677befb76e..767c0d4cfb7 100644 --- a/homeassistant/components/zha/fan.py +++ b/homeassistant/components/zha/fan.py @@ -2,54 +2,26 @@ from __future__ import annotations -from abc import abstractmethod import functools -import math from typing import Any -from zigpy.zcl.clusters import hvac +from zha.application.platforms.fan.const import FanEntityFeature as ZHAFanEntityFeature -from homeassistant.components.fan import ( - ATTR_PERCENTAGE, - ATTR_PRESET_MODE, - FanEntity, - FanEntityFeature, -) +from homeassistant.components.fan import FanEntity, FanEntityFeature from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant, State, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.percentage import ( - percentage_to_ranged_value, - ranged_value_to_percentage, + +from .entity import ZHAEntity +from .helpers import ( + SIGNAL_ADD_ENTITIES, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from homeassistant.util.scaling import int_states_in_range - -from .core import discovery -from .core.cluster_handlers import wrap_zigpy_exceptions -from .core.const import CLUSTER_HANDLER_FAN, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity, ZhaGroupEntity - -# Additional speeds in zigbee's ZCL -# Spec is unclear as to what this value means. On King Of Fans HBUniversal -# receiver, this means Very High. 
-PRESET_MODE_ON = "on" -# The fan speed is self-regulated -PRESET_MODE_AUTO = "auto" -# When the heated/cooled space is occupied, the fan is always on -PRESET_MODE_SMART = "smart" - -SPEED_RANGE = (1, 3) # off is not included -PRESET_MODES_TO_NAME = {4: PRESET_MODE_ON, 5: PRESET_MODE_AUTO, 6: PRESET_MODE_SMART} - -DEFAULT_ON_PERCENTAGE = 50 - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.FAN) -GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, Platform.FAN) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.FAN) async def async_setup_entry( @@ -65,50 +37,65 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, ZhaFan, entities_to_create ), ) config_entry.async_on_unload(unsub) -class BaseFan(FanEntity): - """Base representation of a ZHA fan.""" +class ZhaFan(FanEntity, ZHAEntity): + """Representation of a ZHA fan.""" - _attr_supported_features = FanEntityFeature.SET_SPEED _attr_translation_key: str = "fan" + _enable_turn_on_off_backwards_compatibility = False + + def __init__(self, entity_data: EntityData) -> None: + """Initialize the ZHA fan.""" + super().__init__(entity_data) + features = FanEntityFeature(0) + zha_features: ZHAFanEntityFeature = self.entity_data.entity.supported_features + + if ZHAFanEntityFeature.DIRECTION in zha_features: + features |= FanEntityFeature.DIRECTION + if ZHAFanEntityFeature.OSCILLATE in zha_features: + features |= FanEntityFeature.OSCILLATE + if ZHAFanEntityFeature.PRESET_MODE in zha_features: + features |= FanEntityFeature.PRESET_MODE + if ZHAFanEntityFeature.SET_SPEED in zha_features: + features |= FanEntityFeature.SET_SPEED + if ZHAFanEntityFeature.TURN_ON in zha_features: + features |= FanEntityFeature.TURN_ON + if ZHAFanEntityFeature.TURN_OFF in zha_features: + features |= FanEntityFeature.TURN_OFF + + self._attr_supported_features 
= features + + @property + def preset_mode(self) -> str | None: + """Return the current preset mode.""" + return self.entity_data.entity.preset_mode @property def preset_modes(self) -> list[str]: """Return the available preset modes.""" - return list(self.preset_modes_to_name.values()) - - @property - def preset_modes_to_name(self) -> dict[int, str]: - """Return a dict from preset mode to name.""" - return PRESET_MODES_TO_NAME - - @property - def preset_name_to_mode(self) -> dict[str, int]: - """Return a dict from preset name to mode.""" - return {v: k for k, v in self.preset_modes_to_name.items()} + return self.entity_data.entity.preset_modes @property def default_on_percentage(self) -> int: """Return the default on percentage.""" - return DEFAULT_ON_PERCENTAGE + return self.entity_data.entity.default_on_percentage @property def speed_range(self) -> tuple[int, int]: """Return the range of speeds the fan supports. Off is not included.""" - return SPEED_RANGE + return self.entity_data.entity.speed_range @property def speed_count(self) -> int: """Return the number of speeds the fan supports.""" - return int_states_in_range(self.speed_range) + return self.entity_data.entity.speed_count + @convert_zha_error_to_ha_error async def async_turn_on( self, percentage: int | None = None, @@ -116,201 +103,30 @@ class BaseFan(FanEntity): **kwargs: Any, ) -> None: """Turn the entity on.""" - if percentage is None: - percentage = self.default_on_percentage - await self.async_set_percentage(percentage) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - await self.async_set_percentage(0) - - async def async_set_percentage(self, percentage: int) -> None: - """Set the speed percentage of the fan.""" - fan_mode = math.ceil(percentage_to_ranged_value(self.speed_range, percentage)) - await self._async_set_fan_mode(fan_mode) - - async def async_set_preset_mode(self, preset_mode: str) -> None: - """Set the preset mode for the fan.""" - await 
self._async_set_fan_mode(self.preset_name_to_mode[preset_mode]) - - @abstractmethod - async def _async_set_fan_mode(self, fan_mode: int) -> None: - """Set the fan mode for the fan.""" - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Handle state update from cluster handler.""" - - -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_FAN) -class ZhaFan(BaseFan, ZhaEntity): - """Representation of a ZHA fan.""" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._fan_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_FAN) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._fan_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + await self.entity_data.entity.async_turn_on( + percentage=percentage, preset_mode=preset_mode ) - - @property - def percentage(self) -> int | None: - """Return the current speed percentage.""" - if ( - self._fan_cluster_handler.fan_mode is None - or self._fan_cluster_handler.fan_mode > self.speed_range[1] - ): - return None - if self._fan_cluster_handler.fan_mode == 0: - return 0 - return ranged_value_to_percentage( - self.speed_range, self._fan_cluster_handler.fan_mode - ) - - @property - def preset_mode(self) -> str | None: - """Return the current preset mode.""" - return self.preset_modes_to_name.get(self._fan_cluster_handler.fan_mode) - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Handle state update from cluster handler.""" self.async_write_ha_state() - async def _async_set_fan_mode(self, fan_mode: int) -> None: - """Set the fan mode for the fan.""" - await self._fan_cluster_handler.async_set_speed(fan_mode) - self.async_set_state(0, "fan_mode", fan_mode) + @convert_zha_error_to_ha_error + async def async_turn_off(self, **kwargs: 
Any) -> None: + """Turn the entity off.""" + await self.entity_data.entity.async_turn_off() + self.async_write_ha_state() + @convert_zha_error_to_ha_error + async def async_set_percentage(self, percentage: int) -> None: + """Set the speed percentage of the fan.""" + await self.entity_data.entity.async_set_percentage(percentage=percentage) + self.async_write_ha_state() -@GROUP_MATCH() -class FanGroup(BaseFan, ZhaGroupEntity): - """Representation of a fan group.""" - - _attr_translation_key: str = "fan_group" - - def __init__( - self, entity_ids: list[str], unique_id: str, group_id: int, zha_device, **kwargs - ) -> None: - """Initialize a fan group.""" - super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) - self._available: bool = False - group = self.zha_device.gateway.get_group(self._group_id) - self._fan_cluster_handler = group.endpoint[hvac.Fan.cluster_id] - self._percentage = None - self._preset_mode = None + @convert_zha_error_to_ha_error + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set the preset mode for the fan.""" + await self.entity_data.entity.async_set_preset_mode(preset_mode=preset_mode) + self.async_write_ha_state() @property def percentage(self) -> int | None: """Return the current speed percentage.""" - return self._percentage - - @property - def preset_mode(self) -> str | None: - """Return the current preset mode.""" - return self._preset_mode - - async def _async_set_fan_mode(self, fan_mode: int) -> None: - """Set the fan mode for the group.""" - - with wrap_zigpy_exceptions(): - await self._fan_cluster_handler.write_attributes({"fan_mode": fan_mode}) - - self.async_set_state(0, "fan_mode", fan_mode) - - async def async_update(self) -> None: - """Attempt to retrieve on off state from the fan.""" - all_states = [self.hass.states.get(x) for x in self._entity_ids] - states: list[State] = list(filter(None, all_states)) - percentage_states: list[State] = [ - state for state in states if 
state.attributes.get(ATTR_PERCENTAGE) - ] - preset_mode_states: list[State] = [ - state for state in states if state.attributes.get(ATTR_PRESET_MODE) - ] - self._available = any(state.state != STATE_UNAVAILABLE for state in states) - - if percentage_states: - self._percentage = percentage_states[0].attributes[ATTR_PERCENTAGE] - self._preset_mode = None - elif preset_mode_states: - self._preset_mode = preset_mode_states[0].attributes[ATTR_PRESET_MODE] - self._percentage = None - else: - self._percentage = None - self._preset_mode = None - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await self.async_update() - await super().async_added_to_hass() - - -IKEA_SPEED_RANGE = (1, 10) # off is not included -IKEA_PRESET_MODES_TO_NAME = { - 1: PRESET_MODE_AUTO, - 2: "Speed 1", - 3: "Speed 1.5", - 4: "Speed 2", - 5: "Speed 2.5", - 6: "Speed 3", - 7: "Speed 3.5", - 8: "Speed 4", - 9: "Speed 4.5", - 10: "Speed 5", -} - - -@MULTI_MATCH( - cluster_handler_names="ikea_airpurifier", - models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, -) -class IkeaFan(ZhaFan): - """Representation of an Ikea fan.""" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs) -> None: - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._fan_cluster_handler = self.cluster_handlers.get("ikea_airpurifier") - - @property - def preset_modes_to_name(self) -> dict[int, str]: - """Return a dict from preset mode to name.""" - return IKEA_PRESET_MODES_TO_NAME - - @property - def speed_range(self) -> tuple[int, int]: - """Return the range of speeds the fan supports. 
Off is not included.""" - return IKEA_SPEED_RANGE - - @property - def default_on_percentage(self) -> int: - """Return the default on percentage.""" - return int( - (100 / self.speed_count) * self.preset_name_to_mode[PRESET_MODE_AUTO] - ) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_FAN, - models={"HBUniversalCFRemote", "HDC52EastwindFan"}, -) -class KofFan(ZhaFan): - """Representation of a fan made by King Of Fans.""" - - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE - - @property - def speed_range(self) -> tuple[int, int]: - """Return the range of speeds the fan supports. Off is not included.""" - return (1, 4) - - @property - def preset_modes_to_name(self) -> dict[int, str]: - """Return a dict from preset mode to name.""" - return {6: PRESET_MODE_SMART} + return self.entity_data.entity.percentage diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py new file mode 100644 index 00000000000..35a794e8631 --- /dev/null +++ b/homeassistant/components/zha/helpers.py @@ -0,0 +1,1298 @@ +"""Helper functions for the ZHA integration.""" + +from __future__ import annotations + +import asyncio +import collections +from collections.abc import Awaitable, Callable, Coroutine, Mapping +import copy +import dataclasses +import enum +import functools +import itertools +import logging +import re +import time +from types import MappingProxyType +from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, ParamSpec, TypeVar, cast +from zoneinfo import ZoneInfo + +import voluptuous as vol +from zha.application.const import ( + ATTR_CLUSTER_ID, + ATTR_DEVICE_IEEE, + ATTR_TYPE, + ATTR_UNIQUE_ID, + CLUSTER_TYPE_IN, + CLUSTER_TYPE_OUT, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, + UNKNOWN_MANUFACTURER, + UNKNOWN_MODEL, + ZHA_CLUSTER_HANDLER_CFG_DONE, + ZHA_CLUSTER_HANDLER_MSG, + ZHA_CLUSTER_HANDLER_MSG_BIND, + ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, + 
ZHA_CLUSTER_HANDLER_MSG_DATA, + ZHA_EVENT, + ZHA_GW_MSG, + ZHA_GW_MSG_DEVICE_FULL_INIT, + ZHA_GW_MSG_DEVICE_INFO, + ZHA_GW_MSG_DEVICE_JOINED, + ZHA_GW_MSG_DEVICE_REMOVED, + ZHA_GW_MSG_GROUP_ADDED, + ZHA_GW_MSG_GROUP_INFO, + ZHA_GW_MSG_GROUP_MEMBER_ADDED, + ZHA_GW_MSG_GROUP_MEMBER_REMOVED, + ZHA_GW_MSG_GROUP_REMOVED, + ZHA_GW_MSG_RAW_INIT, + RadioType, +) +from zha.application.gateway import ( + ConnectionLostEvent, + DeviceFullInitEvent, + DeviceJoinedEvent, + DeviceLeftEvent, + DeviceRemovedEvent, + Gateway, + GroupEvent, + RawDeviceInitializedEvent, +) +from zha.application.helpers import ( + AlarmControlPanelOptions, + CoordinatorConfiguration, + DeviceOptions, + DeviceOverridesConfiguration, + LightOptions, + QuirksConfiguration, + ZHAConfiguration, + ZHAData, +) +from zha.application.platforms import GroupEntity, PlatformEntity +from zha.event import EventBase +from zha.exceptions import ZHAException +from zha.mixins import LogMixin +from zha.zigbee.cluster_handlers import ClusterBindEvent, ClusterConfigureReportingEvent +from zha.zigbee.device import ClusterHandlerConfigurationComplete, Device, ZHAEvent +from zha.zigbee.group import Group, GroupInfo, GroupMember +from zigpy.config import ( + CONF_DATABASE, + CONF_DEVICE, + CONF_DEVICE_PATH, + CONF_NWK, + CONF_NWK_CHANNEL, +) +import zigpy.exceptions +from zigpy.profiles import PROFILES +import zigpy.types +from zigpy.types import EUI64 +import zigpy.util +import zigpy.zcl +from zigpy.zcl.foundation import CommandSchema + +from homeassistant import __path__ as HOMEASSISTANT_PATH +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + is_multiprotocol_url, +) +from homeassistant.components.system_log import LogEntry +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + ATTR_AREA_ID, + ATTR_DEVICE_ID, + ATTR_ENTITY_ID, + ATTR_MODEL, + ATTR_NAME, + Platform, +) +from homeassistant.core import HomeAssistant, callback +from 
homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType + +from .const import ( + ATTR_ACTIVE_COORDINATOR, + ATTR_ATTRIBUTES, + ATTR_AVAILABLE, + ATTR_CLUSTER_NAME, + ATTR_DEVICE_TYPE, + ATTR_ENDPOINT_NAMES, + ATTR_IEEE, + ATTR_LAST_SEEN, + ATTR_LQI, + ATTR_MANUFACTURER, + ATTR_MANUFACTURER_CODE, + ATTR_NEIGHBORS, + ATTR_NWK, + ATTR_POWER_SOURCE, + ATTR_QUIRK_APPLIED, + ATTR_QUIRK_CLASS, + ATTR_QUIRK_ID, + ATTR_ROUTES, + ATTR_RSSI, + ATTR_SIGNATURE, + ATTR_SUCCESS, + CONF_ALARM_ARM_REQUIRES_CODE, + CONF_ALARM_FAILED_TRIES, + CONF_ALARM_MASTER_CODE, + CONF_ALWAYS_PREFER_XY_COLOR_MODE, + CONF_BAUDRATE, + CONF_CONSIDER_UNAVAILABLE_BATTERY, + CONF_CONSIDER_UNAVAILABLE_MAINS, + CONF_CUSTOM_QUIRKS_PATH, + CONF_DEFAULT_LIGHT_TRANSITION, + CONF_DEVICE_CONFIG, + CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, + CONF_ENABLE_IDENTIFY_ON_JOIN, + CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, + CONF_ENABLE_QUIRKS, + CONF_FLOW_CONTROL, + CONF_GROUP_MEMBERS_ASSUME_STATE, + CONF_RADIO_TYPE, + CONF_ZIGPY, + CUSTOM_CONFIGURATION, + DATA_ZHA, + DEFAULT_DATABASE_NAME, + DEVICE_PAIRING_STATUS, + DOMAIN, + ZHA_ALARM_OPTIONS, + ZHA_OPTIONS, +) + +if TYPE_CHECKING: + from logging import Filter, LogRecord + + from .entity import ZHAEntity + from .update import ZHAFirmwareUpdateCoordinator + + _LogFilterType = Filter | Callable[[LogRecord], bool] + +_P = ParamSpec("_P") +_EntityT = TypeVar("_EntityT", bound="ZHAEntity") + +_LOGGER = logging.getLogger(__name__) + +DEBUG_COMP_BELLOWS = "bellows" +DEBUG_COMP_ZHA = "homeassistant.components.zha" +DEBUG_LIB_ZHA = "zha" +DEBUG_COMP_ZIGPY = "zigpy" +DEBUG_COMP_ZIGPY_ZNP = "zigpy_znp" +DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz" +DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee" 
+DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate" +DEBUG_LEVEL_CURRENT = "current" +DEBUG_LEVEL_ORIGINAL = "original" +DEBUG_LEVELS = { + DEBUG_COMP_BELLOWS: logging.DEBUG, + DEBUG_COMP_ZHA: logging.DEBUG, + DEBUG_COMP_ZIGPY: logging.DEBUG, + DEBUG_COMP_ZIGPY_ZNP: logging.DEBUG, + DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG, + DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG, + DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG, + DEBUG_LIB_ZHA: logging.DEBUG, +} +DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY, DEBUG_LIB_ZHA] +ZHA_GW_MSG_LOG_ENTRY = "log_entry" +ZHA_GW_MSG_LOG_OUTPUT = "log_output" +SIGNAL_REMOVE_ENTITIES = "zha_remove_entities" +GROUP_ENTITY_DOMAINS = [Platform.LIGHT, Platform.SWITCH, Platform.FAN] +SIGNAL_ADD_ENTITIES = "zha_add_entities" +ENTITIES = "entities" + +RX_ON_WHEN_IDLE = "rx_on_when_idle" +RELATIONSHIP = "relationship" +EXTENDED_PAN_ID = "extended_pan_id" +PERMIT_JOINING = "permit_joining" +DEPTH = "depth" + +DEST_NWK = "dest_nwk" +ROUTE_STATUS = "route_status" +MEMORY_CONSTRAINED = "memory_constrained" +MANY_TO_ONE = "many_to_one" +ROUTE_RECORD_REQUIRED = "route_record_required" +NEXT_HOP = "next_hop" + +USER_GIVEN_NAME = "user_given_name" +DEVICE_REG_ID = "device_reg_id" + + +class GroupEntityReference(NamedTuple): + """Reference to a group entity.""" + + name: str | None + original_name: str | None + entity_id: str + + +class ZHAGroupProxy(LogMixin): + """Proxy class to interact with the ZHA group instances.""" + + def __init__(self, group: Group, gateway_proxy: ZHAGatewayProxy) -> None: + """Initialize the gateway proxy.""" + self.group: Group = group + self.gateway_proxy: ZHAGatewayProxy = gateway_proxy + + @property + def group_info(self) -> dict[str, Any]: + """Return a group description for group.""" + return { + "name": self.group.name, + "group_id": self.group.group_id, + "members": [ + { + "endpoint_id": member.endpoint_id, + "device": self.gateway_proxy.device_proxies[ + member.device.ieee + ].zha_device_info, + "entities": [e._asdict() for e in 
self.associated_entities(member)], + } + for member in self.group.members + ], + } + + def associated_entities(self, member: GroupMember) -> list[GroupEntityReference]: + """Return the list of entities that were derived from this endpoint.""" + entity_registry = er.async_get(self.gateway_proxy.hass) + entity_refs: collections.defaultdict[EUI64, list[EntityReference]] = ( + self.gateway_proxy.ha_entity_refs + ) + + entity_info = [] + + for entity_ref in entity_refs.get(member.device.ieee): # type: ignore[union-attr] + if not entity_ref.entity_data.is_group_entity: + continue + entity = entity_registry.async_get(entity_ref.ha_entity_id) + + if ( + entity is None + or entity_ref.entity_data.group_proxy is None + or entity_ref.entity_data.group_proxy.group.group_id + != member.group.group_id + ): + continue + + entity_info.append( + GroupEntityReference( + name=entity.name, + original_name=entity.original_name, + entity_id=entity_ref.ha_entity_id, + ) + ) + + return entity_info + + def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: + """Log a message.""" + msg = f"[%s](%s): {msg}" + args = ( + f"0x{self.group.group_id:04x}", + self.group.endpoint.endpoint_id, + *args, + ) + _LOGGER.log(level, msg, *args, **kwargs) + + +class ZHADeviceProxy(EventBase): + """Proxy class to interact with the ZHA device instances.""" + + _ha_device_id: str + + def __init__(self, device: Device, gateway_proxy: ZHAGatewayProxy) -> None: + """Initialize the gateway proxy.""" + super().__init__() + self.device = device + self.gateway_proxy = gateway_proxy + self._unsubs: list[Callable[[], None]] = [] + self._unsubs.append(self.device.on_all_events(self._handle_event_protocol)) + + @property + def device_id(self) -> str: + """Return the HA device registry device id.""" + return self._ha_device_id + + @device_id.setter + def device_id(self, device_id: str) -> None: + """Set the HA device registry device id.""" + self._ha_device_id = device_id + + @property + def device_info(self) 
-> dict[str, Any]: + """Return a device description for device.""" + ieee = str(self.device.ieee) + time_struct = time.localtime(self.device.last_seen) + update_time = time.strftime("%Y-%m-%dT%H:%M:%S", time_struct) + return { + ATTR_IEEE: ieee, + ATTR_NWK: self.device.nwk, + ATTR_MANUFACTURER: self.device.manufacturer, + ATTR_MODEL: self.device.model, + ATTR_NAME: self.device.name or ieee, + ATTR_QUIRK_APPLIED: self.device.quirk_applied, + ATTR_QUIRK_CLASS: self.device.quirk_class, + ATTR_QUIRK_ID: self.device.quirk_id, + ATTR_MANUFACTURER_CODE: self.device.manufacturer_code, + ATTR_POWER_SOURCE: self.device.power_source, + ATTR_LQI: self.device.lqi, + ATTR_RSSI: self.device.rssi, + ATTR_LAST_SEEN: update_time, + ATTR_AVAILABLE: self.device.available, + ATTR_DEVICE_TYPE: self.device.device_type, + ATTR_SIGNATURE: self.device.zigbee_signature, + } + + @property + def zha_device_info(self) -> dict[str, Any]: + """Get ZHA device information.""" + device_info: dict[str, Any] = {} + device_info.update(self.device_info) + device_info[ATTR_ACTIVE_COORDINATOR] = self.device.is_active_coordinator + device_info[ENTITIES] = [ + { + ATTR_ENTITY_ID: entity_ref.ha_entity_id, + ATTR_NAME: entity_ref.ha_device_info[ATTR_NAME], + } + for entity_ref in self.gateway_proxy.ha_entity_refs[self.device.ieee] + ] + + topology = self.gateway_proxy.gateway.application_controller.topology + device_info[ATTR_NEIGHBORS] = [ + { + ATTR_DEVICE_TYPE: neighbor.device_type.name, + RX_ON_WHEN_IDLE: neighbor.rx_on_when_idle.name, + RELATIONSHIP: neighbor.relationship.name, + EXTENDED_PAN_ID: str(neighbor.extended_pan_id), + ATTR_IEEE: str(neighbor.ieee), + ATTR_NWK: str(neighbor.nwk), + PERMIT_JOINING: neighbor.permit_joining.name, + DEPTH: str(neighbor.depth), + ATTR_LQI: str(neighbor.lqi), + } + for neighbor in topology.neighbors[self.device.ieee] + ] + + device_info[ATTR_ROUTES] = [ + { + DEST_NWK: str(route.DstNWK), + ROUTE_STATUS: str(route.RouteStatus.name), + MEMORY_CONSTRAINED: 
bool(route.MemoryConstrained), + MANY_TO_ONE: bool(route.ManyToOne), + ROUTE_RECORD_REQUIRED: bool(route.RouteRecordRequired), + NEXT_HOP: str(route.NextHop), + } + for route in topology.routes[self.device.ieee] + ] + + # Return endpoint device type Names + names: list[dict[str, str]] = [] + for endpoint in ( + ep for epid, ep in self.device.device.endpoints.items() if epid + ): + profile = PROFILES.get(endpoint.profile_id) + if profile and endpoint.device_type is not None: + # DeviceType provides undefined enums + names.append({ATTR_NAME: profile.DeviceType(endpoint.device_type).name}) + else: + names.append( + { + ATTR_NAME: ( + f"unknown {endpoint.device_type} device_type " + f"of 0x{(endpoint.profile_id or 0xFFFF):04x} profile id" + ) + } + ) + device_info[ATTR_ENDPOINT_NAMES] = names + + device_registry = dr.async_get(self.gateway_proxy.hass) + reg_device = device_registry.async_get(self.device_id) + if reg_device is not None: + device_info[USER_GIVEN_NAME] = reg_device.name_by_user + device_info[DEVICE_REG_ID] = reg_device.id + device_info[ATTR_AREA_ID] = reg_device.area_id + return device_info + + @callback + def handle_zha_event(self, zha_event: ZHAEvent) -> None: + """Handle a ZHA event.""" + self.gateway_proxy.hass.bus.async_fire( + ZHA_EVENT, + { + ATTR_DEVICE_IEEE: str(zha_event.device_ieee), + ATTR_UNIQUE_ID: zha_event.unique_id, + ATTR_DEVICE_ID: self.device_id, + **zha_event.data, + }, + ) + + @callback + def handle_zha_channel_configure_reporting( + self, event: ClusterConfigureReportingEvent + ) -> None: + """Handle a ZHA cluster configure reporting event.""" + async_dispatcher_send( + self.gateway_proxy.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, + ZHA_CLUSTER_HANDLER_MSG_DATA: { + ATTR_CLUSTER_NAME: event.cluster_name, + ATTR_CLUSTER_ID: event.cluster_id, + ATTR_ATTRIBUTES: event.attributes, + }, + }, + ) + + @callback + def handle_zha_channel_cfg_done( + self, event: ClusterHandlerConfigurationComplete + ) 
-> None: + """Handle a ZHA cluster configure reporting event.""" + async_dispatcher_send( + self.gateway_proxy.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_CFG_DONE, + }, + ) + + @callback + def handle_zha_channel_bind(self, event: ClusterBindEvent) -> None: + """Handle a ZHA cluster bind event.""" + async_dispatcher_send( + self.gateway_proxy.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, + ZHA_CLUSTER_HANDLER_MSG_DATA: { + ATTR_CLUSTER_NAME: event.cluster_name, + ATTR_CLUSTER_ID: event.cluster_id, + ATTR_SUCCESS: event.success, + }, + }, + ) + + +class EntityReference(NamedTuple): + """Describes an entity reference.""" + + ha_entity_id: str + entity_data: EntityData + ha_device_info: dr.DeviceInfo + remove_future: asyncio.Future[Any] + + +class ZHAGatewayProxy(EventBase): + """Proxy class to interact with the ZHA gateway.""" + + def __init__( + self, hass: HomeAssistant, config_entry: ConfigEntry, gateway: Gateway + ) -> None: + """Initialize the gateway proxy.""" + super().__init__() + self.hass = hass + self.config_entry = config_entry + self.gateway = gateway + self.device_proxies: dict[str, ZHADeviceProxy] = {} + self.group_proxies: dict[int, ZHAGroupProxy] = {} + self._ha_entity_refs: collections.defaultdict[EUI64, list[EntityReference]] = ( + collections.defaultdict(list) + ) + self._log_levels: dict[str, dict[str, int]] = { + DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(), + DEBUG_LEVEL_CURRENT: async_capture_log_levels(), + } + self.debug_enabled: bool = False + self._log_relay_handler: LogRelayHandler = LogRelayHandler(hass, self) + self._unsubs: list[Callable[[], None]] = [] + self._unsubs.append(self.gateway.on_all_events(self._handle_event_protocol)) + self._reload_task: asyncio.Task | None = None + + @property + def ha_entity_refs(self) -> collections.defaultdict[EUI64, list[EntityReference]]: + """Return entities by ieee.""" + return self._ha_entity_refs + + def register_entity_reference( + 
self, + ha_entity_id: str, + entity_data: EntityData, + ha_device_info: dr.DeviceInfo, + remove_future: asyncio.Future[Any], + ) -> None: + """Record the creation of a hass entity associated with ieee.""" + self._ha_entity_refs[entity_data.device_proxy.device.ieee].append( + EntityReference( + ha_entity_id=ha_entity_id, + entity_data=entity_data, + ha_device_info=ha_device_info, + remove_future=remove_future, + ) + ) + + async def async_initialize_devices_and_entities(self) -> None: + """Initialize devices and entities.""" + for device in self.gateway.devices.values(): + device_proxy = self._async_get_or_create_device_proxy(device) + self._create_entity_metadata(device_proxy) + for group in self.gateway.groups.values(): + group_proxy = self._async_get_or_create_group_proxy(group) + self._create_entity_metadata(group_proxy) + + await self.gateway.async_initialize_devices_and_entities() + + @callback + def handle_connection_lost(self, event: ConnectionLostEvent) -> None: + """Handle a connection lost event.""" + + _LOGGER.debug("Connection to the radio was lost: %r", event) + + # Ensure we do not queue up multiple resets + if self._reload_task is not None: + _LOGGER.debug("Ignoring reset, one is already running") + return + + self._reload_task = self.hass.async_create_task( + self.hass.config_entries.async_reload(self.config_entry.entry_id), + ) + + @callback + def handle_device_joined(self, event: DeviceJoinedEvent) -> None: + """Handle a device joined event.""" + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_JOINED, + ZHA_GW_MSG_DEVICE_INFO: { + ATTR_NWK: event.device_info.nwk, + ATTR_IEEE: str(event.device_info.ieee), + DEVICE_PAIRING_STATUS: event.device_info.pairing_status.name, + }, + }, + ) + + @callback + def handle_device_removed(self, event: DeviceRemovedEvent) -> None: + """Handle a device removed event.""" + zha_device_proxy = self.device_proxies.pop(event.device_info.ieee, None) + entity_refs = 
self._ha_entity_refs.pop(event.device_info.ieee, None) + if zha_device_proxy is not None: + device_info = zha_device_proxy.zha_device_info + # zha_device_proxy.async_cleanup_handles() + async_dispatcher_send( + self.hass, + f"{SIGNAL_REMOVE_ENTITIES}_{zha_device_proxy.device.ieee!s}", + ) + self.hass.async_create_task( + self._async_remove_device(zha_device_proxy, entity_refs), + "ZHAGateway._async_remove_device", + ) + if device_info is not None: + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_REMOVED, + ZHA_GW_MSG_DEVICE_INFO: device_info, + }, + ) + + @callback + def handle_device_left(self, event: DeviceLeftEvent) -> None: + """Handle a device left event.""" + + @callback + def handle_raw_device_initialized(self, event: RawDeviceInitializedEvent) -> None: + """Handle a raw device initialized event.""" + manuf = event.device_info.manufacturer + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_RAW_INIT, + ZHA_GW_MSG_DEVICE_INFO: { + ATTR_NWK: str(event.device_info.nwk), + ATTR_IEEE: str(event.device_info.ieee), + DEVICE_PAIRING_STATUS: event.device_info.pairing_status.name, + ATTR_MODEL: event.device_info.model + if event.device_info.model + else UNKNOWN_MODEL, + ATTR_MANUFACTURER: manuf if manuf else UNKNOWN_MANUFACTURER, + ATTR_SIGNATURE: event.device_info.signature, + }, + }, + ) + + @callback + def handle_device_fully_initialized(self, event: DeviceFullInitEvent) -> None: + """Handle a device fully initialized event.""" + zha_device = self.gateway.get_device(event.device_info.ieee) + zha_device_proxy = self._async_get_or_create_device_proxy(zha_device) + + device_info = zha_device_proxy.zha_device_info + device_info[DEVICE_PAIRING_STATUS] = event.device_info.pairing_status.name + if event.new_join: + self._create_entity_metadata(zha_device_proxy) + async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: 
ZHA_GW_MSG_DEVICE_FULL_INIT, + ZHA_GW_MSG_DEVICE_INFO: device_info, + }, + ) + + @callback + def handle_group_member_removed(self, event: GroupEvent) -> None: + """Handle a group member removed event.""" + zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) + zha_group_proxy.info("group_member_removed - group_info: %s", event.group_info) + self._update_group_entities(event) + self._send_group_gateway_message( + zha_group_proxy, ZHA_GW_MSG_GROUP_MEMBER_REMOVED + ) + + @callback + def handle_group_member_added(self, event: GroupEvent) -> None: + """Handle a group member added event.""" + zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) + zha_group_proxy.info("group_member_added - group_info: %s", event.group_info) + self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_MEMBER_ADDED) + self._update_group_entities(event) + + @callback + def handle_group_added(self, event: GroupEvent) -> None: + """Handle a group added event.""" + zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) + zha_group_proxy.info("group_added") + self._update_group_entities(event) + self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_ADDED) + + @callback + def handle_group_removed(self, event: GroupEvent) -> None: + """Handle a group removed event.""" + zha_group_proxy = self.group_proxies.pop(event.group_info.group_id) + self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_REMOVED) + zha_group_proxy.info("group_removed") + self._cleanup_group_entity_registry_entries(zha_group_proxy) + + @callback + def async_enable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: + """Enable debug mode for ZHA.""" + self._log_levels[DEBUG_LEVEL_ORIGINAL] = async_capture_log_levels() + async_set_logger_levels(DEBUG_LEVELS) + self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() + + if filterer: + self._log_relay_handler.addFilter(filterer) + + for logger_name in 
DEBUG_RELAY_LOGGERS: + logging.getLogger(logger_name).addHandler(self._log_relay_handler) + + self.debug_enabled = True + + @callback + def async_disable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: + """Disable debug mode for ZHA.""" + async_set_logger_levels(self._log_levels[DEBUG_LEVEL_ORIGINAL]) + self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() + for logger_name in DEBUG_RELAY_LOGGERS: + logging.getLogger(logger_name).removeHandler(self._log_relay_handler) + if filterer: + self._log_relay_handler.removeFilter(filterer) + self.debug_enabled = False + + async def shutdown(self) -> None: + """Shutdown the gateway proxy.""" + for unsub in self._unsubs: + unsub() + await self.gateway.shutdown() + + def get_device_proxy(self, ieee: EUI64) -> ZHADeviceProxy | None: + """Return ZHADevice for given ieee.""" + return self.device_proxies.get(ieee) + + def get_group_proxy(self, group_id: int | str) -> ZHAGroupProxy | None: + """Return Group for given group id.""" + if isinstance(group_id, str): + for group_proxy in self.group_proxies.values(): + if group_proxy.group.name == group_id: + return group_proxy + return None + return self.group_proxies.get(group_id) + + def get_entity_reference(self, entity_id: str) -> EntityReference | None: + """Return entity reference for given entity_id if found.""" + for entity_reference in itertools.chain.from_iterable( + self.ha_entity_refs.values() + ): + if entity_id == entity_reference.ha_entity_id: + return entity_reference + return None + + def remove_entity_reference(self, entity: ZHAEntity) -> None: + """Remove entity reference for given entity_id if found.""" + if entity.zha_device.ieee in self.ha_entity_refs: + entity_refs = self.ha_entity_refs.get(entity.zha_device.ieee) + self.ha_entity_refs[entity.zha_device.ieee] = [ + e + for e in entity_refs # type: ignore[union-attr] + if e.ha_entity_id != entity.entity_id + ] + + def _async_get_or_create_device_proxy(self, zha_device: Device) -> 
ZHADeviceProxy: + """Get or create a ZHA device.""" + if (zha_device_proxy := self.device_proxies.get(zha_device.ieee)) is None: + zha_device_proxy = ZHADeviceProxy(zha_device, self) + self.device_proxies[zha_device_proxy.device.ieee] = zha_device_proxy + + device_registry = dr.async_get(self.hass) + device_registry_device = device_registry.async_get_or_create( + config_entry_id=self.config_entry.entry_id, + connections={(dr.CONNECTION_ZIGBEE, str(zha_device.ieee))}, + identifiers={(DOMAIN, str(zha_device.ieee))}, + name=zha_device.name, + manufacturer=zha_device.manufacturer, + model=zha_device.model, + ) + zha_device_proxy.device_id = device_registry_device.id + return zha_device_proxy + + def _async_get_or_create_group_proxy(self, group_info: GroupInfo) -> ZHAGroupProxy: + """Get or create a ZHA group.""" + zha_group_proxy = self.group_proxies.get(group_info.group_id) + if zha_group_proxy is None: + zha_group_proxy = ZHAGroupProxy( + self.gateway.groups[group_info.group_id], self + ) + self.group_proxies[group_info.group_id] = zha_group_proxy + return zha_group_proxy + + def _create_entity_metadata( + self, proxy_object: ZHADeviceProxy | ZHAGroupProxy + ) -> None: + """Create HA entity metadata.""" + ha_zha_data = get_zha_data(self.hass) + coordinator_proxy = self.device_proxies[ + self.gateway.coordinator_zha_device.ieee + ] + + if isinstance(proxy_object, ZHADeviceProxy): + for entity in proxy_object.device.platform_entities.values(): + ha_zha_data.platforms[Platform(entity.PLATFORM)].append( + EntityData( + entity=entity, device_proxy=proxy_object, group_proxy=None + ) + ) + else: + for entity in proxy_object.group.group_entities.values(): + ha_zha_data.platforms[Platform(entity.PLATFORM)].append( + EntityData( + entity=entity, + device_proxy=coordinator_proxy, + group_proxy=proxy_object, + ) + ) + + def _cleanup_group_entity_registry_entries( + self, zigpy_group: zigpy.group.Group + ) -> None: + """Remove entity registry entries for group entities when the 
groups are removed from HA.""" + # first we collect the potential unique ids for entities that could be created from this group + possible_entity_unique_ids = [ + f"{domain}_zha_group_0x{zigpy_group.group_id:04x}" + for domain in GROUP_ENTITY_DOMAINS + ] + + # then we get all group entity entries tied to the coordinator + entity_registry = er.async_get(self.hass) + assert self.coordinator_zha_device + all_group_entity_entries = er.async_entries_for_device( + entity_registry, + self.coordinator_zha_device.device_id, + include_disabled_entities=True, + ) + + # then we get the entity entries for this specific group + # by getting the entries that match + entries_to_remove = [ + entry + for entry in all_group_entity_entries + if entry.unique_id in possible_entity_unique_ids + ] + + # then we remove the entries from the entity registry + for entry in entries_to_remove: + _LOGGER.debug( + "cleaning up entity registry entry for entity: %s", entry.entity_id + ) + entity_registry.async_remove(entry.entity_id) + + def _update_group_entities(self, group_event: GroupEvent) -> None: + """Update group entities when a group event is received.""" + async_dispatcher_send( + self.hass, + f"{SIGNAL_REMOVE_ENTITIES}_group_{group_event.group_info.group_id}", + ) + self._create_entity_metadata( + self.group_proxies[group_event.group_info.group_id] + ) + async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) + + def _send_group_gateway_message( + self, zha_group_proxy: ZHAGroupProxy, gateway_message_type: str + ) -> None: + """Send the gateway event for a zigpy group event.""" + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: gateway_message_type, + ZHA_GW_MSG_GROUP_INFO: zha_group_proxy.group_info, + }, + ) + + async def _async_remove_device( + self, device: ZHADeviceProxy, entity_refs: list[EntityReference] | None + ) -> None: + if entity_refs is not None: + remove_tasks: list[asyncio.Future[Any]] = [ + entity_ref.remove_future for entity_ref in entity_refs + ] + if 
remove_tasks: + await asyncio.wait(remove_tasks) + + device_registry = dr.async_get(self.hass) + reg_device = device_registry.async_get(device.device_id) + if reg_device is not None: + device_registry.async_remove_device(reg_device.id) + + +@callback +def async_capture_log_levels() -> dict[str, int]: + """Capture current logger levels for ZHA.""" + return { + DEBUG_COMP_BELLOWS: logging.getLogger(DEBUG_COMP_BELLOWS).getEffectiveLevel(), + DEBUG_COMP_ZHA: logging.getLogger(DEBUG_COMP_ZHA).getEffectiveLevel(), + DEBUG_COMP_ZIGPY: logging.getLogger(DEBUG_COMP_ZIGPY).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_ZNP: logging.getLogger( + DEBUG_COMP_ZIGPY_ZNP + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_DECONZ: logging.getLogger( + DEBUG_COMP_ZIGPY_DECONZ + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_XBEE: logging.getLogger( + DEBUG_COMP_ZIGPY_XBEE + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_ZIGATE: logging.getLogger( + DEBUG_COMP_ZIGPY_ZIGATE + ).getEffectiveLevel(), + DEBUG_LIB_ZHA: logging.getLogger(DEBUG_LIB_ZHA).getEffectiveLevel(), + } + + +@callback +def async_set_logger_levels(levels: dict[str, int]) -> None: + """Set logger levels for ZHA.""" + logging.getLogger(DEBUG_COMP_BELLOWS).setLevel(levels[DEBUG_COMP_BELLOWS]) + logging.getLogger(DEBUG_COMP_ZHA).setLevel(levels[DEBUG_COMP_ZHA]) + logging.getLogger(DEBUG_COMP_ZIGPY).setLevel(levels[DEBUG_COMP_ZIGPY]) + logging.getLogger(DEBUG_COMP_ZIGPY_ZNP).setLevel(levels[DEBUG_COMP_ZIGPY_ZNP]) + logging.getLogger(DEBUG_COMP_ZIGPY_DECONZ).setLevel(levels[DEBUG_COMP_ZIGPY_DECONZ]) + logging.getLogger(DEBUG_COMP_ZIGPY_XBEE).setLevel(levels[DEBUG_COMP_ZIGPY_XBEE]) + logging.getLogger(DEBUG_COMP_ZIGPY_ZIGATE).setLevel(levels[DEBUG_COMP_ZIGPY_ZIGATE]) + logging.getLogger(DEBUG_LIB_ZHA).setLevel(levels[DEBUG_LIB_ZHA]) + + +class LogRelayHandler(logging.Handler): + """Log handler for error messages.""" + + def __init__(self, hass: HomeAssistant, gateway: ZHAGatewayProxy) -> None: + """Initialize a new LogErrorHandler.""" + 
super().__init__() + self.hass = hass + self.gateway = gateway + hass_path: str = HOMEASSISTANT_PATH[0] + config_dir = self.hass.config.config_dir + self.paths_re = re.compile( + r"(?:{})/(.*)".format( + "|".join([re.escape(x) for x in (hass_path, config_dir)]) + ) + ) + + def emit(self, record: LogRecord) -> None: + """Relay log message via dispatcher.""" + entry = LogEntry( + record, self.paths_re, figure_out_source=record.levelno >= logging.WARNING + ) + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + {ATTR_TYPE: ZHA_GW_MSG_LOG_OUTPUT, ZHA_GW_MSG_LOG_ENTRY: entry.to_dict()}, + ) + + +@dataclasses.dataclass(kw_only=True, slots=True) +class HAZHAData: + """ZHA data stored in `hass.data`.""" + + yaml_config: ConfigType = dataclasses.field(default_factory=dict) + config_entry: ConfigEntry | None = dataclasses.field(default=None) + device_trigger_cache: dict[str, tuple[str, dict]] = dataclasses.field( + default_factory=dict + ) + gateway_proxy: ZHAGatewayProxy | None = dataclasses.field(default=None) + platforms: collections.defaultdict[Platform, list] = dataclasses.field( + default_factory=lambda: collections.defaultdict(list) + ) + update_coordinator: ZHAFirmwareUpdateCoordinator | None = dataclasses.field( + default=None + ) + + +@dataclasses.dataclass(kw_only=True, slots=True) +class EntityData: + """ZHA entity data.""" + + entity: PlatformEntity | GroupEntity + device_proxy: ZHADeviceProxy + group_proxy: ZHAGroupProxy | None = dataclasses.field(default=None) + + @property + def is_group_entity(self) -> bool: + """Return if this is a group entity.""" + return self.group_proxy is not None and isinstance(self.entity, GroupEntity) + + +def get_zha_data(hass: HomeAssistant) -> HAZHAData: + """Get the global ZHA data object.""" + if DATA_ZHA not in hass.data: + hass.data[DATA_ZHA] = HAZHAData() + + return hass.data[DATA_ZHA] + + +def get_zha_gateway(hass: HomeAssistant) -> Gateway: + """Get the ZHA gateway object.""" + if (gateway_proxy := 
get_zha_data(hass).gateway_proxy) is None: + raise ValueError("No gateway object exists") + + return gateway_proxy.gateway + + +def get_zha_gateway_proxy(hass: HomeAssistant) -> ZHAGatewayProxy: + """Get the ZHA gateway object.""" + if (gateway_proxy := get_zha_data(hass).gateway_proxy) is None: + raise ValueError("No gateway object exists") + + return gateway_proxy + + +def get_config_entry(hass: HomeAssistant) -> ConfigEntry: + """Get the ZHA gateway object.""" + if (gateway_proxy := get_zha_data(hass).gateway_proxy) is None: + raise ValueError("No gateway object exists to retrieve the config entry from.") + + return gateway_proxy.config_entry + + +@callback +def async_get_zha_device_proxy(hass: HomeAssistant, device_id: str) -> ZHADeviceProxy: + """Get a ZHA device for the given device registry id.""" + device_registry = dr.async_get(hass) + registry_device = device_registry.async_get(device_id) + if not registry_device: + _LOGGER.error("Device id `%s` not found in registry", device_id) + raise KeyError(f"Device id `{device_id}` not found in registry.") + zha_gateway_proxy = get_zha_gateway_proxy(hass) + try: + ieee_address = list(registry_device.identifiers)[0][1] + ieee = EUI64.convert(ieee_address) + except (IndexError, ValueError) as ex: + _LOGGER.error( + "Unable to determine device IEEE for device with device id `%s`", device_id + ) + raise KeyError( + f"Unable to determine device IEEE for device with device id `{device_id}`." 
+ ) from ex + return zha_gateway_proxy.device_proxies[ieee] + + +def cluster_command_schema_to_vol_schema(schema: CommandSchema) -> vol.Schema: + """Convert a cluster command schema to a voluptuous schema.""" + return vol.Schema( + { + vol.Optional(field.name) + if field.optional + else vol.Required(field.name): schema_type_to_vol(field.type) + for field in schema.fields + } + ) + + +def schema_type_to_vol(field_type: Any) -> Any: + """Convert a schema type to a voluptuous type.""" + if issubclass(field_type, enum.Flag) and field_type.__members__: + return cv.multi_select( + [key.replace("_", " ") for key in field_type.__members__] + ) + if issubclass(field_type, enum.Enum) and field_type.__members__: + return vol.In([key.replace("_", " ") for key in field_type.__members__]) + if ( + issubclass(field_type, zigpy.types.FixedIntType) + or issubclass(field_type, enum.Flag) + or issubclass(field_type, enum.Enum) + ): + return vol.All( + vol.Coerce(int), vol.Range(field_type.min_value, field_type.max_value) + ) + return str + + +def convert_to_zcl_values( + fields: dict[str, Any], schema: CommandSchema +) -> dict[str, Any]: + """Convert user input to ZCL values.""" + converted_fields: dict[str, Any] = {} + for field in schema.fields: + if field.name not in fields: + continue + value = fields[field.name] + if issubclass(field.type, enum.Flag) and isinstance(value, list): + new_value = 0 + + for flag in value: + if isinstance(flag, str): + new_value |= field.type[flag.replace(" ", "_")] + else: + new_value |= flag + + value = field.type(new_value) + elif issubclass(field.type, enum.Enum): + value = ( + field.type[value.replace(" ", "_")] + if isinstance(value, str) + else field.type(value) + ) + else: + value = field.type(value) + _LOGGER.debug( + "Converted ZCL schema field(%s) value from: %s to: %s", + field.name, + fields[field.name], + value, + ) + converted_fields[field.name] = value + return converted_fields + + +def async_cluster_exists(hass: HomeAssistant, 
cluster_id, skip_coordinator=True): + """Determine if a device containing the specified in cluster is paired.""" + zha_gateway = get_zha_gateway(hass) + zha_devices = zha_gateway.devices.values() + for zha_device in zha_devices: + if skip_coordinator and zha_device.is_coordinator: + continue + clusters_by_endpoint = zha_device.async_get_clusters() + for clusters in clusters_by_endpoint.values(): + if ( + cluster_id in clusters[CLUSTER_TYPE_IN] + or cluster_id in clusters[CLUSTER_TYPE_OUT] + ): + return True + return False + + +@callback +async def async_add_entities( + _async_add_entities: AddEntitiesCallback, + entity_class: type[ZHAEntity], + entities: list[EntityData], + **kwargs, +) -> None: + """Add entities helper.""" + if not entities: + return + + entities_to_add = [] + for entity_data in entities: + try: + entities_to_add.append(entity_class(entity_data)) + # broad exception to prevent a single entity from preventing an entire platform from loading + # this can potentially be caused by a misbehaving device or a bad quirk. 
Not ideal but the + # alternative is adding try/catch to each entity class __init__ method with a specific exception + except Exception: # noqa: BLE001 + _LOGGER.exception( + "Error while adding entity from entity data: %s", entity_data + ) + _async_add_entities(entities_to_add, update_before_add=False) + entities.clear() + + +def _clean_serial_port_path(path: str) -> str: + """Clean the serial port path, applying corrections where necessary.""" + + if path.startswith("socket://"): + path = path.strip() + + # Removes extraneous brackets from IP addresses (they don't parse in CPython 3.11.4) + if re.match(r"^socket://\[\d+\.\d+\.\d+\.\d+\]:\d+$", path): + path = path.replace("[", "").replace("]", "") + + return path + + +CONF_ZHA_OPTIONS_SCHEMA = vol.Schema( + { + vol.Optional(CONF_DEFAULT_LIGHT_TRANSITION, default=0): vol.All( + vol.Coerce(float), vol.Range(min=0, max=2**16 / 10) + ), + vol.Required(CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, default=False): cv.boolean, + vol.Required(CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, default=True): cv.boolean, + vol.Required(CONF_ALWAYS_PREFER_XY_COLOR_MODE, default=True): cv.boolean, + vol.Required(CONF_GROUP_MEMBERS_ASSUME_STATE, default=True): cv.boolean, + vol.Required(CONF_ENABLE_IDENTIFY_ON_JOIN, default=True): cv.boolean, + vol.Optional( + CONF_CONSIDER_UNAVAILABLE_MAINS, + default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, + ): cv.positive_int, + vol.Optional( + CONF_CONSIDER_UNAVAILABLE_BATTERY, + default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, + ): cv.positive_int, + } +) + +CONF_ZHA_ALARM_SCHEMA = vol.Schema( + { + vol.Required(CONF_ALARM_MASTER_CODE, default="1234"): cv.string, + vol.Required(CONF_ALARM_FAILED_TRIES, default=3): cv.positive_int, + vol.Required(CONF_ALARM_ARM_REQUIRES_CODE, default=False): cv.boolean, + } +) + + +def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: + """Create ZHA lib configuration from HA config objects.""" + + # ensure that we have the necessary HA 
configuration data + assert ha_zha_data.config_entry is not None + assert ha_zha_data.yaml_config is not None + + # Remove brackets around IP addresses, this no longer works in CPython 3.11.4 + # This will be removed in 2023.11.0 + path = ha_zha_data.config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] + cleaned_path = _clean_serial_port_path(path) + + if path != cleaned_path: + _LOGGER.debug("Cleaned serial port path %r -> %r", path, cleaned_path) + ha_zha_data.config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] = cleaned_path + hass.config_entries.async_update_entry( + ha_zha_data.config_entry, data=ha_zha_data.config_entry.data + ) + + # deep copy the yaml config to avoid modifying the original and to safely + # pass it to the ZHA library + app_config = copy.deepcopy(ha_zha_data.yaml_config.get(CONF_ZIGPY, {})) + database = app_config.get( + CONF_DATABASE, + hass.config.path(DEFAULT_DATABASE_NAME), + ) + app_config[CONF_DATABASE] = database + app_config[CONF_DEVICE] = ha_zha_data.config_entry.data[CONF_DEVICE] + + radio_type = RadioType[ha_zha_data.config_entry.data[CONF_RADIO_TYPE]] + + # Until we have a way to coordinate channels with the Thread half of multi-PAN, + # stick to the old zigpy default of channel 15 instead of dynamically scanning + if ( + is_multiprotocol_url(app_config[CONF_DEVICE][CONF_DEVICE_PATH]) + and app_config.get(CONF_NWK, {}).get(CONF_NWK_CHANNEL) is None + ): + app_config.setdefault(CONF_NWK, {})[CONF_NWK_CHANNEL] = 15 + + options: MappingProxyType[str, Any] = ha_zha_data.config_entry.options.get( + CUSTOM_CONFIGURATION, {} + ) + zha_options = CONF_ZHA_OPTIONS_SCHEMA(options.get(ZHA_OPTIONS, {})) + ha_acp_options = CONF_ZHA_ALARM_SCHEMA(options.get(ZHA_ALARM_OPTIONS, {})) + light_options: LightOptions = LightOptions( + default_light_transition=zha_options.get(CONF_DEFAULT_LIGHT_TRANSITION), + enable_enhanced_light_transition=zha_options.get( + CONF_ENABLE_ENHANCED_LIGHT_TRANSITION + ), + enable_light_transitioning_flag=zha_options.get( + 
CONF_ENABLE_LIGHT_TRANSITIONING_FLAG + ), + always_prefer_xy_color_mode=zha_options.get(CONF_ALWAYS_PREFER_XY_COLOR_MODE), + group_members_assume_state=zha_options.get(CONF_GROUP_MEMBERS_ASSUME_STATE), + ) + device_options: DeviceOptions = DeviceOptions( + enable_identify_on_join=zha_options.get(CONF_ENABLE_IDENTIFY_ON_JOIN), + consider_unavailable_mains=zha_options.get(CONF_CONSIDER_UNAVAILABLE_MAINS), + consider_unavailable_battery=zha_options.get(CONF_CONSIDER_UNAVAILABLE_BATTERY), + ) + acp_options: AlarmControlPanelOptions = AlarmControlPanelOptions( + master_code=ha_acp_options.get(CONF_ALARM_MASTER_CODE), + failed_tries=ha_acp_options.get(CONF_ALARM_FAILED_TRIES), + arm_requires_code=ha_acp_options.get(CONF_ALARM_ARM_REQUIRES_CODE), + ) + coord_config: CoordinatorConfiguration = CoordinatorConfiguration( + path=app_config[CONF_DEVICE][CONF_DEVICE_PATH], + baudrate=app_config[CONF_DEVICE][CONF_BAUDRATE], + flow_control=app_config[CONF_DEVICE][CONF_FLOW_CONTROL], + radio_type=radio_type.name, + ) + quirks_config: QuirksConfiguration = QuirksConfiguration( + enabled=ha_zha_data.yaml_config.get(CONF_ENABLE_QUIRKS, True), + custom_quirks_path=ha_zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH), + ) + overrides_config: dict[str, DeviceOverridesConfiguration] = {} + overrides: dict[str, dict[str, Any]] = cast( + dict[str, dict[str, Any]], ha_zha_data.yaml_config.get(CONF_DEVICE_CONFIG) + ) + if overrides is not None: + for unique_id, override in overrides.items(): + overrides_config[unique_id] = DeviceOverridesConfiguration( + type=override["type"], + ) + + return ZHAData( + zigpy_config=app_config, + config=ZHAConfiguration( + light_options=light_options, + device_options=device_options, + alarm_control_panel_options=acp_options, + coordinator_configuration=coord_config, + quirks_configuration=quirks_config, + device_overrides=overrides_config, + ), + local_timezone=ZoneInfo(hass.config.time_zone), + ) + + +def convert_zha_error_to_ha_error( + func: 
Callable[Concatenate[_EntityT, _P], Awaitable[None]], +) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: + """Decorate ZHA commands and re-raises ZHAException as HomeAssistantError.""" + + @functools.wraps(func) + async def handler(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None: + try: + return await func(self, *args, **kwargs) + except ZHAException as err: + raise HomeAssistantError(err) from err + + return handler + + +def exclude_none_values(obj: Mapping[str, Any]) -> dict[str, Any]: + """Return a new dictionary excluding keys with None values.""" + return {k: v for k, v in obj.items() if v is not None} diff --git a/homeassistant/components/zha/light.py b/homeassistant/components/zha/light.py index 6fd08de889f..4a36030a0dd 100644 --- a/homeassistant/components/zha/light.py +++ b/homeassistant/components/zha/light.py @@ -2,93 +2,63 @@ from __future__ import annotations -from collections import Counter -from collections.abc import Callable -from datetime import timedelta +from collections.abc import Mapping import functools -import itertools import logging -import random -from typing import TYPE_CHECKING, Any +from typing import Any -from zigpy.zcl.clusters.general import Identify, LevelControl, OnOff -from zigpy.zcl.clusters.lighting import Color -from zigpy.zcl.foundation import Status +from zha.application.platforms.light.const import ( + ColorMode as ZhaColorMode, + LightEntityFeature as ZhaLightEntityFeature, +) -from homeassistant.components import light from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, + ATTR_COLOR_TEMP, + ATTR_EFFECT, + ATTR_FLASH, + ATTR_HS_COLOR, + ATTR_TRANSITION, + ATTR_XY_COLOR, ColorMode, + LightEntity, LightEntityFeature, - brightness_supported, - filter_supported_color_modes, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_SUPPORTED_FEATURES, - STATE_ON, - STATE_UNAVAILABLE, - Platform, -) -from homeassistant.core 
import CALLBACK_TYPE, HomeAssistant, State, callback -from homeassistant.helpers.debounce import Debouncer -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) +from homeassistant.const import STATE_ON, Platform +from homeassistant.core import HomeAssistant, State, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later, async_track_time_interval -from .core import discovery, helpers -from .core.const import ( - CLUSTER_HANDLER_COLOR, - CLUSTER_HANDLER_LEVEL, - CLUSTER_HANDLER_ON_OFF, - CONF_ALWAYS_PREFER_XY_COLOR_MODE, - CONF_DEFAULT_LIGHT_TRANSITION, - CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, - CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, - CONF_GROUP_MEMBERS_ASSUME_STATE, - DATA_ZHA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, - SIGNAL_SET_LEVEL, - ZHA_OPTIONS, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import LogMixin, async_get_zha_config_value, get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity, ZhaGroupEntity -if TYPE_CHECKING: - from .core.device import ZHADevice - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_ON_OFF_TRANSITION = 1 # most bulbs default to a 1-second turn on/off transition -DEFAULT_EXTRA_TRANSITION_DELAY_SHORT = 0.25 -DEFAULT_EXTRA_TRANSITION_DELAY_LONG = 2.0 -DEFAULT_LONG_TRANSITION_TIME = 10 -DEFAULT_MIN_BRIGHTNESS = 2 -ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY = 0.05 - -FLASH_EFFECTS = { - light.FLASH_SHORT: Identify.EffectIdentifier.Blink, - light.FLASH_LONG: Identify.EffectIdentifier.Breathe, +ZHA_TO_HA_COLOR_MODE = { + ZhaColorMode.UNKNOWN: ColorMode.UNKNOWN, + ZhaColorMode.ONOFF: ColorMode.ONOFF, + ZhaColorMode.BRIGHTNESS: ColorMode.BRIGHTNESS, + 
ZhaColorMode.COLOR_TEMP: ColorMode.COLOR_TEMP, + ZhaColorMode.HS: ColorMode.HS, + ZhaColorMode.XY: ColorMode.XY, + ZhaColorMode.RGB: ColorMode.RGB, + ZhaColorMode.RGBW: ColorMode.RGBW, + ZhaColorMode.RGBWW: ColorMode.RGBWW, + ZhaColorMode.WHITE: ColorMode.WHITE, } -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.LIGHT) -GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, Platform.LIGHT) -SIGNAL_LIGHT_GROUP_STATE_CHANGED = "zha_light_group_state_changed" -SIGNAL_LIGHT_GROUP_TRANSITION_START = "zha_light_group_transition_start" -SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED = "zha_light_group_transition_finished" -SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE = "zha_light_group_assume_group_state" -DEFAULT_MIN_TRANSITION_MANUFACTURERS = {"sengled"} +HA_TO_ZHA_COLOR_MODE = {v: k for k, v in ZHA_TO_HA_COLOR_MODE.items()} -COLOR_MODES_GROUP_LIGHT = {ColorMode.COLOR_TEMP, ColorMode.XY} -SUPPORT_GROUP_LIGHT = ( - light.LightEntityFeature.EFFECT - | light.LightEntityFeature.FLASH - | light.LightEntityFeature.TRANSITION -) +OFF_BRIGHTNESS = "off_brightness" +OFF_WITH_TRANSITION = "off_with_transition" + +_LOGGER = logging.getLogger(__name__) async def async_setup_entry( @@ -104,1280 +74,144 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, Light, entities_to_create ), ) config_entry.async_on_unload(unsub) -class BaseLight(LogMixin, light.LightEntity): - """Operations common to all light entities.""" +class Light(LightEntity, ZHAEntity): + """Representation of a ZHA or ZLL light.""" - _FORCE_ON = False - _DEFAULT_MIN_TRANSITION_TIME: float = 0 + def __init__(self, entity_data: EntityData) -> None: + """Initialize the ZHA light.""" + super().__init__(entity_data) + color_modes: set[ColorMode] = set() + has_brightness = False + for color_mode in self.entity_data.entity.supported_color_modes: + if color_mode == 
ZhaColorMode.BRIGHTNESS: + has_brightness = True + if color_mode not in (ZhaColorMode.BRIGHTNESS, ZhaColorMode.ONOFF): + color_modes.add(ZHA_TO_HA_COLOR_MODE[color_mode]) + if color_modes: + self._attr_supported_color_modes = color_modes + elif has_brightness: + color_modes.add(ColorMode.BRIGHTNESS) + self._attr_supported_color_modes = color_modes + else: + color_modes.add(ColorMode.ONOFF) + self._attr_supported_color_modes = color_modes - def __init__(self, *args, **kwargs): - """Initialize the light.""" - self._zha_device: ZHADevice = None - super().__init__(*args, **kwargs) - self._attr_min_mireds: int | None = 153 - self._attr_max_mireds: int | None = 500 - self._attr_color_mode = ColorMode.UNKNOWN # Set by subclasses - self._attr_supported_features: int = 0 - self._attr_state: bool | None - self._off_with_transition: bool = False - self._off_brightness: int | None = None - self._zha_config_transition = self._DEFAULT_MIN_TRANSITION_TIME - self._zha_config_enhanced_light_transition: bool = False - self._zha_config_enable_light_transitioning_flag: bool = True - self._zha_config_always_prefer_xy_color_mode: bool = True - self._on_off_cluster_handler = None - self._level_cluster_handler = None - self._color_cluster_handler = None - self._identify_cluster_handler = None - self._transitioning_individual: bool = False - self._transitioning_group: bool = False - self._transition_listener: Callable[[], None] | None = None + features = LightEntityFeature(0) + zha_features: ZhaLightEntityFeature = self.entity_data.entity.supported_features - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - self._async_unsub_transition_listener() - await super().async_will_remove_from_hass() + if ZhaLightEntityFeature.EFFECT in zha_features: + features |= LightEntityFeature.EFFECT + if ZhaLightEntityFeature.FLASH in zha_features: + features |= LightEntityFeature.FLASH + if ZhaLightEntityFeature.TRANSITION in zha_features: + features |= 
LightEntityFeature.TRANSITION + + self._attr_supported_features = features @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return state attributes.""" + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return entity specific state attributes.""" + state = self.entity_data.entity.state return { - "off_with_transition": self._off_with_transition, - "off_brightness": self._off_brightness, + "off_with_transition": state.get("off_with_transition"), + "off_brightness": state.get("off_brightness"), } @property def is_on(self) -> bool: """Return true if entity is on.""" - if self._attr_state is None: - return False - return self._attr_state - - @callback - def set_level(self, value: int) -> None: - """Set the brightness of this light between 0..254. - - brightness level 255 is a special value instructing the device to come - on at `on_level` Zigbee attribute value, regardless of the last set - level - """ - if self.is_transitioning: - self.debug( - "received level %s while transitioning - skipping update", - value, - ) - return - value = max(0, min(254, value)) - self._attr_brightness = value - self.async_write_ha_state() - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - transition = kwargs.get(light.ATTR_TRANSITION) - duration = ( - transition if transition is not None else self._zha_config_transition - ) or ( - # if 0 is passed in some devices still need the minimum default - self._DEFAULT_MIN_TRANSITION_TIME - ) - brightness = kwargs.get(light.ATTR_BRIGHTNESS) - effect = kwargs.get(light.ATTR_EFFECT) - flash = kwargs.get(light.ATTR_FLASH) - temperature = kwargs.get(light.ATTR_COLOR_TEMP) - xy_color = kwargs.get(light.ATTR_XY_COLOR) - hs_color = kwargs.get(light.ATTR_HS_COLOR) - - execute_if_off_supported = ( - self._GROUP_SUPPORTS_EXECUTE_IF_OFF - if isinstance(self, LightGroup) - else self._color_cluster_handler - and self._color_cluster_handler.execute_if_off_supported - ) - - 
set_transition_flag = ( - brightness_supported(self._attr_supported_color_modes) - or temperature is not None - or xy_color is not None - or hs_color is not None - ) and self._zha_config_enable_light_transitioning_flag - transition_time = ( - ( - duration + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT - if ( - (brightness is not None or transition is not None) - and brightness_supported(self._attr_supported_color_modes) - or (self._off_with_transition and self._off_brightness is not None) - or temperature is not None - or xy_color is not None - or hs_color is not None - ) - else DEFAULT_ON_OFF_TRANSITION + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT - ) - if set_transition_flag - else 0 - ) - - # If we need to pause attribute report parsing, we'll do so here. - # After successful calls, we later start a timer to unset the flag after - # transition_time. - # - On an error on the first move to level call, we unset the flag immediately - # if no previous timer is running. - # - On an error on subsequent calls, we start the transition timer, - # as a brightness call might have come through. - if set_transition_flag: - self.async_transition_set_flag() - - # If the light is currently off but a turn_on call with a color/temperature is - # sent, the light needs to be turned on first at a low brightness level where - # the light is immediately transitioned to the correct color. Afterwards, the - # transition is only from the low brightness to the new brightness. - # Otherwise, the transition is from the color the light had before being turned - # on to the new color. This can look especially bad with transitions longer than - # a second. We do not want to do this for devices that need to be forced to use - # the on command because we would end up with 4 commands sent: - # move to level, on, color, move to level... We also will not set this - # if the bulb is already in the desired color mode with the desired color - # or color temperature. 
- new_color_provided_while_off = ( - self._zha_config_enhanced_light_transition - and not self._FORCE_ON - and not self._attr_state - and ( - ( - temperature is not None - and ( - self._attr_color_temp != temperature - or self._attr_color_mode != ColorMode.COLOR_TEMP - ) - ) - or ( - xy_color is not None - and ( - self._attr_xy_color != xy_color - or self._attr_color_mode != ColorMode.XY - ) - ) - or ( - hs_color is not None - and ( - self._attr_hs_color != hs_color - or self._attr_color_mode != ColorMode.HS - ) - ) - ) - and brightness_supported(self._attr_supported_color_modes) - and not execute_if_off_supported - ) - - if ( - brightness is None - and (self._off_with_transition or new_color_provided_while_off) - and self._off_brightness is not None - ): - brightness = self._off_brightness - - if brightness is not None: - level = min(254, brightness) - else: - level = self._attr_brightness or 254 - - t_log = {} - - if new_color_provided_while_off: - # If the light is currently off, we first need to turn it on at a low - # brightness level with no transition. - # After that, we set it to the desired color/temperature with no transition. 
- result = await self._level_cluster_handler.move_to_level_with_on_off( - level=DEFAULT_MIN_BRIGHTNESS, - transition_time=int(10 * self._DEFAULT_MIN_TRANSITION_TIME), - ) - t_log["move_to_level_with_on_off"] = result - if result[1] is not Status.SUCCESS: - # First 'move to level' call failed, so if the transitioning delay - # isn't running from a previous call, - # the flag can be unset immediately - if set_transition_flag and not self._transition_listener: - self.async_transition_complete() - self.debug("turned on: %s", t_log) - return - # Currently only setting it to "on", as the correct level state will - # be set at the second move_to_level call - self._attr_state = True - - if execute_if_off_supported: - self.debug("handling color commands before turning on/level") - if not await self.async_handle_color_commands( - temperature, - duration, # duration is ignored by lights when off - hs_color, - xy_color, - new_color_provided_while_off, - t_log, - ): - # Color calls before on/level calls failed, - # so if the transitioning delay isn't running from a previous call, - # the flag can be unset immediately - if set_transition_flag and not self._transition_listener: - self.async_transition_complete() - self.debug("turned on: %s", t_log) - return - - if ( - (brightness is not None or transition is not None) - and not new_color_provided_while_off - and brightness_supported(self._attr_supported_color_modes) - ): - result = await self._level_cluster_handler.move_to_level_with_on_off( - level=level, - transition_time=int(10 * duration), - ) - t_log["move_to_level_with_on_off"] = result - if result[1] is not Status.SUCCESS: - # First 'move to level' call failed, so if the transitioning delay - # isn't running from a previous call, the flag can be unset immediately - if set_transition_flag and not self._transition_listener: - self.async_transition_complete() - self.debug("turned on: %s", t_log) - return - self._attr_state = bool(level) - if level: - self._attr_brightness = 
level - - if ( - (brightness is None and transition is None) - and not new_color_provided_while_off - or (self._FORCE_ON and brightness != 0) - ): - # since FORCE_ON lights don't turn on with move_to_level_with_on_off, - # we should call the on command on the on_off cluster - # if brightness is not 0. - result = await self._on_off_cluster_handler.on() - t_log["on_off"] = result - if result[1] is not Status.SUCCESS: - # 'On' call failed, but as brightness may still transition - # (for FORCE_ON lights), we start the timer to unset the flag after - # the transition_time if necessary. - self.async_transition_start_timer(transition_time) - self.debug("turned on: %s", t_log) - return - self._attr_state = True - - if not execute_if_off_supported: - self.debug("handling color commands after turning on/level") - if not await self.async_handle_color_commands( - temperature, - duration, - hs_color, - xy_color, - new_color_provided_while_off, - t_log, - ): - # Color calls failed, but as brightness may still transition, - # we start the timer to unset the flag - self.async_transition_start_timer(transition_time) - self.debug("turned on: %s", t_log) - return - - if new_color_provided_while_off: - # The light has the correct color, so we can now transition - # it to the correct brightness level. - result = await self._level_cluster_handler.move_to_level( - level=level, transition_time=int(10 * duration) - ) - t_log["move_to_level_if_color"] = result - if result[1] is not Status.SUCCESS: - self.debug("turned on: %s", t_log) - return - self._attr_state = bool(level) - if level: - self._attr_brightness = level - - # Our light is guaranteed to have just started the transitioning process - # if necessary, so we start the delay for the transition (to stop parsing - # attribute reports after the completed transition). 
- self.async_transition_start_timer(transition_time) - - if effect == light.EFFECT_COLORLOOP: - result = await self._color_cluster_handler.color_loop_set( - update_flags=( - Color.ColorLoopUpdateFlags.Action - | Color.ColorLoopUpdateFlags.Direction - | Color.ColorLoopUpdateFlags.Time - ), - action=Color.ColorLoopAction.Activate_from_current_hue, - direction=Color.ColorLoopDirection.Increment, - time=transition if transition else 7, - start_hue=0, - ) - t_log["color_loop_set"] = result - self._attr_effect = light.EFFECT_COLORLOOP - elif ( - self._attr_effect == light.EFFECT_COLORLOOP - and effect != light.EFFECT_COLORLOOP - ): - result = await self._color_cluster_handler.color_loop_set( - update_flags=Color.ColorLoopUpdateFlags.Action, - action=Color.ColorLoopAction.Deactivate, - direction=Color.ColorLoopDirection.Decrement, - time=0, - start_hue=0, - ) - t_log["color_loop_set"] = result - self._attr_effect = None - - if flash is not None: - result = await self._identify_cluster_handler.trigger_effect( - effect_id=FLASH_EFFECTS[flash], - effect_variant=Identify.EffectVariant.Default, - ) - t_log["trigger_effect"] = result - - self._off_with_transition = False - self._off_brightness = None - self.debug("turned on: %s", t_log) - self.async_write_ha_state() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - transition = kwargs.get(light.ATTR_TRANSITION) - supports_level = brightness_supported(self._attr_supported_color_modes) - - transition_time = ( - transition or self._DEFAULT_MIN_TRANSITION_TIME - if transition is not None - else DEFAULT_ON_OFF_TRANSITION - ) + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT - - # Start pausing attribute report parsing - if self._zha_config_enable_light_transitioning_flag: - self.async_transition_set_flag() - - # is not none looks odd here, but it will override built in bulb - # transition times if we pass 0 in here - if transition is not None and supports_level: - result = await 
self._level_cluster_handler.move_to_level_with_on_off( - level=0, - transition_time=int( - 10 * (transition or self._DEFAULT_MIN_TRANSITION_TIME) - ), - ) - else: - result = await self._on_off_cluster_handler.off() - - # Pause parsing attribute reports until transition is complete - if self._zha_config_enable_light_transitioning_flag: - self.async_transition_start_timer(transition_time) - self.debug("turned off: %s", result) - if result[1] is not Status.SUCCESS: - return - self._attr_state = False - - if supports_level and not self._off_with_transition: - # store current brightness so that the next turn_on uses it: - # when using "enhanced turn on" - self._off_brightness = self._attr_brightness - if transition is not None: - # save for when calling turn_on without a brightness: - # current_level is set to 1 after transitioning to level 0, - # needed for correct state with light groups - self._attr_brightness = 1 - self._off_with_transition = transition is not None - - self.async_write_ha_state() - - async def async_handle_color_commands( - self, - temperature, - duration, - hs_color, - xy_color, - new_color_provided_while_off, - t_log, - ): - """Process ZCL color commands.""" - - transition_time = ( - self._DEFAULT_MIN_TRANSITION_TIME - if new_color_provided_while_off - else duration - ) - - if temperature is not None: - result = await self._color_cluster_handler.move_to_color_temp( - color_temp_mireds=temperature, - transition_time=int(10 * transition_time), - ) - t_log["move_to_color_temp"] = result - if result[1] is not Status.SUCCESS: - return False - self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = temperature - self._attr_xy_color = None - self._attr_hs_color = None - - if hs_color is not None: - if ( - not isinstance(self, LightGroup) - and self._color_cluster_handler.enhanced_hue_supported - ): - result = await self._color_cluster_handler.enhanced_move_to_hue_and_saturation( - enhanced_hue=int(hs_color[0] * 65535 / 360), - 
saturation=int(hs_color[1] * 2.54), - transition_time=int(10 * transition_time), - ) - t_log["enhanced_move_to_hue_and_saturation"] = result - else: - result = await self._color_cluster_handler.move_to_hue_and_saturation( - hue=int(hs_color[0] * 254 / 360), - saturation=int(hs_color[1] * 2.54), - transition_time=int(10 * transition_time), - ) - t_log["move_to_hue_and_saturation"] = result - if result[1] is not Status.SUCCESS: - return False - self._attr_color_mode = ColorMode.HS - self._attr_hs_color = hs_color - self._attr_xy_color = None - self._attr_color_temp = None - xy_color = None # don't set xy_color if it is also present - - if xy_color is not None: - result = await self._color_cluster_handler.move_to_color( - color_x=int(xy_color[0] * 65535), - color_y=int(xy_color[1] * 65535), - transition_time=int(10 * transition_time), - ) - t_log["move_to_color"] = result - if result[1] is not Status.SUCCESS: - return False - self._attr_color_mode = ColorMode.XY - self._attr_xy_color = xy_color - self._attr_color_temp = None - self._attr_hs_color = None - - return True + return self.entity_data.entity.is_on @property - def is_transitioning(self) -> bool: - """Return if the light is transitioning.""" - return self._transitioning_individual or self._transitioning_group + def brightness(self) -> int: + """Return the brightness of this light.""" + return self.entity_data.entity.brightness - @callback - def async_transition_set_flag(self) -> None: - """Set _transitioning to True.""" - self.debug("setting transitioning flag to True") - self._transitioning_individual = True - self._transitioning_group = False - if isinstance(self, LightGroup): - async_dispatcher_send( - self.hass, - SIGNAL_LIGHT_GROUP_TRANSITION_START, - {"entity_ids": self._entity_ids}, - ) - self._async_unsub_transition_listener() - - @callback - def async_transition_start_timer(self, transition_time) -> None: - """Start a timer to unset _transitioning_individual after transition_time. - - If necessary. 
- """ - if not transition_time: - return - # For longer transitions, we want to extend the timer a bit more - if transition_time >= DEFAULT_LONG_TRANSITION_TIME: - transition_time += DEFAULT_EXTRA_TRANSITION_DELAY_LONG - self.debug("starting transitioning timer for %s", transition_time) - self._transition_listener = async_call_later( - self._zha_device.hass, - transition_time, - self.async_transition_complete, - ) - - @callback - def _async_unsub_transition_listener(self) -> None: - """Unsubscribe transition listener.""" - if self._transition_listener: - self._transition_listener() - self._transition_listener = None - - @callback - def async_transition_complete(self, _=None) -> None: - """Set _transitioning_individual to False and write HA state.""" - self.debug("transition complete - future attribute reports will write HA state") - self._transitioning_individual = False - self._async_unsub_transition_listener() - self.async_write_ha_state() - if isinstance(self, LightGroup): - async_dispatcher_send( - self.hass, - SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED, - {"entity_ids": self._entity_ids}, - ) - if self._debounced_member_refresh is not None: - self.debug("transition complete - refreshing group member states") - assert self.platform.config_entry - self.platform.config_entry.async_create_background_task( - self.hass, - self._debounced_member_refresh.async_call(), - "zha.light-refresh-debounced-member", - ) - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, -) -class Light(BaseLight, ZhaEntity): - """Representation of a ZHA or ZLL light.""" - - _attr_supported_color_modes: set[ColorMode] - _attr_translation_key: str = "light" - _REFRESH_INTERVAL = (45, 75) - - def __init__( - self, unique_id, zha_device: ZHADevice, cluster_handlers, **kwargs - ) -> None: - """Initialize the ZHA light.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - 
self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] - self._attr_state = bool(self._on_off_cluster_handler.on_off) - self._level_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_LEVEL) - self._color_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_COLOR) - self._identify_cluster_handler = zha_device.identify_ch - if self._color_cluster_handler: - self._attr_min_mireds: int = self._color_cluster_handler.min_mireds - self._attr_max_mireds: int = self._color_cluster_handler.max_mireds - self._cancel_refresh_handle: CALLBACK_TYPE | None = None - effect_list = [] - - self._zha_config_always_prefer_xy_color_mode = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ALWAYS_PREFER_XY_COLOR_MODE, - True, - ) - - self._attr_supported_color_modes = {ColorMode.ONOFF} - if self._level_cluster_handler: - self._attr_supported_color_modes.add(ColorMode.BRIGHTNESS) - self._attr_supported_features |= light.LightEntityFeature.TRANSITION - self._attr_brightness = self._level_cluster_handler.current_level - - if self._color_cluster_handler: - if self._color_cluster_handler.color_temp_supported: - self._attr_supported_color_modes.add(ColorMode.COLOR_TEMP) - self._attr_color_temp = self._color_cluster_handler.color_temperature - - if self._color_cluster_handler.xy_supported and ( - self._zha_config_always_prefer_xy_color_mode - or not self._color_cluster_handler.hs_supported - ): - self._attr_supported_color_modes.add(ColorMode.XY) - curr_x = self._color_cluster_handler.current_x - curr_y = self._color_cluster_handler.current_y - if curr_x is not None and curr_y is not None: - self._attr_xy_color = (curr_x / 65535, curr_y / 65535) - else: - self._attr_xy_color = (0, 0) - - if ( - self._color_cluster_handler.hs_supported - and not self._zha_config_always_prefer_xy_color_mode - ): - self._attr_supported_color_modes.add(ColorMode.HS) - if ( - self._color_cluster_handler.enhanced_hue_supported - and 
self._color_cluster_handler.enhanced_current_hue is not None - ): - curr_hue = ( - self._color_cluster_handler.enhanced_current_hue * 65535 / 360 - ) - elif self._color_cluster_handler.current_hue is not None: - curr_hue = self._color_cluster_handler.current_hue * 254 / 360 - else: - curr_hue = 0 - - if ( - curr_saturation := self._color_cluster_handler.current_saturation - ) is None: - curr_saturation = 0 - - self._attr_hs_color = ( - int(curr_hue), - int(curr_saturation * 2.54), - ) - - if self._color_cluster_handler.color_loop_supported: - self._attr_supported_features |= light.LightEntityFeature.EFFECT - effect_list.append(light.EFFECT_COLORLOOP) - if self._color_cluster_handler.color_loop_active == 1: - self._attr_effect = light.EFFECT_COLORLOOP - self._attr_supported_color_modes = filter_supported_color_modes( - self._attr_supported_color_modes - ) - if len(self._attr_supported_color_modes) == 1: - self._attr_color_mode = next(iter(self._attr_supported_color_modes)) - else: # Light supports color_temp + hs, determine which mode the light is in - assert self._color_cluster_handler - if ( - self._color_cluster_handler.color_mode - == Color.ColorMode.Color_temperature - ): - self._attr_color_mode = ColorMode.COLOR_TEMP - else: - self._attr_color_mode = ColorMode.XY - - if self._identify_cluster_handler: - self._attr_supported_features |= light.LightEntityFeature.FLASH - - if effect_list: - self._attr_effect_list = effect_list - - self._zha_config_transition = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_DEFAULT_LIGHT_TRANSITION, - 0, - ) - self._zha_config_enhanced_light_transition = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, - False, - ) - self._zha_config_enable_light_transitioning_flag = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, - True, - ) - - @callback - def 
async_set_state(self, attr_id, attr_name, value): - """Set the state.""" - if self.is_transitioning: - self.debug( - "received onoff %s while transitioning - skipping update", - value, - ) - return - self._attr_state = bool(value) - if value: - self._off_with_transition = False - self._off_brightness = None - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._on_off_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - if self._level_cluster_handler: - self.async_accept_signal( - self._level_cluster_handler, SIGNAL_SET_LEVEL, self.set_level - ) - refresh_interval = random.randint(*(x * 60 for x in self._REFRESH_INTERVAL)) - self._cancel_refresh_handle = async_track_time_interval( - self.hass, self._refresh, timedelta(seconds=refresh_interval) - ) - self.debug("started polling with refresh interval of %s", refresh_interval) - self.async_accept_signal( - None, - SIGNAL_LIGHT_GROUP_STATE_CHANGED, - self._maybe_force_refresh, - signal_override=True, - ) - - @callback - def transition_on(signal): - """Handle a transition start event from a group.""" - if self.entity_id in signal["entity_ids"]: - self.debug( - "group transition started - setting member transitioning flag" - ) - self._transitioning_group = True - - self.async_accept_signal( - None, - SIGNAL_LIGHT_GROUP_TRANSITION_START, - transition_on, - signal_override=True, - ) - - @callback - def transition_off(signal): - """Handle a transition finished event from a group.""" - if self.entity_id in signal["entity_ids"]: - self.debug( - "group transition completed - unsetting member transitioning flag" - ) - self._transitioning_group = False - - self.async_accept_signal( - None, - SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED, - transition_off, - signal_override=True, - ) - - self.async_accept_signal( - None, - SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE, - 
self._assume_group_state, - signal_override=True, - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - assert self._cancel_refresh_handle - self._cancel_refresh_handle() - self._cancel_refresh_handle = None - self.debug("stopped polling during device removal") - await super().async_will_remove_from_hass() - - @callback - def async_restore_last_state(self, last_state): - """Restore previous state.""" - self._attr_state = last_state.state == STATE_ON - if "brightness" in last_state.attributes: - self._attr_brightness = last_state.attributes["brightness"] - if "off_with_transition" in last_state.attributes: - self._off_with_transition = last_state.attributes["off_with_transition"] - if "off_brightness" in last_state.attributes: - self._off_brightness = last_state.attributes["off_brightness"] - if (color_mode := last_state.attributes.get("color_mode")) is not None: - self._attr_color_mode = ColorMode(color_mode) - if "color_temp" in last_state.attributes: - self._attr_color_temp = last_state.attributes["color_temp"] - if "xy_color" in last_state.attributes: - self._attr_xy_color = last_state.attributes["xy_color"] - if "hs_color" in last_state.attributes: - self._attr_hs_color = last_state.attributes["hs_color"] - if "effect" in last_state.attributes: - self._attr_effect = last_state.attributes["effect"] - - async def async_get_state(self) -> None: - """Attempt to retrieve the state from the light.""" - if not self._attr_available: - return - self.debug("polling current state") - - if self._on_off_cluster_handler: - state = await self._on_off_cluster_handler.get_attribute_value( - "on_off", from_cache=False - ) - # check if transition started whilst waiting for polled state - if self.is_transitioning: - return - - if state is not None: - self._attr_state = state - if state: # reset "off with transition" flag if the light is on - self._off_with_transition = False - self._off_brightness = None - - if 
self._level_cluster_handler: - level = await self._level_cluster_handler.get_attribute_value( - "current_level", from_cache=False - ) - # check if transition started whilst waiting for polled state - if self.is_transitioning: - return - if level is not None: - self._attr_brightness = level - - if self._color_cluster_handler: - attributes = [ - "color_mode", - "current_x", - "current_y", - ] - if ( - not self._zha_config_always_prefer_xy_color_mode - and self._color_cluster_handler.enhanced_hue_supported - ): - attributes.append("enhanced_current_hue") - attributes.append("current_saturation") - if ( - self._color_cluster_handler.hs_supported - and not self._color_cluster_handler.enhanced_hue_supported - and not self._zha_config_always_prefer_xy_color_mode - ): - attributes.append("current_hue") - attributes.append("current_saturation") - if self._color_cluster_handler.color_temp_supported: - attributes.append("color_temperature") - if self._color_cluster_handler.color_loop_supported: - attributes.append("color_loop_active") - - results = await self._color_cluster_handler.get_attributes( - attributes, from_cache=False, only_cache=False - ) - - # although rare, a transition might have been started while we were waiting - # for the polled attributes, so abort if we are transitioning, - # as that state will not be accurate - if self.is_transitioning: - return - - if (color_mode := results.get("color_mode")) is not None: - if color_mode == Color.ColorMode.Color_temperature: - self._attr_color_mode = ColorMode.COLOR_TEMP - color_temp = results.get("color_temperature") - if color_temp is not None and color_mode: - self._attr_color_temp = color_temp - self._attr_xy_color = None - self._attr_hs_color = None - elif ( - color_mode == Color.ColorMode.Hue_and_saturation - and not self._zha_config_always_prefer_xy_color_mode - ): - self._attr_color_mode = ColorMode.HS - if self._color_cluster_handler.enhanced_hue_supported: - current_hue = results.get("enhanced_current_hue") - 
else: - current_hue = results.get("current_hue") - current_saturation = results.get("current_saturation") - if current_hue is not None and current_saturation is not None: - self._attr_hs_color = ( - int(current_hue * 360 / 65535) - if self._color_cluster_handler.enhanced_hue_supported - else int(current_hue * 360 / 254), - int(current_saturation / 2.54), - ) - self._attr_xy_color = None - self._attr_color_temp = None - else: - self._attr_color_mode = ColorMode.XY - color_x = results.get("current_x") - color_y = results.get("current_y") - if color_x is not None and color_y is not None: - self._attr_xy_color = (color_x / 65535, color_y / 65535) - self._attr_color_temp = None - self._attr_hs_color = None - - color_loop_active = results.get("color_loop_active") - if color_loop_active is not None: - if color_loop_active == 1: - self._attr_effect = light.EFFECT_COLORLOOP - else: - self._attr_effect = None - - async def async_update(self) -> None: - """Update to the latest state.""" - if self.is_transitioning: - self.debug("skipping async_update while transitioning") - return - await self.async_get_state() - - async def _refresh(self, time): - """Call async_get_state at an interval.""" - if self.is_transitioning: - self.debug("skipping _refresh while transitioning") - return - if self._zha_device.available and self.hass.data[DATA_ZHA].allow_polling: - self.debug("polling for updated state") - await self.async_get_state() - self.async_write_ha_state() - else: - self.debug( - "skipping polling for updated state, available: %s, allow polled requests: %s", - self._zha_device.available, - self.hass.data[DATA_ZHA].allow_polling, - ) - - async def _maybe_force_refresh(self, signal): - """Force update the state if the signal contains the entity id for this entity.""" - if self.entity_id in signal["entity_ids"]: - if self.is_transitioning: - self.debug("skipping _maybe_force_refresh while transitioning") - return - if self._zha_device.available and 
self.hass.data[DATA_ZHA].allow_polling: - self.debug("forcing polling for updated state") - await self.async_get_state() - self.async_write_ha_state() - else: - self.debug( - "skipping _maybe_force_refresh, available: %s, allow polled requests: %s", - self._zha_device.available, - self.hass.data[DATA_ZHA].allow_polling, - ) - - @callback - def _assume_group_state(self, signal, update_params) -> None: - """Handle an assume group state event from a group.""" - if self.entity_id in signal["entity_ids"] and self._attr_available: - self.debug("member assuming group state with: %s", update_params) - - state = update_params["state"] - brightness = update_params.get(light.ATTR_BRIGHTNESS) - color_mode = update_params.get(light.ATTR_COLOR_MODE) - color_temp = update_params.get(light.ATTR_COLOR_TEMP) - xy_color = update_params.get(light.ATTR_XY_COLOR) - hs_color = update_params.get(light.ATTR_HS_COLOR) - effect = update_params.get(light.ATTR_EFFECT) - - supported_modes = self._attr_supported_color_modes - - # unset "off brightness" and "off with transition" - # if group turned on this light - if state and not self._attr_state: - self._off_with_transition = False - self._off_brightness = None - - # set "off brightness" and "off with transition" - # if group turned off this light, and the light was not already off - # (to not override _off_with_transition) - elif ( - not state and self._attr_state and brightness_supported(supported_modes) - ): - # use individual brightness, instead of possibly averaged - # brightness from group - self._off_brightness = self._attr_brightness - self._off_with_transition = update_params["off_with_transition"] - - # Note: If individual lights have off_with_transition set, but not the - # group, and the group is then turned on without a level, individual lights - # might fall back to brightness level 1. - # Since all lights might need different brightness levels to be turned on, - # we can't use one group call. 
And making individual calls when turning on - # a ZHA group would cause a lot of traffic. In this case, - # turn_on should either just be called with a level or individual turn_on - # calls can be used. - - # state is always set (light.turn_on/light.turn_off) - self._attr_state = state - - # before assuming a group state attribute, check if the attribute - # was actually set in that call - if brightness is not None and brightness_supported(supported_modes): - self._attr_brightness = brightness - if color_mode is not None and color_mode in supported_modes: - self._attr_color_mode = color_mode - if color_temp is not None and ColorMode.COLOR_TEMP in supported_modes: - self._attr_color_temp = color_temp - if xy_color is not None and ColorMode.XY in supported_modes: - self._attr_xy_color = xy_color - if hs_color is not None and ColorMode.HS in supported_modes: - self._attr_hs_color = hs_color - # the effect is always deactivated in async_turn_on if not provided - if effect is None: - self._attr_effect = None - elif self._attr_effect_list and effect in self._attr_effect_list: - self._attr_effect = effect - - self.async_write_ha_state() - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, - manufacturers={"Philips", "Signify Netherlands B.V."}, -) -class HueLight(Light): - """Representation of a HUE light which does not report attributes.""" - - _REFRESH_INTERVAL = (3, 5) - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, - manufacturers={"Jasco", "Jasco Products", "Quotra-Vision", "eWeLight", "eWeLink"}, -) -class ForceOnLight(Light): - """Representation of a light which does not respect on/off for move_to_level_with_on_off commands.""" - - _FORCE_ON = True - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, - 
manufacturers=DEFAULT_MIN_TRANSITION_MANUFACTURERS, -) -class MinTransitionLight(Light): - """Representation of a light which does not react to any "move to" calls with 0 as a transition.""" - - # Transitions are counted in 1/10th of a second increments, so this is the smallest - _DEFAULT_MIN_TRANSITION_TIME = 0.1 - - -@GROUP_MATCH() -class LightGroup(BaseLight, ZhaGroupEntity): - """Representation of a light group.""" - - _attr_translation_key: str = "light_group" - - def __init__( - self, - entity_ids: list[str], - unique_id: str, - group_id: int, - zha_device: ZHADevice, - **kwargs: Any, - ) -> None: - """Initialize a light group.""" - super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) - group = self.zha_device.gateway.get_group(self._group_id) - - self._GROUP_SUPPORTS_EXECUTE_IF_OFF = True - - for member in group.members: - # Ensure we do not send group commands that violate the minimum transition - # time of any members. - if member.device.manufacturer in DEFAULT_MIN_TRANSITION_MANUFACTURERS: - self._DEFAULT_MIN_TRANSITION_TIME = ( - MinTransitionLight._DEFAULT_MIN_TRANSITION_TIME # noqa: SLF001 - ) - - # Check all group members to see if they support execute_if_off. - # If at least one member has a color cluster and doesn't support it, - # it's not used. 
- for endpoint in member.device._endpoints.values(): # noqa: SLF001 - for cluster_handler in endpoint.all_cluster_handlers.values(): - if ( - cluster_handler.name == CLUSTER_HANDLER_COLOR - and not cluster_handler.execute_if_off_supported - ): - self._GROUP_SUPPORTS_EXECUTE_IF_OFF = False - break - - self._on_off_cluster_handler = group.endpoint[OnOff.cluster_id] - self._level_cluster_handler = group.endpoint[LevelControl.cluster_id] - self._color_cluster_handler = group.endpoint[Color.cluster_id] - self._identify_cluster_handler = group.endpoint[Identify.cluster_id] - self._debounced_member_refresh: Debouncer | None = None - self._zha_config_transition = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_DEFAULT_LIGHT_TRANSITION, - 0, - ) - self._zha_config_enable_light_transitioning_flag = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, - True, - ) - self._zha_config_always_prefer_xy_color_mode = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ALWAYS_PREFER_XY_COLOR_MODE, - True, - ) - self._zha_config_group_members_assume_state = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_GROUP_MEMBERS_ASSUME_STATE, - True, - ) - if self._zha_config_group_members_assume_state: - self._update_group_from_child_delay = ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY - self._zha_config_enhanced_light_transition = False - - self._attr_color_mode = ColorMode.UNKNOWN - self._attr_supported_color_modes = {ColorMode.ONOFF} - - # remove this when all ZHA platforms and base entities are updated @property - def available(self) -> bool: - """Return entity availability.""" - return self._attr_available + def min_mireds(self) -> int: + """Return the coldest color_temp that this light supports.""" + return self.entity_data.entity.min_mireds - async def async_added_to_hass(self) -> None: - """Run when about to be added to 
hass.""" - await super().async_added_to_hass() - if self._debounced_member_refresh is None: - force_refresh_debouncer = Debouncer( - self.hass, - _LOGGER, - cooldown=3, - immediate=True, - function=self._force_member_updates, - ) - self._debounced_member_refresh = force_refresh_debouncer - self.async_on_remove(force_refresh_debouncer.async_cancel) + @property + def max_mireds(self) -> int: + """Return the warmest color_temp that this light supports.""" + return self.entity_data.entity.max_mireds + @property + def hs_color(self) -> tuple[float, float] | None: + """Return the hs color value [int, int].""" + return self.entity_data.entity.hs_color + + @property + def xy_color(self) -> tuple[float, float] | None: + """Return the xy color value [float, float].""" + return self.entity_data.entity.xy_color + + @property + def color_temp(self) -> int | None: + """Return the CT color value in mireds.""" + return self.entity_data.entity.color_temp + + @property + def color_mode(self) -> ColorMode | None: + """Return the color mode.""" + if self.entity_data.entity.color_mode is None: + return None + return ZHA_TO_HA_COLOR_MODE[self.entity_data.entity.color_mode] + + @property + def effect_list(self) -> list[str] | None: + """Return the list of supported effects.""" + return self.entity_data.entity.effect_list + + @property + def effect(self) -> str | None: + """Return the current effect.""" + return self.entity_data.entity.effect + + @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - # "off with transition" and "off brightness" will get overridden when - # turning on the group, but they are needed for setting the assumed - # member state correctly, so save them here - off_brightness = self._off_brightness if self._off_with_transition else None - await super().async_turn_on(**kwargs) - if self._zha_config_group_members_assume_state: - self._send_member_assume_state_event(True, kwargs, off_brightness) - if 
self.is_transitioning: # when transitioning, state is refreshed at the end - return - if self._debounced_member_refresh: - await self._debounced_member_refresh.async_call() + await self.entity_data.entity.async_turn_on( + transition=kwargs.get(ATTR_TRANSITION), + brightness=kwargs.get(ATTR_BRIGHTNESS), + effect=kwargs.get(ATTR_EFFECT), + flash=kwargs.get(ATTR_FLASH), + color_temp=kwargs.get(ATTR_COLOR_TEMP), + xy_color=kwargs.get(ATTR_XY_COLOR), + hs_color=kwargs.get(ATTR_HS_COLOR), + ) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - await super().async_turn_off(**kwargs) - if self._zha_config_group_members_assume_state: - self._send_member_assume_state_event(False, kwargs) - if self.is_transitioning: - return - if self._debounced_member_refresh: - await self._debounced_member_refresh.async_call() - - async def async_update(self) -> None: - """Query all members and determine the light group state.""" - self.debug("updating group state") - all_states = [self.hass.states.get(x) for x in self._entity_ids] - states: list[State] = list(filter(None, all_states)) - on_states = [state for state in states if state.state == STATE_ON] - - self._attr_state = len(on_states) > 0 - - # reset "off with transition" flag if any member is on - if self._attr_state: - self._off_with_transition = False - self._off_brightness = None - - self._attr_available = any(state.state != STATE_UNAVAILABLE for state in states) - - self._attr_brightness = helpers.reduce_attribute( - on_states, light.ATTR_BRIGHTNESS + await self.entity_data.entity.async_turn_off( + transition=kwargs.get(ATTR_TRANSITION) ) + self.async_write_ha_state() - self._attr_xy_color = helpers.reduce_attribute( - on_states, light.ATTR_XY_COLOR, reduce=helpers.mean_tuple - ) - - if not self._zha_config_always_prefer_xy_color_mode: - self._attr_hs_color = helpers.reduce_attribute( - on_states, light.ATTR_HS_COLOR, 
reduce=helpers.mean_tuple - ) - - self._attr_color_temp = helpers.reduce_attribute( - on_states, light.ATTR_COLOR_TEMP - ) - self._attr_min_mireds = helpers.reduce_attribute( - states, light.ATTR_MIN_MIREDS, default=153, reduce=min - ) - self._attr_max_mireds = helpers.reduce_attribute( - states, light.ATTR_MAX_MIREDS, default=500, reduce=max - ) - - self._attr_effect_list = None - all_effect_lists = list( - helpers.find_state_attributes(states, light.ATTR_EFFECT_LIST) - ) - if all_effect_lists: - # Merge all effects from all effect_lists with a union merge. - self._attr_effect_list = list(set().union(*all_effect_lists)) - - self._attr_effect = None - all_effects = list(helpers.find_state_attributes(on_states, light.ATTR_EFFECT)) - if all_effects: - # Report the most common effect. - effects_count = Counter(itertools.chain(all_effects)) - self._attr_effect = effects_count.most_common(1)[0][0] - - supported_color_modes = {ColorMode.ONOFF} - all_supported_color_modes: list[set[ColorMode]] = list( - helpers.find_state_attributes(states, light.ATTR_SUPPORTED_COLOR_MODES) - ) - if all_supported_color_modes: - # Merge all color modes. 
- supported_color_modes = filter_supported_color_modes( - set().union(*all_supported_color_modes) - ) - - self._attr_supported_color_modes = supported_color_modes - - self._attr_color_mode = ColorMode.UNKNOWN - all_color_modes = list( - helpers.find_state_attributes(on_states, light.ATTR_COLOR_MODE) - ) - if all_color_modes: - # Report the most common color mode, select brightness and onoff last - color_mode_count = Counter(itertools.chain(all_color_modes)) - if ColorMode.ONOFF in color_mode_count: - if ColorMode.ONOFF in supported_color_modes: - color_mode_count[ColorMode.ONOFF] = -1 - else: - color_mode_count.pop(ColorMode.ONOFF) - if ColorMode.BRIGHTNESS in color_mode_count: - if ColorMode.BRIGHTNESS in supported_color_modes: - color_mode_count[ColorMode.BRIGHTNESS] = 0 - else: - color_mode_count.pop(ColorMode.BRIGHTNESS) - if color_mode_count: - self._attr_color_mode = color_mode_count.most_common(1)[0][0] - else: - self._attr_color_mode = next(iter(supported_color_modes)) - - if self._attr_color_mode == ColorMode.HS and ( - color_mode_count[ColorMode.HS] != len(self._group.members) - or self._zha_config_always_prefer_xy_color_mode - ): # switch to XY if all members do not support HS - self._attr_color_mode = ColorMode.XY - - self._attr_supported_features = LightEntityFeature(0) - for support in helpers.find_state_attributes(states, ATTR_SUPPORTED_FEATURES): - # Merge supported features by emulating support for every feature - # we find. - self._attr_supported_features |= support - # Bitwise-and the supported features with the GroupedLight's features - # so that we don't break in the future when a new feature is added. 
- self._attr_supported_features &= SUPPORT_GROUP_LIGHT - - async def _force_member_updates(self) -> None: - """Force the update of member entities to ensure the states are correct for bulbs that don't report their state.""" - async_dispatcher_send( - self.hass, - SIGNAL_LIGHT_GROUP_STATE_CHANGED, - {"entity_ids": self._entity_ids}, - ) - - def _send_member_assume_state_event( - self, state, service_kwargs, off_brightness=None - ) -> None: - """Send an assume event to all members of the group.""" - update_params = { - "state": state, - "off_with_transition": self._off_with_transition, - } - - # check if the parameters were actually updated - # in the service call before updating members - if light.ATTR_BRIGHTNESS in service_kwargs: # or off brightness - update_params[light.ATTR_BRIGHTNESS] = self._attr_brightness - elif off_brightness is not None: - # if we turn on the group light with "off brightness", - # pass that to the members - update_params[light.ATTR_BRIGHTNESS] = off_brightness - - if light.ATTR_COLOR_TEMP in service_kwargs: - update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode - update_params[light.ATTR_COLOR_TEMP] = self._attr_color_temp - - if light.ATTR_XY_COLOR in service_kwargs: - update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode - update_params[light.ATTR_XY_COLOR] = self._attr_xy_color - - if light.ATTR_HS_COLOR in service_kwargs: - update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode - update_params[light.ATTR_HS_COLOR] = self._attr_hs_color - - if light.ATTR_EFFECT in service_kwargs: - update_params[light.ATTR_EFFECT] = self._attr_effect - - async_dispatcher_send( - self.hass, - SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE, - {"entity_ids": self._entity_ids}, - update_params, + @callback + def restore_external_state_attributes(self, state: State) -> None: + """Restore entity state.""" + self.entity_data.entity.restore_external_state_attributes( + state=(state.state == STATE_ON), + 
off_with_transition=state.attributes.get(OFF_WITH_TRANSITION), + off_brightness=state.attributes.get(OFF_BRIGHTNESS), + brightness=state.attributes.get(ATTR_BRIGHTNESS), + color_temp=state.attributes.get(ATTR_COLOR_TEMP), + xy_color=state.attributes.get(ATTR_XY_COLOR), + hs_color=state.attributes.get(ATTR_HS_COLOR), + color_mode=( + HA_TO_ZHA_COLOR_MODE[ColorMode(state.attributes[ATTR_COLOR_MODE])] + if state.attributes.get(ATTR_COLOR_MODE) is not None + else None + ), + effect=state.attributes.get(ATTR_EFFECT), ) diff --git a/homeassistant/components/zha/lock.py b/homeassistant/components/zha/lock.py index fa719075c05..ebac03eb7b8 100644 --- a/homeassistant/components/zha/lock.py +++ b/homeassistant/components/zha/lock.py @@ -4,35 +4,25 @@ import functools from typing import Any import voluptuous as vol -from zigpy.zcl.foundation import Status -from homeassistant.components.lock import STATE_LOCKED, STATE_UNLOCKED, LockEntity +from homeassistant.components.lock import LockEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, async_get_current_platform, ) -from homeassistant.helpers.typing import StateType -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_DOORLOCK, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -# The first state is Zigbee 'Not fully locked' -STATE_LIST = [STATE_UNLOCKED, STATE_LOCKED, STATE_UNLOCKED] 
-MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.LOCK) - -VALUE_TO_STATE = dict(enumerate(STATE_LIST)) SERVICE_SET_LOCK_USER_CODE = "set_lock_user_code" SERVICE_ENABLE_LOCK_USER_CODE = "enable_lock_user_code" @@ -53,7 +43,7 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, ZhaDoorLock, entities_to_create ), ) config_entry.async_on_unload(unsub) @@ -94,105 +84,57 @@ async def async_setup_entry( ) -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_DOORLOCK) -class ZhaDoorLock(ZhaEntity, LockEntity): +class ZhaDoorLock(ZHAEntity, LockEntity): """Representation of a ZHA lock.""" _attr_translation_key: str = "door_lock" - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._doorlock_cluster_handler = self.cluster_handlers.get( - CLUSTER_HANDLER_DOORLOCK - ) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._doorlock_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - @callback - def async_restore_last_state(self, last_state): - """Restore previous state.""" - self._state = VALUE_TO_STATE.get(last_state.state, last_state.state) - @property def is_locked(self) -> bool: """Return true if entity is locked.""" - if self._state is None: - return False - return self._state == STATE_LOCKED - - @property - def extra_state_attributes(self) -> dict[str, StateType]: - """Return state attributes.""" - return self.state_attributes + return self.entity_data.entity.is_locked + @convert_zha_error_to_ha_error async def async_lock(self, **kwargs: Any) -> None: """Lock the lock.""" - result = await self._doorlock_cluster_handler.lock_door() - if result[0] is not 
Status.SUCCESS: - self.error("Error with lock_door: %s", result) - return + await self.entity_data.entity.async_lock() self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - result = await self._doorlock_cluster_handler.unlock_door() - if result[0] is not Status.SUCCESS: - self.error("Error with unlock_door: %s", result) - return + await self.entity_data.entity.async_unlock() self.async_write_ha_state() - async def async_update(self) -> None: - """Attempt to retrieve state from the lock.""" - await super().async_update() - await self.async_get_state() - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Handle state update from cluster handler.""" - self._state = VALUE_TO_STATE.get(value, self._state) - self.async_write_ha_state() - - async def async_get_state(self, from_cache=True): - """Attempt to retrieve state from the lock.""" - if self._doorlock_cluster_handler: - state = await self._doorlock_cluster_handler.get_attribute_value( - "lock_state", from_cache=from_cache - ) - if state is not None: - self._state = VALUE_TO_STATE.get(state, self._state) - - async def refresh(self, time): - """Call async_get_state at an interval.""" - await self.async_get_state(from_cache=False) - + @convert_zha_error_to_ha_error async def async_set_lock_user_code(self, code_slot: int, user_code: str) -> None: """Set the user_code to index X on the lock.""" - if self._doorlock_cluster_handler: - await self._doorlock_cluster_handler.async_set_user_code( - code_slot, user_code - ) - self.debug("User code at slot %s set", code_slot) + await self.entity_data.entity.async_set_lock_user_code( + code_slot=code_slot, user_code=user_code + ) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_enable_lock_user_code(self, code_slot: int) -> None: """Enable user_code at index X on the lock.""" - if self._doorlock_cluster_handler: - await 
self._doorlock_cluster_handler.async_enable_user_code(code_slot) - self.debug("User code at slot %s enabled", code_slot) + await self.entity_data.entity.async_enable_lock_user_code(code_slot=code_slot) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_disable_lock_user_code(self, code_slot: int) -> None: """Disable user_code at index X on the lock.""" - if self._doorlock_cluster_handler: - await self._doorlock_cluster_handler.async_disable_user_code(code_slot) - self.debug("User code at slot %s disabled", code_slot) + await self.entity_data.entity.async_disable_lock_user_code(code_slot=code_slot) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_clear_lock_user_code(self, code_slot: int) -> None: """Clear the user_code at index X on the lock.""" - if self._doorlock_cluster_handler: - await self._doorlock_cluster_handler.async_clear_user_code(code_slot) - self.debug("User code at slot %s cleared", code_slot) + await self.entity_data.entity.async_clear_lock_user_code(code_slot=code_slot) + self.async_write_ha_state() + + @callback + def restore_external_state_attributes(self, state: State) -> None: + """Restore entity state.""" + self.entity_data.entity.restore_external_state_attributes( + state=state.state, + ) diff --git a/homeassistant/components/zha/logbook.py b/homeassistant/components/zha/logbook.py index e63ef565824..3de81e1255d 100644 --- a/homeassistant/components/zha/logbook.py +++ b/homeassistant/components/zha/logbook.py @@ -5,16 +5,18 @@ from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING +from zha.application.const import ZHA_EVENT + from homeassistant.components.logbook import LOGBOOK_ENTRY_MESSAGE, LOGBOOK_ENTRY_NAME from homeassistant.const import ATTR_COMMAND, ATTR_DEVICE_ID from homeassistant.core import Event, HomeAssistant, callback import homeassistant.helpers.device_registry as dr -from .core.const import DOMAIN as ZHA_DOMAIN, 
ZHA_EVENT -from .core.helpers import async_get_zha_device +from .const import DOMAIN as ZHA_DOMAIN +from .helpers import async_get_zha_device_proxy if TYPE_CHECKING: - from .core.device import ZHADevice + from zha.zigbee.device import Device @callback @@ -30,7 +32,7 @@ def async_describe_events( """Describe ZHA logbook event.""" device: dr.DeviceEntry | None = None device_name: str = "Unknown device" - zha_device: ZHADevice | None = None + zha_device: Device | None = None event_data = event.data event_type: str | None = None event_subtype: str | None = None @@ -39,7 +41,9 @@ def async_describe_events( device = device_registry.devices[event.data[ATTR_DEVICE_ID]] if device: device_name = device.name_by_user or device.name or "Unknown device" - zha_device = async_get_zha_device(hass, event.data[ATTR_DEVICE_ID]) + zha_device = async_get_zha_device_proxy( + hass, event.data[ATTR_DEVICE_ID] + ).device except (KeyError, AttributeError): pass diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index f517742f16f..a5e57fcb1ec 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -18,20 +18,10 @@ "zigpy_xbee", "zigpy_zigate", "zigpy_znp", + "zha", "universal_silabs_flasher" ], - "requirements": [ - "bellows==0.39.1", - "pyserial==3.5", - "zha-quirks==0.0.116", - "zigpy-deconz==0.23.1", - "zigpy==0.64.1", - "zigpy-xbee==0.20.1", - "zigpy-zigate==0.12.0", - "zigpy-znp==0.12.1", - "universal-silabs-flasher==0.0.20", - "pyserial-asyncio-fast==0.11" - ], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.31"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/number.py b/homeassistant/components/zha/number.py index 9320b4494a4..263f5262994 100644 --- a/homeassistant/components/zha/number.py +++ b/homeassistant/components/zha/number.py @@ -4,267 +4,25 @@ from __future__ import annotations import functools import logging -from typing import 
TYPE_CHECKING, Any, Self -from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT -from zigpy.quirks.v2 import NumberMetadata -from zigpy.zcl.clusters.hvac import Thermostat - -from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode +from homeassistant.components.number import RestoreNumber from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - EntityCategory, - Platform, - UnitOfMass, - UnitOfTemperature, - UnitOfTime, -) -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UndefinedType -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_ANALOG_OUTPUT, - CLUSTER_HANDLER_BASIC, - CLUSTER_HANDLER_COLOR, - CLUSTER_HANDLER_INOVELLI, - CLUSTER_HANDLER_LEVEL, - CLUSTER_HANDLER_OCCUPANCY, - CLUSTER_HANDLER_THERMOSTAT, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data, validate_device_class, validate_unit -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.NUMBER) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.NUMBER -) - - -UNITS = { - 0: "Square-meters", - 1: "Square-feet", - 2: "Milliamperes", - 3: "Amperes", - 4: "Ohms", - 5: "Volts", - 6: "Kilo-volts", - 7: "Mega-volts", - 8: "Volt-amperes", - 9: "Kilo-volt-amperes", - 10: 
"Mega-volt-amperes", - 11: "Volt-amperes-reactive", - 12: "Kilo-volt-amperes-reactive", - 13: "Mega-volt-amperes-reactive", - 14: "Degrees-phase", - 15: "Power-factor", - 16: "Joules", - 17: "Kilojoules", - 18: "Watt-hours", - 19: "Kilowatt-hours", - 20: "BTUs", - 21: "Therms", - 22: "Ton-hours", - 23: "Joules-per-kilogram-dry-air", - 24: "BTUs-per-pound-dry-air", - 25: "Cycles-per-hour", - 26: "Cycles-per-minute", - 27: "Hertz", - 28: "Grams-of-water-per-kilogram-dry-air", - 29: "Percent-relative-humidity", - 30: "Millimeters", - 31: "Meters", - 32: "Inches", - 33: "Feet", - 34: "Watts-per-square-foot", - 35: "Watts-per-square-meter", - 36: "Lumens", - 37: "Luxes", - 38: "Foot-candles", - 39: "Kilograms", - 40: "Pounds-mass", - 41: "Tons", - 42: "Kilograms-per-second", - 43: "Kilograms-per-minute", - 44: "Kilograms-per-hour", - 45: "Pounds-mass-per-minute", - 46: "Pounds-mass-per-hour", - 47: "Watts", - 48: "Kilowatts", - 49: "Megawatts", - 50: "BTUs-per-hour", - 51: "Horsepower", - 52: "Tons-refrigeration", - 53: "Pascals", - 54: "Kilopascals", - 55: "Bars", - 56: "Pounds-force-per-square-inch", - 57: "Centimeters-of-water", - 58: "Inches-of-water", - 59: "Millimeters-of-mercury", - 60: "Centimeters-of-mercury", - 61: "Inches-of-mercury", - 62: "°C", - 63: "°K", - 64: "°F", - 65: "Degree-days-Celsius", - 66: "Degree-days-Fahrenheit", - 67: "Years", - 68: "Months", - 69: "Weeks", - 70: "Days", - 71: "Hours", - 72: "Minutes", - 73: "Seconds", - 74: "Meters-per-second", - 75: "Kilometers-per-hour", - 76: "Feet-per-second", - 77: "Feet-per-minute", - 78: "Miles-per-hour", - 79: "Cubic-feet", - 80: "Cubic-meters", - 81: "Imperial-gallons", - 82: "Liters", - 83: "Us-gallons", - 84: "Cubic-feet-per-minute", - 85: "Cubic-meters-per-second", - 86: "Imperial-gallons-per-minute", - 87: "Liters-per-second", - 88: "Liters-per-minute", - 89: "Us-gallons-per-minute", - 90: "Degrees-angular", - 91: "Degrees-Celsius-per-hour", - 92: "Degrees-Celsius-per-minute", - 93: 
"Degrees-Fahrenheit-per-hour", - 94: "Degrees-Fahrenheit-per-minute", - 95: None, - 96: "Parts-per-million", - 97: "Parts-per-billion", - 98: "%", - 99: "Percent-per-second", - 100: "Per-minute", - 101: "Per-second", - 102: "Psi-per-Degree-Fahrenheit", - 103: "Radians", - 104: "Revolutions-per-minute", - 105: "Currency1", - 106: "Currency2", - 107: "Currency3", - 108: "Currency4", - 109: "Currency5", - 110: "Currency6", - 111: "Currency7", - 112: "Currency8", - 113: "Currency9", - 114: "Currency10", - 115: "Square-inches", - 116: "Square-centimeters", - 117: "BTUs-per-pound", - 118: "Centimeters", - 119: "Pounds-mass-per-second", - 120: "Delta-Degrees-Fahrenheit", - 121: "Delta-Degrees-Kelvin", - 122: "Kilohms", - 123: "Megohms", - 124: "Millivolts", - 125: "Kilojoules-per-kilogram", - 126: "Megajoules", - 127: "Joules-per-degree-Kelvin", - 128: "Joules-per-kilogram-degree-Kelvin", - 129: "Kilohertz", - 130: "Megahertz", - 131: "Per-hour", - 132: "Milliwatts", - 133: "Hectopascals", - 134: "Millibars", - 135: "Cubic-meters-per-hour", - 136: "Liters-per-hour", - 137: "Kilowatt-hours-per-square-meter", - 138: "Kilowatt-hours-per-square-foot", - 139: "Megajoules-per-square-meter", - 140: "Megajoules-per-square-foot", - 141: "Watts-per-square-meter-Degree-Kelvin", - 142: "Cubic-feet-per-second", - 143: "Percent-obscuration-per-foot", - 144: "Percent-obscuration-per-meter", - 145: "Milliohms", - 146: "Megawatt-hours", - 147: "Kilo-BTUs", - 148: "Mega-BTUs", - 149: "Kilojoules-per-kilogram-dry-air", - 150: "Megajoules-per-kilogram-dry-air", - 151: "Kilojoules-per-degree-Kelvin", - 152: "Megajoules-per-degree-Kelvin", - 153: "Newton", - 154: "Grams-per-second", - 155: "Grams-per-minute", - 156: "Tons-per-hour", - 157: "Kilo-BTUs-per-hour", - 158: "Hundredths-seconds", - 159: "Milliseconds", - 160: "Newton-meters", - 161: "Millimeters-per-second", - 162: "Millimeters-per-minute", - 163: "Meters-per-minute", - 164: "Meters-per-hour", - 165: "Cubic-meters-per-minute", - 166: 
"Meters-per-second-per-second", - 167: "Amperes-per-meter", - 168: "Amperes-per-square-meter", - 169: "Ampere-square-meters", - 170: "Farads", - 171: "Henrys", - 172: "Ohm-meters", - 173: "Siemens", - 174: "Siemens-per-meter", - 175: "Teslas", - 176: "Volts-per-degree-Kelvin", - 177: "Volts-per-meter", - 178: "Webers", - 179: "Candelas", - 180: "Candelas-per-square-meter", - 181: "Kelvins-per-hour", - 182: "Kelvins-per-minute", - 183: "Joule-seconds", - 185: "Square-meters-per-Newton", - 186: "Kilogram-per-cubic-meter", - 187: "Newton-seconds", - 188: "Newtons-per-meter", - 189: "Watts-per-meter-per-degree-Kelvin", -} - -ICONS = { - 0: "mdi:temperature-celsius", - 1: "mdi:water-percent", - 2: "mdi:gauge", - 3: "mdi:speedometer", - 4: "mdi:percent", - 5: "mdi:air-filter", - 6: "mdi:fan", - 7: "mdi:flash", - 8: "mdi:current-ac", - 9: "mdi:flash", - 10: "mdi:flash", - 11: "mdi:flash", - 12: "mdi:counter", - 13: "mdi:thermometer-lines", - 14: "mdi:timer", - 15: "mdi:palette", - 16: "mdi:brightness-percent", -} - async def async_setup_entry( hass: HomeAssistant, @@ -279,875 +37,53 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, ZhaNumber, entities_to_create ), ) config_entry.async_on_unload(unsub) -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ANALOG_OUTPUT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ZhaNumber(ZhaEntity, NumberEntity): +class ZhaNumber(ZHAEntity, RestoreNumber): """Representation of a ZHA Number entity.""" - _attr_translation_key: str = "number" - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this entity.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._analog_output_cluster_handler = self.cluster_handlers[ - CLUSTER_HANDLER_ANALOG_OUTPUT 
- ] - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._analog_output_cluster_handler, - SIGNAL_ATTR_UPDATED, - self.async_set_state, - ) - - @property - def native_value(self) -> float | None: - """Return the current value.""" - return self._analog_output_cluster_handler.present_value - - @property - def native_min_value(self) -> float: - """Return the minimum value.""" - min_present_value = self._analog_output_cluster_handler.min_present_value - if min_present_value is not None: - return min_present_value - return 0 - - @property - def native_max_value(self) -> float: - """Return the maximum value.""" - max_present_value = self._analog_output_cluster_handler.max_present_value - if max_present_value is not None: - return max_present_value - return 1023 - - @property - def native_step(self) -> float | None: - """Return the value step.""" - resolution = self._analog_output_cluster_handler.resolution - if resolution is not None: - return resolution - return super().native_step - @property def name(self) -> str | UndefinedType | None: """Return the name of the number entity.""" - description = self._analog_output_cluster_handler.description - if description is not None and len(description) > 0: - return f"{super().name} {description}" - return super().name + if (description := self.entity_data.entity.description) is None: + return super().name + + # The name of this entity is reported by the device itself. + # For backwards compatibility, we keep the same format as before. This + # should probably be changed in the future to omit the prefix. 
+ return f"{super().name} {description}" @property - def icon(self) -> str | None: - """Return the icon to be used for this entity.""" - application_type = self._analog_output_cluster_handler.application_type - if application_type is not None: - return ICONS.get(application_type >> 16, super().icon) - return super().icon - - @property - def native_unit_of_measurement(self) -> str | None: - """Return the unit the value is expressed in.""" - engineering_units = self._analog_output_cluster_handler.engineering_units - return UNITS.get(engineering_units) - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Handle value update from cluster handler.""" - self.async_write_ha_state() - - async def async_set_native_value(self, value: float) -> None: - """Update the current value from HA.""" - await self._analog_output_cluster_handler.async_set_present_value(float(value)) - self.async_write_ha_state() - - async def async_update(self) -> None: - """Attempt to retrieve the state of the entity.""" - await super().async_update() - _LOGGER.debug("polling current state") - if self._analog_output_cluster_handler: - value = await self._analog_output_cluster_handler.get_attribute_value( - "present_value", from_cache=False - ) - _LOGGER.debug("read value=%s", value) - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ZHANumberConfigurationEntity(ZhaEntity, NumberEntity): - """Representation of a ZHA number configuration entity.""" - - _attr_entity_category = EntityCategory.CONFIG - _attr_native_step: float = 1.0 - _attr_multiplier: float = 1 - _attribute_name: str - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - if ENTITY_METADATA not in kwargs and ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(cls._attribute_name) is None - ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, - ) - return None - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this number configuration entity.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: NumberMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - - if entity_metadata.min is not None: - self._attr_native_min_value = entity_metadata.min - if entity_metadata.max is not None: - self._attr_native_max_value = entity_metadata.max - if entity_metadata.step is not None: - self._attr_native_step = entity_metadata.step - if entity_metadata.multiplier is not None: - self._attr_multiplier = entity_metadata.multiplier - if entity_metadata.device_class is not None: - self._attr_device_class = validate_device_class( - NumberDeviceClass, - entity_metadata.device_class, - Platform.NUMBER.value, - _LOGGER, - ) - if entity_metadata.device_class is None and entity_metadata.unit is not None: - self._attr_native_unit_of_measurement = validate_unit( - entity_metadata.unit - ).value - - @property - def native_value(self) -> 
float: + def native_value(self) -> float | None: """Return the current value.""" - return ( - self._cluster_handler.cluster.get(self._attribute_name) - * self._attr_multiplier - ) - - async def async_set_native_value(self, value: float) -> None: - """Update the current value from HA.""" - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: int(value / self._attr_multiplier)} - ) - self.async_write_ha_state() - - async def async_update(self) -> None: - """Attempt to retrieve the state of the entity.""" - await super().async_update() - _LOGGER.debug("polling current state") - if self._cluster_handler: - value = await self._cluster_handler.get_attribute_value( - self._attribute_name, from_cache=False - ) - _LOGGER.debug("read value=%s", value) - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", - models={"lumi.motion.ac02", "lumi.motion.agl04"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraMotionDetectionInterval(ZHANumberConfigurationEntity): - """Representation of a ZHA motion detection interval configuration entity.""" - - _unique_id_suffix = "detection_interval" - _attr_native_min_value: float = 2 - _attr_native_max_value: float = 65535 - _attribute_name = "detection_interval" - _attr_translation_key: str = "detection_interval" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class OnOffTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA on off transition time configuration entity.""" - - _unique_id_suffix = "on_off_transition_time" - _attr_native_min_value: float = 0x0000 - _attr_native_max_value: float = 0xFFFF - _attribute_name = "on_off_transition_time" - _attr_translation_key: str = "on_off_transition_time" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class 
OnLevelConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA on level configuration entity.""" - - _unique_id_suffix = "on_level" - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0xFF - _attribute_name = "on_level" - _attr_translation_key: str = "on_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class OnTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA on transition time configuration entity.""" - - _unique_id_suffix = "on_transition_time" - _attr_native_min_value: float = 0x0000 - _attr_native_max_value: float = 0xFFFE - _attribute_name = "on_transition_time" - _attr_translation_key: str = "on_transition_time" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class OffTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA off transition time configuration entity.""" - - _unique_id_suffix = "off_transition_time" - _attr_native_min_value: float = 0x0000 - _attr_native_max_value: float = 0xFFFE - _attribute_name = "off_transition_time" - _attr_translation_key: str = "off_transition_time" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DefaultMoveRateConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA default move rate configuration entity.""" - - _unique_id_suffix = "default_move_rate" - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0xFE - _attribute_name = "default_move_rate" - _attr_translation_key: str = "default_move_rate" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class 
StartUpCurrentLevelConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA startup current level configuration entity.""" - - _unique_id_suffix = "start_up_current_level" - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0xFF - _attribute_name = "start_up_current_level" - _attr_translation_key: str = "start_up_current_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COLOR) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class StartUpColorTemperatureConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA startup color temperature configuration entity.""" - - _unique_id_suffix = "start_up_color_temperature" - _attr_native_min_value: float = 153 - _attr_native_max_value: float = 500 - _attribute_name = "start_up_color_temperature" - _attr_translation_key: str = "start_up_color_temperature" - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this ZHA startup color temperature entity.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - if self._cluster_handler: - self._attr_native_min_value: float = self._cluster_handler.min_mireds - self._attr_native_max_value: float = self._cluster_handler.max_mireds - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_htnnfasr", - }, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class TimerDurationMinutes(ZHANumberConfigurationEntity): - """Representation of a ZHA timer duration configuration entity.""" - - _unique_id_suffix = "timer_duration" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0x257 - _attr_native_unit_of_measurement: str | None = UNITS[72] - _attribute_name = "timer_duration" - _attr_translation_key: str = "timer_duration" - - 
-@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names="ikea_airpurifier") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class FilterLifeTime(ZHANumberConfigurationEntity): - """Representation of a ZHA filter lifetime configuration entity.""" - - _unique_id_suffix = "filter_life_time" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0xFFFFFFFF - _attr_native_unit_of_measurement: str | None = UNITS[72] - _attribute_name = "filter_life_time" - _attr_translation_key: str = "filter_life_time" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_BASIC, - manufacturers={"TexasInstruments"}, - models={"ti.router"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class TiRouterTransmitPower(ZHANumberConfigurationEntity): - """Representation of a ZHA TI transmit power configuration entity.""" - - _unique_id_suffix = "transmit_power" - _attr_native_min_value: float = -20 - _attr_native_max_value: float = 20 - _attribute_name = "transmit_power" - _attr_translation_key: str = "transmit_power" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliRemoteDimmingUpSpeed(ZHANumberConfigurationEntity): - """Inovelli remote dimming up speed configuration entity.""" - - _unique_id_suffix = "dimming_speed_up_remote" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 126 - _attribute_name = "dimming_speed_up_remote" - _attr_translation_key: str = "dimming_speed_up_remote" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliButtonDelay(ZHANumberConfigurationEntity): - """Inovelli button delay configuration entity.""" - - _unique_id_suffix = "button_delay" - _attr_entity_category = 
EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 9 - _attribute_name = "button_delay" - _attr_translation_key: str = "button_delay" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLocalDimmingUpSpeed(ZHANumberConfigurationEntity): - """Inovelli local dimming up speed configuration entity.""" - - _unique_id_suffix = "dimming_speed_up_local" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "dimming_speed_up_local" - _attr_translation_key: str = "dimming_speed_up_local" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLocalRampRateOffToOn(ZHANumberConfigurationEntity): - """Inovelli off to on local ramp rate configuration entity.""" - - _unique_id_suffix = "ramp_rate_off_to_on_local" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "ramp_rate_off_to_on_local" - _attr_translation_key: str = "ramp_rate_off_to_on_local" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliRemoteDimmingSpeedOffToOn(ZHANumberConfigurationEntity): - """Inovelli off to on remote ramp rate configuration entity.""" - - _unique_id_suffix = "ramp_rate_off_to_on_remote" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "ramp_rate_off_to_on_remote" - _attr_translation_key: str = "ramp_rate_off_to_on_remote" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class 
InovelliRemoteDimmingDownSpeed(ZHANumberConfigurationEntity): - """Inovelli remote dimming down speed configuration entity.""" - - _unique_id_suffix = "dimming_speed_down_remote" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "dimming_speed_down_remote" - _attr_translation_key: str = "dimming_speed_down_remote" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLocalDimmingDownSpeed(ZHANumberConfigurationEntity): - """Inovelli local dimming down speed configuration entity.""" - - _unique_id_suffix = "dimming_speed_down_local" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "dimming_speed_down_local" - _attr_translation_key: str = "dimming_speed_down_local" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLocalRampRateOnToOff(ZHANumberConfigurationEntity): - """Inovelli local on to off ramp rate configuration entity.""" - - _unique_id_suffix = "ramp_rate_on_to_off_local" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "ramp_rate_on_to_off_local" - _attr_translation_key: str = "ramp_rate_on_to_off_local" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliRemoteDimmingSpeedOnToOff(ZHANumberConfigurationEntity): - """Inovelli remote on to off ramp rate configuration entity.""" - - _unique_id_suffix = "ramp_rate_on_to_off_remote" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = 
"ramp_rate_on_to_off_remote" - _attr_translation_key: str = "ramp_rate_on_to_off_remote" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliMinimumLoadDimmingLevel(ZHANumberConfigurationEntity): - """Inovelli minimum load dimming level configuration entity.""" - - _unique_id_suffix = "minimum_level" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 1 - _attr_native_max_value: float = 254 - _attribute_name = "minimum_level" - _attr_translation_key: str = "minimum_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliMaximumLoadDimmingLevel(ZHANumberConfigurationEntity): - """Inovelli maximum load dimming level configuration entity.""" - - _unique_id_suffix = "maximum_level" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 2 - _attr_native_max_value: float = 255 - _attribute_name = "maximum_level" - _attr_translation_key: str = "maximum_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliAutoShutoffTimer(ZHANumberConfigurationEntity): - """Inovelli automatic switch shutoff timer configuration entity.""" - - _unique_id_suffix = "auto_off_timer" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 32767 - _attribute_name = "auto_off_timer" - _attr_translation_key: str = "auto_off_timer" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliQuickStartTime(ZHANumberConfigurationEntity): - """Inovelli fan quick start time configuration entity.""" - - _unique_id_suffix = "quick_start_time" - 
_attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 10 - _attribute_name = "quick_start_time" - _attr_translation_key: str = "quick_start_time" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLoadLevelIndicatorTimeout(ZHANumberConfigurationEntity): - """Inovelli load level indicator timeout configuration entity.""" - - _unique_id_suffix = "load_level_indicator_timeout" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 11 - _attribute_name = "load_level_indicator_timeout" - _attr_translation_key: str = "load_level_indicator_timeout" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDefaultAllLEDOnColor(ZHANumberConfigurationEntity): - """Inovelli default all led color when on configuration entity.""" - - _unique_id_suffix = "led_color_when_on" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 255 - _attribute_name = "led_color_when_on" - _attr_translation_key: str = "led_color_when_on" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDefaultAllLEDOffColor(ZHANumberConfigurationEntity): - """Inovelli default all led color when off configuration entity.""" - - _unique_id_suffix = "led_color_when_off" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 255 - _attribute_name = "led_color_when_off" - _attr_translation_key: str = "led_color_when_off" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing 
-class InovelliDefaultAllLEDOnIntensity(ZHANumberConfigurationEntity): - """Inovelli default all led intensity when on configuration entity.""" - - _unique_id_suffix = "led_intensity_when_on" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 100 - _attribute_name = "led_intensity_when_on" - _attr_translation_key: str = "led_intensity_when_on" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDefaultAllLEDOffIntensity(ZHANumberConfigurationEntity): - """Inovelli default all led intensity when off configuration entity.""" - - _unique_id_suffix = "led_intensity_when_off" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 100 - _attribute_name = "led_intensity_when_off" - _attr_translation_key: str = "led_intensity_when_off" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDoubleTapUpLevel(ZHANumberConfigurationEntity): - """Inovelli double tap up level configuration entity.""" - - _unique_id_suffix = "double_tap_up_level" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 2 - _attr_native_max_value: float = 254 - _attribute_name = "double_tap_up_level" - _attr_translation_key: str = "double_tap_up_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDoubleTapDownLevel(ZHANumberConfigurationEntity): - """Inovelli double tap down level configuration entity.""" - - _unique_id_suffix = "double_tap_down_level" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 254 - _attribute_name = "double_tap_down_level" - 
_attr_translation_key: str = "double_tap_down_level" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederServingSize(ZHANumberConfigurationEntity): - """Aqara pet feeder serving size configuration entity.""" - - _unique_id_suffix = "serving_size" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 1 - _attr_native_max_value: float = 10 - _attribute_name = "serving_size" - _attr_translation_key: str = "serving_size" - - _attr_mode: NumberMode = NumberMode.BOX - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederPortionWeight(ZHANumberConfigurationEntity): - """Aqara pet feeder portion weight configuration entity.""" - - _unique_id_suffix = "portion_weight" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 1 - _attr_native_max_value: float = 100 - _attribute_name = "portion_weight" - _attr_translation_key: str = "portion_weight" - - _attr_mode: NumberMode = NumberMode.BOX - _attr_native_unit_of_measurement: str = UnitOfMass.GRAMS - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraThermostatAwayTemp(ZHANumberConfigurationEntity): - """Aqara away preset temperature configuration entity.""" - - _unique_id_suffix = "away_preset_temperature" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 5 - _attr_native_max_value: float = 30 - _attr_multiplier: float = 0.01 - _attribute_name = "away_preset_temperature" - _attr_translation_key: str = "away_preset_temperature" - - _attr_mode: NumberMode = NumberMode.SLIDER - _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS - - 
-@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ThermostatLocalTempCalibration(ZHANumberConfigurationEntity): - """Local temperature calibration.""" - - _unique_id_suffix = "local_temperature_calibration" - _attr_native_min_value: float = -2.5 - _attr_native_max_value: float = 2.5 - _attr_native_step: float = 0.1 - _attr_multiplier: float = 0.1 - _attribute_name = "local_temperature_calibration" - _attr_translation_key: str = "local_temperature_calibration" - - _attr_mode: NumberMode = NumberMode.SLIDER - _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - models={"TRVZB"}, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SonoffThermostatLocalTempCalibration(ThermostatLocalTempCalibration): - """Local temperature calibration for the Sonoff TRVZB.""" - - _attr_native_min_value: float = -7 - _attr_native_max_value: float = 7 - _attr_native_step: float = 0.2 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY, models={"SNZB-06P"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SonoffPresenceSenorTimeout(ZHANumberConfigurationEntity): - """Configuration of Sonoff sensor presence detection timeout.""" - - _unique_id_suffix = "presence_detection_timeout" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: int = 15 - _attr_native_max_value: int = 60 - _attribute_name = "ultrasonic_o_to_u_delay" - _attr_translation_key: str = "presence_detection_timeout" - - _attr_mode: NumberMode = NumberMode.BOX - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ZCLTemperatureEntity(ZHANumberConfigurationEntity): - """Common entity class for ZCL temperature 
input.""" - - _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS - _attr_mode: NumberMode = NumberMode.BOX - _attr_native_step: float = 0.01 - _attr_multiplier: float = 0.01 - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ZCLHeatSetpointLimitEntity(ZCLTemperatureEntity): - """Min or max heat setpoint setting on thermostats.""" - - _attr_icon: str = "mdi:thermostat" - _attr_native_step: float = 0.5 - - _min_source = Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name - _max_source = Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name + return self.entity_data.entity.native_value @property def native_min_value(self) -> float: """Return the minimum value.""" - # The spec says 0x954D, which is a signed integer, therefore the value is in decimals - min_present_value = self._cluster_handler.cluster.get(self._min_source, -27315) - return min_present_value * self._attr_multiplier + return self.entity_data.entity.native_min_value @property def native_max_value(self) -> float: """Return the maximum value.""" - max_present_value = self._cluster_handler.cluster.get(self._max_source, 0x7FFF) - return max_present_value * self._attr_multiplier + return self.entity_data.entity.native_max_value + @property + def native_step(self) -> float | None: + """Return the value step.""" + return self.entity_data.entity.native_step -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class MaxHeatSetpointLimit(ZCLHeatSetpointLimitEntity): - """Max heat setpoint setting on thermostats. + @property + def native_unit_of_measurement(self) -> str | None: + """Return the unit the value is expressed in.""" + return self.entity_data.entity.native_unit_of_measurement - Optional thermostat attribute. 
- """ - - _unique_id_suffix = "max_heat_setpoint_limit" - _attribute_name: str = "max_heat_setpoint_limit" - _attr_translation_key: str = "max_heat_setpoint_limit" - _attr_entity_category = EntityCategory.CONFIG - - _min_source = Thermostat.AttributeDefs.min_heat_setpoint_limit.name - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class MinHeatSetpointLimit(ZCLHeatSetpointLimitEntity): - """Min heat setpoint setting on thermostats. - - Optional thermostat attribute. - """ - - _unique_id_suffix = "min_heat_setpoint_limit" - _attribute_name: str = "min_heat_setpoint_limit" - _attr_translation_key: str = "min_heat_setpoint_limit" - _attr_entity_category = EntityCategory.CONFIG - - _max_source = Thermostat.AttributeDefs.max_heat_setpoint_limit.name - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossExerciseTriggerTime(ZHANumberConfigurationEntity): - """Danfoss proprietary attribute to set the time to exercise the valve.""" - - _unique_id_suffix = "exercise_trigger_time" - _attribute_name: str = "exercise_trigger_time" - _attr_translation_key: str = "exercise_trigger_time" - _attr_native_min_value: int = 0 - _attr_native_max_value: int = 1439 - _attr_mode: NumberMode = NumberMode.BOX - _attr_native_unit_of_measurement: str = UnitOfTime.MINUTES - _attr_icon: str = "mdi:clock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossExternalMeasuredRoomSensor(ZCLTemperatureEntity): - """Danfoss proprietary attribute to communicate the value of the external temperature sensor.""" - - _unique_id_suffix = "external_measured_room_sensor" - _attribute_name: str = 
"external_measured_room_sensor" - _attr_translation_key: str = "external_temperature_sensor" - _attr_native_min_value: float = -80 - _attr_native_max_value: float = 35 - _attr_icon: str = "mdi:thermometer" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossLoadRoomMean(ZHANumberConfigurationEntity): - """Danfoss proprietary attribute to set a value for the load.""" - - _unique_id_suffix = "load_room_mean" - _attribute_name: str = "load_room_mean" - _attr_translation_key: str = "load_room_mean" - _attr_native_min_value: int = -8000 - _attr_native_max_value: int = 2000 - _attr_mode: NumberMode = NumberMode.BOX - _attr_icon: str = "mdi:scale-balance" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossRegulationSetpointOffset(ZHANumberConfigurationEntity): - """Danfoss proprietary attribute to set the regulation setpoint offset.""" - - _unique_id_suffix = "regulation_setpoint_offset" - _attribute_name: str = "regulation_setpoint_offset" - _attr_translation_key: str = "regulation_setpoint_offset" - _attr_mode: NumberMode = NumberMode.BOX - _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS - _attr_icon: str = "mdi:thermostat" - _attr_native_min_value: float = -2.5 - _attr_native_max_value: float = 2.5 - _attr_native_step: float = 0.1 - _attr_multiplier = 1 / 10 + @convert_zha_error_to_ha_error + async def async_set_native_value(self, value: float) -> None: + """Update the current value from HA.""" + await self.entity_data.entity.async_set_native_value(value=value) + self.async_write_ha_state() diff --git a/homeassistant/components/zha/radio_manager.py b/homeassistant/components/zha/radio_manager.py index 44b7304c58e..82c30b7678a 100644 --- 
a/homeassistant/components/zha/radio_manager.py +++ b/homeassistant/components/zha/radio_manager.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from collections.abc import AsyncIterator import contextlib from contextlib import suppress import copy @@ -13,6 +14,7 @@ from typing import Any, Self from bellows.config import CONF_USE_THREAD import voluptuous as vol +from zha.application.const import RadioType from zigpy.application import ControllerApplication import zigpy.backups from zigpy.config import ( @@ -29,14 +31,13 @@ from homeassistant.components import usb from homeassistant.core import HomeAssistant from . import repairs -from .core.const import ( +from .const import ( CONF_RADIO_TYPE, CONF_ZIGPY, DEFAULT_DATABASE_NAME, EZSP_OVERWRITE_EUI64, - RadioType, ) -from .core.helpers import get_zha_data +from .helpers import get_zha_data # Only the common radio types will be autoprobed, ordered by new device popularity. # XBee takes too long to probe since it scans through all possible bauds and likely has @@ -157,7 +158,7 @@ class ZhaRadioManager: return mgr @contextlib.asynccontextmanager - async def connect_zigpy_app(self) -> ControllerApplication: + async def connect_zigpy_app(self) -> AsyncIterator[ControllerApplication]: """Connect to the radio with the current config and then clean up.""" assert self.radio_type is not None @@ -177,7 +178,6 @@ class ZhaRadioManager: app_config[CONF_DEVICE] = self.device_settings app_config[CONF_NWK_BACKUP_ENABLED] = False app_config[CONF_USE_THREAD] = False - app_config = self.radio_type.controller.SCHEMA(app_config) app = await self.radio_type.controller.new( app_config, auto_form=False, start_radio=False diff --git a/homeassistant/components/zha/repairs/__init__.py b/homeassistant/components/zha/repairs/__init__.py index 3d8f2553baa..3fcbdb66bbc 100644 --- a/homeassistant/components/zha/repairs/__init__.py +++ b/homeassistant/components/zha/repairs/__init__.py @@ -8,7 +8,7 @@ from 
homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir -from ..core.const import DOMAIN +from ..const import DOMAIN from .network_settings_inconsistent import ( ISSUE_INCONSISTENT_NETWORK_SETTINGS, NetworkSettingsInconsistentFlow, diff --git a/homeassistant/components/zha/repairs/network_settings_inconsistent.py b/homeassistant/components/zha/repairs/network_settings_inconsistent.py index 2598ff8f98a..ef38ebc3d47 100644 --- a/homeassistant/components/zha/repairs/network_settings_inconsistent.py +++ b/homeassistant/components/zha/repairs/network_settings_inconsistent.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import issue_registry as ir -from ..core.const import DOMAIN +from ..const import DOMAIN from ..radio_manager import ZhaRadioManager _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py index 3cd22c99ec7..4d6d1ae52d8 100644 --- a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py +++ b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py @@ -19,7 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir -from ..core.const import DOMAIN +from ..const import DOMAIN _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/select.py b/homeassistant/components/zha/select.py index 026a85fbfdc..fdb47b550fe 100644 --- a/homeassistant/components/zha/select.py +++ b/homeassistant/components/zha/select.py @@ -2,56 +2,26 @@ from __future__ import annotations -from enum import Enum import functools import logging -from typing import TYPE_CHECKING, Any, Self - -from zhaquirks.danfoss import 
thermostat as danfoss_thermostat -from zhaquirks.quirk_ids import ( - DANFOSS_ALLY_THERMOSTAT, - TUYA_PLUG_MANUFACTURER, - TUYA_PLUG_ONOFF, -) -from zhaquirks.xiaomi.aqara.magnet_ac01 import OppleCluster as MagnetAC01OppleCluster -from zhaquirks.xiaomi.aqara.switch_acn047 import OppleCluster as T2RelayOppleCluster -from zigpy import types -from zigpy.quirks.v2 import ZCLEnumMetadata -from zigpy.zcl.clusters.general import OnOff -from zigpy.zcl.clusters.security import IasWd +from typing import Any from homeassistant.components.select import SelectEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNKNOWN, EntityCategory, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_HUE_OCCUPANCY, - CLUSTER_HANDLER_IAS_WD, - CLUSTER_HANDLER_INOVELLI, - CLUSTER_HANDLER_OCCUPANCY, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_THERMOSTAT, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, - Strobe, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - - -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.SELECT -) _LOGGER = logging.getLogger(__name__) @@ -68,731 +38,38 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, + 
zha_async_add_entities, async_add_entities, + ZHAEnumSelectEntity, entities_to_create, ), ) config_entry.async_on_unload(unsub) -class ZHAEnumSelectEntity(ZhaEntity, SelectEntity): +class ZHAEnumSelectEntity(ZHAEntity, SelectEntity): """Representation of a ZHA select entity.""" - _attr_entity_category = EntityCategory.CONFIG - _attribute_name: str - _enum: type[Enum] - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this select entity.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - self._attribute_name = self._enum.__name__ - self._attr_options = [entry.name.replace("_", " ") for entry in self._enum] - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA select entity.""" + super().__init__(entity_data, **kwargs) + self._attr_options = self.entity_data.entity.info_object.options @property def current_option(self) -> str | None: """Return the selected entity option to represent the entity state.""" - option = self._cluster_handler.data_cache.get(self._attribute_name) - if option is None: - return None - return option.name.replace("_", " ") + return self.entity_data.entity.current_option + @convert_zha_error_to_ha_error async def async_select_option(self, option: str) -> None: """Change the selected option.""" - self._cluster_handler.data_cache[self._attribute_name] = self._enum[ - option.replace(" ", "_") - ] + await self.entity_data.entity.async_select_option(option=option) self.async_write_ha_state() @callback - def async_restore_last_state(self, last_state) -> None: - """Restore previous state.""" - if last_state.state and last_state.state != STATE_UNKNOWN: - self._cluster_handler.data_cache[self._attribute_name] = self._enum[ - last_state.state.replace(" ", "_") - ] - - -class ZHANonZCLSelectEntity(ZHAEnumSelectEntity): - 
"""Representation of a ZHA select entity with no ZCL interaction.""" - - @property - def available(self) -> bool: - """Return entity availability.""" - return True - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHADefaultToneSelectEntity(ZHANonZCLSelectEntity): - """Representation of a ZHA default siren tone select entity.""" - - _unique_id_suffix = IasWd.Warning.WarningMode.__name__ - _enum = IasWd.Warning.WarningMode - _attr_translation_key: str = "default_siren_tone" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHADefaultSirenLevelSelectEntity(ZHANonZCLSelectEntity): - """Representation of a ZHA default siren level select entity.""" - - _unique_id_suffix = IasWd.Warning.SirenLevel.__name__ - _enum = IasWd.Warning.SirenLevel - _attr_translation_key: str = "default_siren_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHADefaultStrobeLevelSelectEntity(ZHANonZCLSelectEntity): - """Representation of a ZHA default siren strobe level select entity.""" - - _unique_id_suffix = IasWd.StrobeLevel.__name__ - _enum = IasWd.StrobeLevel - _attr_translation_key: str = "default_strobe_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHADefaultStrobeSelectEntity(ZHANonZCLSelectEntity): - """Representation of a ZHA default siren strobe select entity.""" - - _unique_id_suffix = Strobe.__name__ - _enum = Strobe - _attr_translation_key: str = "default_strobe" - - -class ZCLEnumSelectEntity(ZhaEntity, SelectEntity): - """Representation of a ZHA ZCL enum select entity.""" - - _attribute_name: str - _attr_entity_category = EntityCategory.CONFIG - _enum: type[Enum] - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - if ENTITY_METADATA not in kwargs and ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(cls._attribute_name) is None - ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, + def restore_external_state_attributes(self, state: State) -> None: + """Restore entity state.""" + if state.state and state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): + self.entity_data.entity.restore_external_state_attributes( + state=state.state, ) - return None - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this select entity.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - self._attr_options = [entry.name.replace("_", " ") for entry in self._enum] - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: ZCLEnumMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - self._enum = entity_metadata.enum - - @property - def current_option(self) -> str | None: - """Return the selected entity option to represent the entity state.""" - option = self._cluster_handler.cluster.get(self._attribute_name) - if option is None: - return None - option = self._enum(option) - return option.name.replace("_", " ") - - async def async_select_option(self, option: str) -> None: - """Change the selected option.""" - await 
self._cluster_handler.write_attributes_safe( - {self._attribute_name: self._enum[option.replace(" ", "_")]} - ) - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any): - """Handle state update from cluster handler.""" - self.async_write_ha_state() - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) -class ZHAStartupOnOffSelectEntity(ZCLEnumSelectEntity): - """Representation of a ZHA startup onoff select entity.""" - - _unique_id_suffix = OnOff.StartUpOnOff.__name__ - _attribute_name = "start_up_on_off" - _enum = OnOff.StartUpOnOff - _attr_translation_key: str = "start_up_on_off" - - -class TuyaPowerOnState(types.enum8): - """Tuya power on state enum.""" - - Off = 0x00 - On = 0x01 - LastState = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF -) -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", quirk_ids=TUYA_PLUG_MANUFACTURER -) -class TuyaPowerOnStateSelectEntity(ZCLEnumSelectEntity): - """Representation of a ZHA power on state select entity.""" - - _unique_id_suffix = "power_on_state" - _attribute_name = "power_on_state" - _enum = TuyaPowerOnState - _attr_translation_key: str = "power_on_state" - - -class TuyaBacklightMode(types.enum8): - """Tuya switch backlight mode enum.""" - - Off = 0x00 - LightWhenOn = 0x01 - LightWhenOff = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF -) -class TuyaBacklightModeSelectEntity(ZCLEnumSelectEntity): - """Representation of a ZHA backlight mode select entity.""" - - _unique_id_suffix = "backlight_mode" - _attribute_name = "backlight_mode" - _enum = TuyaBacklightMode - 
_attr_translation_key: str = "backlight_mode" - - -class MoesBacklightMode(types.enum8): - """MOES switch backlight mode enum.""" - - Off = 0x00 - LightWhenOn = 0x01 - LightWhenOff = 0x02 - Freeze = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", quirk_ids=TUYA_PLUG_MANUFACTURER -) -class MoesBacklightModeSelectEntity(ZCLEnumSelectEntity): - """Moes devices have a different backlight mode select options.""" - - _unique_id_suffix = "backlight_mode" - _attribute_name = "backlight_mode" - _enum = MoesBacklightMode - _attr_translation_key: str = "backlight_mode" - - -class AqaraMotionSensitivities(types.enum8): - """Aqara motion sensitivities.""" - - Low = 0x01 - Medium = 0x02 - High = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", - models={"lumi.motion.ac01", "lumi.motion.ac02", "lumi.motion.agl04"}, -) -class AqaraMotionSensitivity(ZCLEnumSelectEntity): - """Representation of a ZHA motion sensitivity configuration entity.""" - - _unique_id_suffix = "motion_sensitivity" - _attribute_name = "motion_sensitivity" - _enum = AqaraMotionSensitivities - _attr_translation_key: str = "motion_sensitivity" - - -class HueV1MotionSensitivities(types.enum8): - """Hue v1 motion sensitivities.""" - - Low = 0x00 - Medium = 0x01 - High = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY, - manufacturers={"Philips", "Signify Netherlands B.V."}, - models={"SML001"}, -) -class HueV1MotionSensitivity(ZCLEnumSelectEntity): - """Representation of a ZHA motion sensitivity configuration entity.""" - - _unique_id_suffix = "motion_sensitivity" - _attribute_name = "sensitivity" - _enum = HueV1MotionSensitivities - _attr_translation_key: str = "motion_sensitivity" - - -class HueV2MotionSensitivities(types.enum8): - """Hue v2 motion sensitivities.""" - - Lowest = 0x00 - Low = 0x01 - Medium = 0x02 - High = 0x03 - Highest = 0x04 - - -@CONFIG_DIAGNOSTIC_MATCH( - 
cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY, - manufacturers={"Philips", "Signify Netherlands B.V."}, - models={"SML002", "SML003", "SML004"}, -) -class HueV2MotionSensitivity(ZCLEnumSelectEntity): - """Representation of a ZHA motion sensitivity configuration entity.""" - - _unique_id_suffix = "motion_sensitivity" - _attribute_name = "sensitivity" - _enum = HueV2MotionSensitivities - _attr_translation_key: str = "motion_sensitivity" - - -class AqaraMonitoringModess(types.enum8): - """Aqara monitoring modes.""" - - Undirected = 0x00 - Left_Right = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.motion.ac01"} -) -class AqaraMonitoringMode(ZCLEnumSelectEntity): - """Representation of a ZHA monitoring mode configuration entity.""" - - _unique_id_suffix = "monitoring_mode" - _attribute_name = "monitoring_mode" - _enum = AqaraMonitoringModess - _attr_translation_key: str = "monitoring_mode" - - -class AqaraApproachDistances(types.enum8): - """Aqara approach distances.""" - - Far = 0x00 - Medium = 0x01 - Near = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.motion.ac01"} -) -class AqaraApproachDistance(ZCLEnumSelectEntity): - """Representation of a ZHA approach distance configuration entity.""" - - _unique_id_suffix = "approach_distance" - _attribute_name = "approach_distance" - _enum = AqaraApproachDistances - _attr_translation_key: str = "approach_distance" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.magnet.ac01"} -) -class AqaraMagnetAC01DetectionDistance(ZCLEnumSelectEntity): - """Representation of a ZHA detection distance configuration entity.""" - - _unique_id_suffix = "detection_distance" - _attribute_name = "detection_distance" - _enum = MagnetAC01OppleCluster.DetectionDistance - _attr_translation_key: str = "detection_distance" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} 
-) -class AqaraT2RelaySwitchMode(ZCLEnumSelectEntity): - """Representation of a ZHA switch mode configuration entity.""" - - _unique_id_suffix = "switch_mode" - _attribute_name = "switch_mode" - _enum = T2RelayOppleCluster.SwitchMode - _attr_translation_key: str = "switch_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} -) -class AqaraT2RelaySwitchType(ZCLEnumSelectEntity): - """Representation of a ZHA switch type configuration entity.""" - - _unique_id_suffix = "switch_type" - _attribute_name = "switch_type" - _enum = T2RelayOppleCluster.SwitchType - _attr_translation_key: str = "switch_type" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} -) -class AqaraT2RelayStartupOnOff(ZCLEnumSelectEntity): - """Representation of a ZHA startup on off configuration entity.""" - - _unique_id_suffix = "startup_on_off" - _attribute_name = "startup_on_off" - _enum = T2RelayOppleCluster.StartupOnOff - _attr_translation_key: str = "start_up_on_off" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} -) -class AqaraT2RelayDecoupledMode(ZCLEnumSelectEntity): - """Representation of a ZHA switch decoupled mode configuration entity.""" - - _unique_id_suffix = "decoupled_mode" - _attribute_name = "decoupled_mode" - _enum = T2RelayOppleCluster.DecoupledMode - _attr_translation_key: str = "decoupled_mode" - - -class InovelliOutputMode(types.enum1): - """Inovelli output mode.""" - - Dimmer = 0x00 - OnOff = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliOutputModeEntity(ZCLEnumSelectEntity): - """Inovelli output mode control.""" - - _unique_id_suffix = "output_mode" - _attribute_name = "output_mode" - _enum = InovelliOutputMode - _attr_translation_key: str = "output_mode" - - -class InovelliSwitchType(types.enum8): - """Inovelli switch mode.""" - - Single_Pole = 0x00 - 
Three_Way_Dumb = 0x01 - Three_Way_AUX = 0x02 - Single_Pole_Full_Sine = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM31-SN"} -) -class InovelliSwitchTypeEntity(ZCLEnumSelectEntity): - """Inovelli switch type control.""" - - _unique_id_suffix = "switch_type" - _attribute_name = "switch_type" - _enum = InovelliSwitchType - _attr_translation_key: str = "switch_type" - - -class InovelliFanSwitchType(types.enum1): - """Inovelli fan switch mode.""" - - Load_Only = 0x00 - Three_Way_AUX = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} -) -class InovelliFanSwitchTypeEntity(ZCLEnumSelectEntity): - """Inovelli fan switch type control.""" - - _unique_id_suffix = "switch_type" - _attribute_name = "switch_type" - _enum = InovelliFanSwitchType - _attr_translation_key: str = "switch_type" - - -class InovelliLedScalingMode(types.enum1): - """Inovelli led mode.""" - - VZM31SN = 0x00 - LZW31SN = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliLedScalingModeEntity(ZCLEnumSelectEntity): - """Inovelli led mode control.""" - - _unique_id_suffix = "led_scaling_mode" - _attribute_name = "led_scaling_mode" - _enum = InovelliLedScalingMode - _attr_translation_key: str = "led_scaling_mode" - - -class InovelliFanLedScalingMode(types.enum8): - """Inovelli fan led mode.""" - - VZM31SN = 0x00 - Grade_1 = 0x01 - Grade_2 = 0x02 - Grade_3 = 0x03 - Grade_4 = 0x04 - Grade_5 = 0x05 - Grade_6 = 0x06 - Grade_7 = 0x07 - Grade_8 = 0x08 - Grade_9 = 0x09 - Adaptive = 0x0A - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} -) -class InovelliFanLedScalingModeEntity(ZCLEnumSelectEntity): - """Inovelli fan switch led mode control.""" - - _unique_id_suffix = "smart_fan_led_display_levels" - _attribute_name = "smart_fan_led_display_levels" - _enum = InovelliFanLedScalingMode - _attr_translation_key: 
str = "smart_fan_led_display_levels" - - -class InovelliNonNeutralOutput(types.enum1): - """Inovelli non neutral output selection.""" - - Low = 0x00 - High = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliNonNeutralOutputEntity(ZCLEnumSelectEntity): - """Inovelli non neutral output control.""" - - _unique_id_suffix = "increased_non_neutral_output" - _attribute_name = "increased_non_neutral_output" - _enum = InovelliNonNeutralOutput - _attr_translation_key: str = "increased_non_neutral_output" - - -class AqaraFeedingMode(types.enum8): - """Feeding mode.""" - - Manual = 0x00 - Schedule = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -class AqaraPetFeederMode(ZCLEnumSelectEntity): - """Representation of an Aqara pet feeder mode configuration entity.""" - - _unique_id_suffix = "feeding_mode" - _attribute_name = "feeding_mode" - _enum = AqaraFeedingMode - _attr_translation_key: str = "feeding_mode" - - -class AqaraThermostatPresetMode(types.enum8): - """Thermostat preset mode.""" - - Manual = 0x00 - Auto = 0x01 - Away = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatPreset(ZCLEnumSelectEntity): - """Representation of an Aqara thermostat preset configuration entity.""" - - _unique_id_suffix = "preset" - _attribute_name = "preset" - _enum = AqaraThermostatPresetMode - _attr_translation_key: str = "preset" - - -class SonoffPresenceDetectionSensitivityEnum(types.enum8): - """Enum for detection sensitivity select entity.""" - - Low = 0x01 - Medium = 0x02 - High = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY, models={"SNZB-06P"} -) -class SonoffPresenceDetectionSensitivity(ZCLEnumSelectEntity): - """Entity to set the detection sensitivity of the Sonoff SNZB-06P.""" - - _unique_id_suffix = "detection_sensitivity" - _attribute_name = 
"ultrasonic_u_to_o_threshold" - _enum = SonoffPresenceDetectionSensitivityEnum - _attr_translation_key: str = "detection_sensitivity" - - -class KeypadLockoutEnum(types.enum8): - """Keypad lockout options.""" - - Unlock = 0x00 - Lock1 = 0x01 - Lock2 = 0x02 - Lock3 = 0x03 - Lock4 = 0x04 - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names="thermostat_ui") -class KeypadLockout(ZCLEnumSelectEntity): - """Mandatory attribute for thermostat_ui cluster. - - Often only the first two are implemented, and Lock2 to Lock4 should map to Lock1 in the firmware. - This however covers all bases. - """ - - _unique_id_suffix = "keypad_lockout" - _attribute_name: str = "keypad_lockout" - _enum = KeypadLockoutEnum - _attr_translation_key: str = "keypad_lockout" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossExerciseDayOfTheWeek(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for setting the day of the week for exercising.""" - - _unique_id_suffix = "exercise_day_of_week" - _attribute_name = "exercise_day_of_week" - _attr_translation_key: str = "exercise_day_of_week" - _enum = danfoss_thermostat.DanfossExerciseDayOfTheWeekEnum - _attr_icon: str = "mdi:wrench-clock" - - -class DanfossOrientationEnum(types.enum8): - """Vertical or Horizontal.""" - - Horizontal = 0x00 - Vertical = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossOrientation(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for setting the orientation of the valve. - - Needed for biasing the internal temperature sensor. - This is implemented as an enum here, but is a boolean on the device. 
- """ - - _unique_id_suffix = "orientation" - _attribute_name = "orientation" - _attr_translation_key: str = "valve_orientation" - _enum = DanfossOrientationEnum - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossAdaptationRunControl(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for controlling the current adaptation run.""" - - _unique_id_suffix = "adaptation_run_control" - _attribute_name = "adaptation_run_control" - _attr_translation_key: str = "adaptation_run_command" - _enum = danfoss_thermostat.DanfossAdaptationRunControlEnum - - -class DanfossControlAlgorithmScaleFactorEnum(types.enum8): - """The time scale factor for changing the opening of the valve. - - Not all values are given, therefore there are some extrapolated values with a margin of error of about 5 minutes. - This is implemented as an enum here, but is a number on the device. - """ - - quick_5min = 0x01 - - quick_10min = 0x02 # extrapolated - quick_15min = 0x03 # extrapolated - quick_25min = 0x04 # extrapolated - - moderate_30min = 0x05 - - moderate_40min = 0x06 # extrapolated - moderate_50min = 0x07 # extrapolated - moderate_60min = 0x08 # extrapolated - moderate_70min = 0x09 # extrapolated - - slow_80min = 0x0A - - quick_open_disabled = 0x11 # not sure what it does; also requires lower 4 bits to be in [1, 10] I assume - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossControlAlgorithmScaleFactor(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for setting the scale factor of the setpoint filter time constant.""" - - _unique_id_suffix = "control_algorithm_scale_factor" - _attribute_name = "control_algorithm_scale_factor" - _attr_translation_key: str = "setpoint_response_time" - _enum = DanfossControlAlgorithmScaleFactorEnum - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="thermostat_ui", - 
quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossViewingDirection(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for setting the viewing direction of the screen.""" - - _unique_id_suffix = "viewing_direction" - _attribute_name = "viewing_direction" - _attr_translation_key: str = "viewing_direction" - _enum = danfoss_thermostat.DanfossViewingDirectionEnum diff --git a/homeassistant/components/zha/sensor.py b/homeassistant/components/zha/sensor.py index 99d950dc06a..dde000b24b5 100644 --- a/homeassistant/components/zha/sensor.py +++ b/homeassistant/components/zha/sensor.py @@ -2,115 +2,71 @@ from __future__ import annotations -import asyncio -from dataclasses import dataclass -from datetime import timedelta -import enum +from collections.abc import Mapping import functools import logging -import numbers -import random -from typing import TYPE_CHECKING, Any, Self +from typing import Any -from zhaquirks.danfoss import thermostat as danfoss_thermostat -from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT -from zigpy import types -from zigpy.quirks.v2 import ZCLEnumMetadata, ZCLSensorMetadata -from zigpy.state import Counter, State -from zigpy.zcl.clusters.closures import WindowCovering -from zigpy.zcl.clusters.general import Basic - -from homeassistant.components.climate import HVACAction from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, - SensorEntityDescription, SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - CONCENTRATION_PARTS_PER_BILLION, - CONCENTRATION_PARTS_PER_MILLION, - LIGHT_LUX, - PERCENTAGE, - SIGNAL_STRENGTH_DECIBELS_MILLIWATT, - EntityCategory, - Platform, - UnitOfApparentPower, - UnitOfElectricCurrent, - UnitOfElectricPotential, - UnitOfEnergy, - UnitOfFrequency, - UnitOfMass, - UnitOfPower, - UnitOfPressure, - UnitOfTemperature, - UnitOfTime, - UnitOfVolume, - UnitOfVolumeFlowRate, -) -from 
homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import StateType -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_ANALOG_INPUT, - CLUSTER_HANDLER_BASIC, - CLUSTER_HANDLER_COVER, - CLUSTER_HANDLER_DEVICE_TEMPERATURE, - CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - CLUSTER_HANDLER_HUMIDITY, - CLUSTER_HANDLER_ILLUMINANCE, - CLUSTER_HANDLER_LEAF_WETNESS, - CLUSTER_HANDLER_POWER_CONFIGURATION, - CLUSTER_HANDLER_PRESSURE, - CLUSTER_HANDLER_SMARTENERGY_METERING, - CLUSTER_HANDLER_SOIL_MOISTURE, - CLUSTER_HANDLER_TEMPERATURE, - CLUSTER_HANDLER_THERMOSTAT, - DATA_ZHA, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + EntityData, + async_add_entities as zha_async_add_entities, + exclude_none_values, + get_zha_data, ) -from .core.helpers import get_zha_data, validate_device_class, validate_unit -from .core.registries import SMARTTHINGS_HUMIDITY_CLUSTER, ZHA_ENTITIES -from .entity import BaseZhaEntity, ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - -BATTERY_SIZES = { - 0: "No battery", - 1: "Built in", - 2: "Other", - 3: "AA", - 4: "AAA", - 5: "C", - 6: "D", - 7: "CR2", - 8: "CR123A", - 9: "CR2450", - 10: "CR2032", - 11: "CR1632", - 255: "Unknown", -} _LOGGER = logging.getLogger(__name__) -CLUSTER_HANDLER_ST_HUMIDITY_CLUSTER = ( - f"cluster_handler_0x{SMARTTHINGS_HUMIDITY_CLUSTER:04x}" -) -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.SENSOR) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.SENSOR) -CONFIG_DIAGNOSTIC_MATCH = 
functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.SENSOR -) +# For backwards compatibility and transparency, all expected extra state attributes are +# explicitly listed below. These should have been sensors themselves but for whatever +# reason were not created as such. They will be migrated to independent sensor entities +# in a future release. +_EXTRA_STATE_ATTRIBUTES: set[str] = { + # Battery + "battery_size", + "battery_quantity", + "battery_voltage", + # Power + "measurement_type", + "apparent_power_max", + "rms_current_max", + "rms_voltage_max", + "ac_frequency_max", + "power_factor_max", + "active_power_max", + # Smart Energy metering + "device_type", + "status", + "zcl_unit_of_measurement", + # Danfoss bitmaps + "In_progress", + "Valve_characteristic_found", + "Valve_characteristic_lost", + "Top_pcb_sensor_error", + "Side_pcb_sensor_error", + "Non_volatile_memory_error", + "Unknown_hw_error", + "Motor_error", + "Invalid_internal_communication", + "Invalid_clock_information", + "Radio_communication_error", + "Encoder_jammed", + "Low_battery", + "Critical_low_battery", +} async def async_setup_entry( @@ -126,1504 +82,76 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, Sensor, entities_to_create ), ) config_entry.async_on_unload(unsub) # pylint: disable-next=hass-invalid-inheritance # needs fixing -class Sensor(ZhaEntity, SensorEntity): - """Base ZHA sensor.""" +class Sensor(ZHAEntity, SensorEntity): + """ZHA sensor.""" - _attribute_name: int | str | None = None - _decimals: int = 1 - _divisor: int = 1 - _multiplier: int | float = 1 + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA select entity.""" + super().__init__(entity_data, **kwargs) + entity = self.entity_data.entity - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: 
ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. + if entity.device_class is not None: + self._attr_device_class = SensorDeviceClass(entity.device_class) - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - if ENTITY_METADATA not in kwargs and ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name + if entity.state_class is not None: + self._attr_state_class = SensorStateClass(entity.state_class) + + if hasattr(entity.info_object, "unit") and entity.info_object.unit is not None: + self._attr_native_unit_of_measurement = entity.info_object.unit + + if ( + hasattr(entity, "entity_description") + and entity.entity_description is not None ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, - ) - return None + entity_description = entity.entity_description - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this sensor.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: ZCLSensorMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - if entity_metadata.divisor is not None: - self._divisor = entity_metadata.divisor - if entity_metadata.multiplier is not None: - self._multiplier = entity_metadata.multiplier - if entity_metadata.device_class is not None: - self._attr_device_class 
= validate_device_class( - SensorDeviceClass, - entity_metadata.device_class, - Platform.SENSOR.value, - _LOGGER, - ) - if entity_metadata.device_class is None and entity_metadata.unit is not None: - self._attr_native_unit_of_measurement = validate_unit( - entity_metadata.unit - ).value - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - assert self._attribute_name is not None - raw_state = self._cluster_handler.cluster.get(self._attribute_name) - if raw_state is None: - return None - return self.formatter(raw_state) - - @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any) -> None: - """Handle state update from cluster handler.""" - self.async_write_ha_state() - - def formatter(self, value: int | enum.IntEnum) -> int | float | str | None: - """Numeric pass-through formatter.""" - if self._decimals > 0: - return round( - float(value * self._multiplier) / self._divisor, self._decimals - ) - return round(float(value * self._multiplier) / self._divisor) - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PollableSensor(Sensor): - """Base ZHA sensor that polls for state.""" - - _use_custom_polling: bool = True - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._cancel_refresh_handle: CALLBACK_TYPE | None = None - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - if self._use_custom_polling: - refresh_interval = random.randint(30, 60) - self._cancel_refresh_handle = 
async_track_time_interval( - self.hass, self._refresh, timedelta(seconds=refresh_interval) - ) - self.debug("started polling with refresh interval of %s", refresh_interval) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - if self._cancel_refresh_handle is not None: - self._cancel_refresh_handle() - self._cancel_refresh_handle = None - self.debug("stopped polling during device removal") - await super().async_will_remove_from_hass() - - async def _refresh(self, time): - """Call async_update at a constrained random interval.""" - if self._zha_device.available and self.hass.data[DATA_ZHA].allow_polling: - self.debug("polling for updated state") - await self.async_update() - self.async_write_ha_state() - else: - self.debug( - "skipping polling for updated state, available: %s, allow polled requests: %s", - self._zha_device.available, - self.hass.data[DATA_ZHA].allow_polling, - ) - - -class DeviceCounterSensor(BaseZhaEntity, SensorEntity): - """Device counter sensor.""" - - _attr_should_poll = True - _attr_state_class: SensorStateClass = SensorStateClass.TOTAL - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_entity_registry_enabled_default = False - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - counter_groups: str, - counter_group: str, - counter: str, - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - return cls( - unique_id, zha_device, counter_groups, counter_group, counter, **kwargs - ) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - counter_groups: str, - counter_group: str, - counter: str, - **kwargs: Any, - ) -> None: - """Init this sensor.""" - super().__init__(unique_id, zha_device, **kwargs) - state: State = self._zha_device.gateway.application_controller.state - self._zigpy_counter: Counter = ( - getattr(state, counter_groups).get(counter_group, {}).get(counter, None) - ) - self._attr_name: str = self._zigpy_counter.name - self.remove_future: asyncio.Future - - @property - def available(self) -> bool: - """Return entity availability.""" - return self._zha_device.available - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - self.remove_future = self.hass.loop.create_future() - self._zha_device.gateway.register_entity_reference( - self._zha_device.ieee, - self.entity_id, - self._zha_device, - {}, - self.device_info, - self.remove_future, - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - await super().async_will_remove_from_hass() - self.zha_device.gateway.remove_entity_reference(self) - self.remove_future.set_result(True) - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - return self._zigpy_counter.value - - async def async_update(self) -> None: - """Retrieve latest state.""" - self.async_write_ha_state() - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class EnumSensor(Sensor): - """Sensor with value from enum.""" - - _attr_device_class: SensorDeviceClass = SensorDeviceClass.ENUM - _enum: type[enum.Enum] - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this sensor.""" - 
super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._attr_options = [e.name for e in self._enum] - - def _init_from_quirks_metadata(self, entity_metadata: ZCLEnumMetadata) -> None: - """Init this entity from the quirks metadata.""" - ZhaEntity._init_from_quirks_metadata(self, entity_metadata) # noqa: SLF001 - self._attribute_name = entity_metadata.attribute_name - self._enum = entity_metadata.enum - - def formatter(self, value: int) -> str | None: - """Use name of enum.""" - assert self._enum is not None - return self._enum(value).name - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ANALOG_INPUT, - manufacturers="Digi", - stop_on_match_group=CLUSTER_HANDLER_ANALOG_INPUT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AnalogInput(Sensor): - """Sensor that displays analog input values.""" - - _attribute_name = "present_value" - _attr_translation_key: str = "analog_input" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_POWER_CONFIGURATION) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Battery(Sensor): - """Battery sensor of power configuration cluster.""" - - _attribute_name = "battery_percentage_remaining" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.BATTERY - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_native_unit_of_measurement = PERCENTAGE - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Unlike any other entity, PowerConfiguration cluster may not support - battery_percent_remaining attribute, but zha-device-handlers takes care of it - so create the entity regardless - """ - if zha_device.is_mains_powered: - return None - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - @staticmethod - def formatter(value: int) -> int | None: - """Return the state of the entity.""" - # per zcl specs battery percent is reported at 200% ¯\_(ツ)_/¯ - if not isinstance(value, numbers.Number) or value == -1 or value == 255: - return None - return round(value / 2) - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device state attrs for battery sensors.""" - state_attrs = {} - battery_size = self._cluster_handler.cluster.get("battery_size") - if battery_size is not None: - state_attrs["battery_size"] = BATTERY_SIZES.get(battery_size, "Unknown") - battery_quantity = self._cluster_handler.cluster.get("battery_quantity") - if battery_quantity is not None: - state_attrs["battery_quantity"] = battery_quantity - battery_voltage = self._cluster_handler.cluster.get("battery_voltage") - if battery_voltage is not None: - state_attrs["battery_voltage"] = round(battery_voltage / 10, 2) - return state_attrs - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - stop_on_match_group=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - models={"VZM31-SN", "SP 234", "outletv4"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurement(PollableSensor): - """Active power measurement.""" - - _use_custom_polling: bool = False - _attribute_name = "active_power" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.POWER - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_native_unit_of_measurement: str = UnitOfPower.WATT - _div_mul_prefix: str | None = "ac_power" - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device state 
attrs for sensor.""" - attrs = {} - if self._cluster_handler.measurement_type is not None: - attrs["measurement_type"] = self._cluster_handler.measurement_type - - max_attr_name = f"{self._attribute_name}_max" - - try: - max_v = self._cluster_handler.cluster.get(max_attr_name) - except KeyError: - pass - else: - if max_v is not None: - attrs[max_attr_name] = str(self.formatter(max_v)) - - return attrs - - def formatter(self, value: int) -> int | float: - """Return 'normalized' value.""" - if self._div_mul_prefix: - multiplier = getattr( - self._cluster_handler, f"{self._div_mul_prefix}_multiplier" - ) - divisor = getattr(self._cluster_handler, f"{self._div_mul_prefix}_divisor") - else: - multiplier = self._multiplier - divisor = self._divisor - value = float(value * multiplier) / divisor - if value < 100 and divisor > 1: - return round(value, self._decimals) - return round(value) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - stop_on_match_group=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PolledElectricalMeasurement(ElectricalMeasurement): - """Polled active power measurement.""" - - _use_custom_polling: bool = True - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementApparentPower(PolledElectricalMeasurement): - """Apparent power measurement.""" - - _attribute_name = "apparent_power" - _unique_id_suffix = "apparent_power" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.APPARENT_POWER - _attr_native_unit_of_measurement = UnitOfApparentPower.VOLT_AMPERE - _div_mul_prefix = "ac_power" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class 
ElectricalMeasurementRMSCurrent(PolledElectricalMeasurement): - """RMS current measurement.""" - - _attribute_name = "rms_current" - _unique_id_suffix = "rms_current" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.CURRENT - _attr_native_unit_of_measurement = UnitOfElectricCurrent.AMPERE - _div_mul_prefix = "ac_current" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementRMSVoltage(PolledElectricalMeasurement): - """RMS Voltage measurement.""" - - _attribute_name = "rms_voltage" - _unique_id_suffix = "rms_voltage" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.VOLTAGE - _attr_native_unit_of_measurement = UnitOfElectricPotential.VOLT - _div_mul_prefix = "ac_voltage" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementFrequency(PolledElectricalMeasurement): - """Frequency measurement.""" - - _attribute_name = "ac_frequency" - _unique_id_suffix = "ac_frequency" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.FREQUENCY - _attr_translation_key: str = "ac_frequency" - _attr_native_unit_of_measurement = UnitOfFrequency.HERTZ - _div_mul_prefix = "ac_frequency" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementPowerFactor(PolledElectricalMeasurement): - """Power Factor measurement.""" - - _attribute_name = "power_factor" - _unique_id_suffix = "power_factor" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - 
_attr_device_class: SensorDeviceClass = SensorDeviceClass.POWER_FACTOR - _attr_native_unit_of_measurement = PERCENTAGE - _div_mul_prefix = None - - -@MULTI_MATCH( - generic_ids=CLUSTER_HANDLER_ST_HUMIDITY_CLUSTER, - stop_on_match_group=CLUSTER_HANDLER_HUMIDITY, -) -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_HUMIDITY, - stop_on_match_group=CLUSTER_HANDLER_HUMIDITY, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Humidity(Sensor): - """Humidity sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _divisor = 100 - _attr_native_unit_of_measurement = PERCENTAGE - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_SOIL_MOISTURE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SoilMoisture(Sensor): - """Soil Moisture sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_translation_key: str = "soil_moisture" - _divisor = 100 - _attr_native_unit_of_measurement = PERCENTAGE - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEAF_WETNESS) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class LeafWetness(Sensor): - """Leaf Wetness sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_translation_key: str = "leaf_wetness" - _divisor = 100 - _attr_native_unit_of_measurement = PERCENTAGE - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ILLUMINANCE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Illuminance(Sensor): - """Illuminance Sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.ILLUMINANCE 
- _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_native_unit_of_measurement = LIGHT_LUX - - def formatter(self, value: int) -> int | None: - """Convert illumination data.""" - if value == 0: - return 0 - if value == 0xFFFF: - return None - return round(pow(10, ((value - 1) / 10000))) - - -@dataclass(frozen=True, kw_only=True) -class SmartEnergyMeteringEntityDescription(SensorEntityDescription): - """Dataclass that describes a Zigbee smart energy metering entity.""" - - key: str = "instantaneous_demand" - state_class: SensorStateClass | None = SensorStateClass.MEASUREMENT - scale: int = 1 - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SmartEnergyMetering(PollableSensor): - """Metering sensor.""" - - entity_description: SmartEnergyMeteringEntityDescription - _use_custom_polling: bool = False - _attribute_name = "instantaneous_demand" - _attr_translation_key: str = "instantaneous_demand" - - _ENTITY_DESCRIPTION_MAP = { - 0x00: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - ), - 0x01: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - device_class=None, # volume flow rate is not supported yet - ), - 0x02: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, - device_class=None, # volume flow rate is not supported yet - ), - 0x03: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - device_class=None, # volume flow rate is not supported yet - scale=100, - ), - 0x04: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"{UnitOfVolume.GALLONS}/{UnitOfTime.HOURS}", # US gallons per hour - device_class=None, # 
volume flow rate is not supported yet - ), - 0x05: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"IMP {UnitOfVolume.GALLONS}/{UnitOfTime.HOURS}", # IMP gallons per hour - device_class=None, # needs to be None as imperial gallons are not supported - ), - 0x06: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfPower.BTU_PER_HOUR, - device_class=None, - state_class=None, - ), - 0x07: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"l/{UnitOfTime.HOURS}", - device_class=None, # volume flow rate is not supported yet - ), - 0x08: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfPressure.KPA, - device_class=SensorDeviceClass.PRESSURE, - ), # gauge - 0x09: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfPressure.KPA, - device_class=SensorDeviceClass.PRESSURE, - ), # absolute - 0x0A: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"{UnitOfVolume.CUBIC_FEET}/{UnitOfTime.HOURS}", # cubic feet per hour - device_class=None, # volume flow rate is not supported yet - scale=1000, - ), - 0x0B: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement="unitless", device_class=None, state_class=None - ), - 0x0C: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"{UnitOfEnergy.MEGA_JOULE}/{UnitOfTime.SECONDS}", - device_class=None, # needs to be None as MJ/s is not supported - ), - } - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - entity_description = self._ENTITY_DESCRIPTION_MAP.get( - self._cluster_handler.unit_of_measurement - ) - if entity_description is not None: - self.entity_description = entity_description - - def formatter(self, value: int) -> int | float: - """Pass through cluster handler formatter.""" - return 
self._cluster_handler.demand_formatter(value) - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device state attrs for battery sensors.""" - attrs = {} - if self._cluster_handler.device_type is not None: - attrs["device_type"] = self._cluster_handler.device_type - if (status := self._cluster_handler.status) is not None: - if isinstance(status, enum.IntFlag): - attrs["status"] = str( - status.name if status.name is not None else status.value + if entity_description.state_class is not None: + self._attr_state_class = SensorStateClass( + entity_description.state_class.value + ) + + if entity_description.scale is not None: + self._attr_scale = entity_description.scale + + if entity_description.native_unit_of_measurement is not None: + self._attr_native_unit_of_measurement = ( + entity_description.native_unit_of_measurement + ) + + if entity_description.device_class is not None: + self._attr_device_class = SensorDeviceClass( + entity_description.device_class.value ) - else: - attrs["status"] = str(status)[len(status.__class__.__name__) + 1 :] - return attrs @property def native_value(self) -> StateType: """Return the state of the entity.""" - state = super().native_value - if hasattr(self, "entity_description") and state is not None: - return float(state) * self.entity_description.scale - - return state - - -@dataclass(frozen=True, kw_only=True) -class SmartEnergySummationEntityDescription(SmartEnergyMeteringEntityDescription): - """Dataclass that describes a Zigbee smart energy summation entity.""" - - key: str = "summation_delivered" - state_class: SensorStateClass | None = SensorStateClass.TOTAL_INCREASING - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SmartEnergySummation(SmartEnergyMetering): - """Smart Energy Metering summation sensor.""" - - entity_description: 
SmartEnergySummationEntityDescription - _attribute_name = "current_summ_delivered" - _unique_id_suffix = "summation_delivered" - _attr_translation_key: str = "summation_delivered" - - _ENTITY_DESCRIPTION_MAP = { - 0x00: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - ), - 0x01: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, - device_class=SensorDeviceClass.VOLUME, - ), - 0x02: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, - device_class=SensorDeviceClass.VOLUME, - ), - 0x03: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, - device_class=SensorDeviceClass.VOLUME, - scale=100, - ), - 0x04: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.GALLONS, # US gallons - device_class=SensorDeviceClass.VOLUME, - ), - 0x05: SmartEnergySummationEntityDescription( - native_unit_of_measurement=f"IMP {UnitOfVolume.GALLONS}", - device_class=None, # needs to be None as imperial gallons are not supported - ), - 0x06: SmartEnergySummationEntityDescription( - native_unit_of_measurement="BTU", device_class=None, state_class=None - ), - 0x07: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.LITERS, - device_class=SensorDeviceClass.VOLUME, - ), - 0x08: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfPressure.KPA, - device_class=SensorDeviceClass.PRESSURE, - state_class=SensorStateClass.MEASUREMENT, - ), # gauge - 0x09: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfPressure.KPA, - device_class=SensorDeviceClass.PRESSURE, - state_class=SensorStateClass.MEASUREMENT, - ), # absolute - 0x0A: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, - device_class=SensorDeviceClass.VOLUME, - scale=1000, - ), - 0x0B: 
SmartEnergySummationEntityDescription( - native_unit_of_measurement="unitless", device_class=None, state_class=None - ), - 0x0C: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfEnergy.MEGA_JOULE, - device_class=SensorDeviceClass.ENERGY, - ), - } - - def formatter(self, value: int) -> int | float: - """Numeric pass-through formatter.""" - if self._cluster_handler.unit_of_measurement != 0: - return self._cluster_handler.summa_formatter(value) - - cooked = ( - float(self._cluster_handler.multiplier * value) - / self._cluster_handler.divisor - ) - return round(cooked, 3) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"TS011F", "ZLinky_TIC", "TICMeter"}, - stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PolledSmartEnergySummation(SmartEnergySummation): - """Polled Smart Energy Metering summation sensor.""" - - _use_custom_polling: bool = True - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier1SmartEnergySummation(PolledSmartEnergySummation): - """Tier 1 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier1_summ_delivered" - _unique_id_suffix = "tier1_summation_delivered" - _attr_translation_key: str = "tier1_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier2SmartEnergySummation(PolledSmartEnergySummation): - """Tier 2 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier2_summ_delivered" - _unique_id_suffix 
= "tier2_summation_delivered" - _attr_translation_key: str = "tier2_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier3SmartEnergySummation(PolledSmartEnergySummation): - """Tier 3 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier3_summ_delivered" - _unique_id_suffix = "tier3_summation_delivered" - _attr_translation_key: str = "tier3_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier4SmartEnergySummation(PolledSmartEnergySummation): - """Tier 4 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier4_summ_delivered" - _unique_id_suffix = "tier4_summation_delivered" - _attr_translation_key: str = "tier4_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier5SmartEnergySummation(PolledSmartEnergySummation): - """Tier 5 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier5_summ_delivered" - _unique_id_suffix = "tier5_summation_delivered" - _attr_translation_key: str = "tier5_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier6SmartEnergySummation(PolledSmartEnergySummation): - """Tier 6 Smart Energy Metering 
summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier6_summ_delivered" - _unique_id_suffix = "tier6_summation_delivered" - _attr_translation_key: str = "tier6_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SmartEnergySummationReceived(PolledSmartEnergySummation): - """Smart Energy Metering summation received sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_summ_received" - _unique_id_suffix = "summation_received" - _attr_translation_key: str = "summation_received" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. - - This attribute only started to be initialized in HA 2024.2.0, - so the entity would be created on the first HA start after the - upgrade for existing devices, as the initialization to see if - an attribute is unsupported happens later in the background. - To avoid creating unnecessary entities for existing devices, - wait until the attribute was properly initialized once for now. 
- """ - if cluster_handlers[0].cluster.get(cls._attribute_name) is None: - return None - return super().create_entity(unique_id, zha_device, cluster_handlers, **kwargs) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_PRESSURE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Pressure(Sensor): - """Pressure sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.PRESSURE - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _attr_native_unit_of_measurement = UnitOfPressure.HPA - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_TEMPERATURE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Temperature(Sensor): - """Temperature Sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.TEMPERATURE - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _divisor = 100 - _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_DEVICE_TEMPERATURE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DeviceTemperature(Sensor): - """Device Temperature Sensor.""" - - _attribute_name = "current_temperature" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.TEMPERATURE - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_translation_key: str = "device_temperature" - _divisor = 100 - _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@MULTI_MATCH(cluster_handler_names="carbon_dioxide_concentration") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class CarbonDioxideConcentration(Sensor): - """Carbon Dioxide Concentration sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.CO2 - _attr_state_class: SensorStateClass = 
SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1e6 - _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION - - -@MULTI_MATCH(cluster_handler_names="carbon_monoxide_concentration") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class CarbonMonoxideConcentration(Sensor): - """Carbon Monoxide Concentration sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.CO - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1e6 - _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION - - -@MULTI_MATCH(generic_ids="cluster_handler_0x042e", stop_on_match_group="voc_level") -@MULTI_MATCH(cluster_handler_names="voc_level", stop_on_match_group="voc_level") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class VOCLevel(Sensor): - """VOC Level sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1e6 - _attr_native_unit_of_measurement = CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - - -@MULTI_MATCH( - cluster_handler_names="voc_level", - models="lumi.airmonitor.acn01", - stop_on_match_group="voc_level", -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PPBVOCLevel(Sensor): - """VOC Level sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = ( - SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS - ) - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1 - _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_BILLION - - -@MULTI_MATCH(cluster_handler_names="pm25") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PM25(Sensor): - """Particulate Matter 2.5 microns or less sensor.""" - - _attribute_name = 
"measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.PM25 - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1 - _attr_native_unit_of_measurement = CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - - -@MULTI_MATCH(cluster_handler_names="formaldehyde_concentration") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class FormaldehydeConcentration(Sensor): - """Formaldehyde Concentration sensor.""" - - _attribute_name = "measured_value" - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_translation_key: str = "formaldehyde" - _decimals = 0 - _multiplier = 1e6 - _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ThermostatHVACAction(Sensor): - """Thermostat HVAC action sensor.""" - - _unique_id_suffix = "hvac_action" - _attr_translation_key: str = "hvac_action" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) + return self.entity_data.entity.native_value @property - def native_value(self) -> str | None: - """Return the current HVAC action.""" - if ( - self._cluster_handler.pi_heating_demand is None - and self._cluster_handler.pi_cooling_demand is None - ): - return self._rm_rs_action - return self._pi_demand_action - - @property - def _rm_rs_action(self) -> HVACAction | None: - """Return the current HVAC action based on running mode and running state.""" - - if (running_state := self._cluster_handler.running_state) is None: + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return entity specific state attributes.""" + entity = self.entity_data.entity + if entity.extra_state_attribute_names is None: return None - rs_heat = ( - self._cluster_handler.RunningState.Heat_State_On - | self._cluster_handler.RunningState.Heat_2nd_Stage_On + if not entity.extra_state_attribute_names <= _EXTRA_STATE_ATTRIBUTES: + _LOGGER.warning( + "Unexpected extra state attributes found for sensor %s: %s", + entity, + entity.extra_state_attribute_names - _EXTRA_STATE_ATTRIBUTES, + ) + + return exclude_none_values( + { + name: entity.state.get(name) + for name in entity.extra_state_attribute_names + } ) - if running_state & rs_heat: - return HVACAction.HEATING - - rs_cool = ( - self._cluster_handler.RunningState.Cool_State_On - | self._cluster_handler.RunningState.Cool_2nd_Stage_On - ) - if running_state & rs_cool: - return HVACAction.COOLING - - running_state = self._cluster_handler.running_state - if running_state and running_state & ( - self._cluster_handler.RunningState.Fan_State_On - | self._cluster_handler.RunningState.Fan_2nd_Stage_On - | self._cluster_handler.RunningState.Fan_3rd_Stage_On - ): - return HVACAction.FAN - - running_state = self._cluster_handler.running_state - if running_state and running_state & 
self._cluster_handler.RunningState.Idle: - return HVACAction.IDLE - - if self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off: - return HVACAction.IDLE - return HVACAction.OFF - - @property - def _pi_demand_action(self) -> HVACAction: - """Return the current HVAC action based on pi_demands.""" - - heating_demand = self._cluster_handler.pi_heating_demand - if heating_demand is not None and heating_demand > 0: - return HVACAction.HEATING - cooling_demand = self._cluster_handler.pi_cooling_demand - if cooling_demand is not None and cooling_demand > 0: - return HVACAction.COOLING - - if self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off: - return HVACAction.IDLE - return HVACAction.OFF - - -@MULTI_MATCH( - cluster_handler_names={CLUSTER_HANDLER_THERMOSTAT}, - manufacturers="Sinope Technologies", - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SinopeHVACAction(ThermostatHVACAction): - """Sinope Thermostat HVAC action sensor.""" - - @property - def _rm_rs_action(self) -> HVACAction: - """Return the current HVAC action based on running mode and running state.""" - - running_mode = self._cluster_handler.running_mode - if running_mode == self._cluster_handler.RunningMode.Heat: - return HVACAction.HEATING - if running_mode == self._cluster_handler.RunningMode.Cool: - return HVACAction.COOLING - - running_state = self._cluster_handler.running_state - if running_state and running_state & ( - self._cluster_handler.RunningState.Fan_State_On - | self._cluster_handler.RunningState.Fan_2nd_Stage_On - | self._cluster_handler.RunningState.Fan_3rd_Stage_On - ): - return HVACAction.FAN - if ( - self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off - and running_mode == self._cluster_handler.SystemMode.Off - ): - return HVACAction.IDLE - return HVACAction.OFF - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BASIC) -# pylint: 
disable-next=hass-invalid-inheritance # needs fixing -class RSSISensor(Sensor): - """RSSI sensor for a device.""" - - _attribute_name = "rssi" - _unique_id_suffix = "rssi" - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_device_class: SensorDeviceClass | None = SensorDeviceClass.SIGNAL_STRENGTH - _attr_native_unit_of_measurement: str | None = SIGNAL_STRENGTH_DECIBELS_MILLIWATT - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_entity_registry_enabled_default = False - _attr_should_poll = True # BaseZhaEntity defaults to False - _attr_translation_key: str = "rssi" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. - - Return entity if it is a supported configuration, otherwise return None - """ - key = f"{CLUSTER_HANDLER_BASIC}_{cls._unique_id_suffix}" - if ZHA_ENTITIES.prevent_entity_creation(Platform.SENSOR, zha_device.ieee, key): - return None - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - return getattr(self._zha_device.device, self._attribute_name) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BASIC) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class LQISensor(RSSISensor): - """LQI sensor for a device.""" - - _attribute_name = "lqi" - _unique_id_suffix = "lqi" - _attr_device_class = None - _attr_native_unit_of_measurement = None - _attr_translation_key = "lqi" - - -@MULTI_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_htnnfasr", - }, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class TimeLeft(Sensor): - """Sensor that displays time left value.""" - - _attribute_name = "timer_time_left" - _unique_id_suffix = "time_left" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION - 
_attr_translation_key: str = "timer_time_left" - _attr_native_unit_of_measurement = UnitOfTime.MINUTES - - -@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class IkeaDeviceRunTime(Sensor): - """Sensor that displays device run time (in minutes).""" - - _attribute_name = "device_run_time" - _unique_id_suffix = "device_run_time" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION - _attr_translation_key: str = "device_run_time" - _attr_native_unit_of_measurement = UnitOfTime.MINUTES - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - - -@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class IkeaFilterRunTime(Sensor): - """Sensor that displays run time of the current filter (in minutes).""" - - _attribute_name = "filter_run_time" - _unique_id_suffix = "filter_run_time" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION - _attr_translation_key: str = "filter_run_time" - _attr_native_unit_of_measurement = UnitOfTime.MINUTES - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - - -class AqaraFeedingSource(types.enum8): - """Aqara pet feeder feeding source.""" - - Feeder = 0x01 - HomeAssistant = 0x02 - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederLastFeedingSource(EnumSensor): - """Sensor that displays the last feeding source of pet feeder.""" - - _attribute_name = "last_feeding_source" - _unique_id_suffix = "last_feeding_source" - _attr_translation_key: str = "last_feeding_source" - _enum = AqaraFeedingSource - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederLastFeedingSize(Sensor): - """Sensor that displays the last 
feeding size of the pet feeder.""" - - _attribute_name = "last_feeding_size" - _unique_id_suffix = "last_feeding_size" - _attr_translation_key: str = "last_feeding_size" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederPortionsDispensed(Sensor): - """Sensor that displays the number of portions dispensed by the pet feeder.""" - - _attribute_name = "portions_dispensed" - _unique_id_suffix = "portions_dispensed" - _attr_translation_key: str = "portions_dispensed_today" - _attr_state_class: SensorStateClass = SensorStateClass.TOTAL_INCREASING - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederWeightDispensed(Sensor): - """Sensor that displays the weight dispensed by the pet feeder.""" - - _attribute_name = "weight_dispensed" - _unique_id_suffix = "weight_dispensed" - _attr_translation_key: str = "weight_dispensed_today" - _attr_native_unit_of_measurement = UnitOfMass.GRAMS - _attr_state_class: SensorStateClass = SensorStateClass.TOTAL_INCREASING - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraSmokeDensityDbm(Sensor): - """Sensor that displays the smoke density of an Aqara smoke sensor in dB/m.""" - - _attribute_name = "smoke_density_dbm" - _unique_id_suffix = "smoke_density_dbm" - _attr_translation_key: str = "smoke_density" - _attr_native_unit_of_measurement = "dB/m" - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_suggested_display_precision: int = 3 - - -class SonoffIlluminationStates(types.enum8): - """Enum for displaying last Illumination state.""" - - Dark = 0x00 - Light = 0x01 - - -@MULTI_MATCH(cluster_handler_names="sonoff_manufacturer", models={"SNZB-06P"}) -# pylint: 
disable-next=hass-invalid-inheritance # needs fixing -class SonoffPresenceSenorIlluminationStatus(EnumSensor): - """Sensor that displays the illumination status the last time peresence was detected.""" - - _attribute_name = "last_illumination_state" - _unique_id_suffix = "last_illumination" - _attr_translation_key: str = "last_illumination_state" - _enum = SonoffIlluminationStates - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PiHeatingDemand(Sensor): - """Sensor that displays the percentage of heating power demanded. - - Optional thermostat attribute. - """ - - _unique_id_suffix = "pi_heating_demand" - _attribute_name = "pi_heating_demand" - _attr_translation_key: str = "pi_heating_demand" - _attr_native_unit_of_measurement = PERCENTAGE - _decimals = 0 - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -class SetpointChangeSourceEnum(types.enum8): - """The source of the setpoint change.""" - - Manual = 0x00 - Schedule = 0x01 - External = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SetpointChangeSource(EnumSensor): - """Sensor that displays the source of the setpoint change. - - Optional thermostat attribute. 
- """ - - _unique_id_suffix = "setpoint_change_source" - _attribute_name = "setpoint_change_source" - _attr_translation_key: str = "setpoint_change_source" - _attr_entity_category = EntityCategory.DIAGNOSTIC - _enum = SetpointChangeSourceEnum - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class WindowCoveringTypeSensor(EnumSensor): - """Sensor that displays the type of a cover device.""" - - _attribute_name: str = WindowCovering.AttributeDefs.window_covering_type.name - _enum = WindowCovering.WindowCoveringType - _unique_id_suffix: str = WindowCovering.AttributeDefs.window_covering_type.name - _attr_translation_key: str = WindowCovering.AttributeDefs.window_covering_type.name - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_icon = "mdi:curtains" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_BASIC, models={"lumi.curtain.agl001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraCurtainMotorPowerSourceSensor(EnumSensor): - """Sensor that displays the power source of the Aqara E1 curtain motor device.""" - - _attribute_name: str = Basic.AttributeDefs.power_source.name - _enum = Basic.PowerSource - _unique_id_suffix: str = Basic.AttributeDefs.power_source.name - _attr_translation_key: str = Basic.AttributeDefs.power_source.name - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_icon = "mdi:battery-positive" - - -class AqaraE1HookState(types.enum8): - """Aqara hook state.""" - - Unlocked = 0x00 - Locked = 0x01 - Locking = 0x02 - Unlocking = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraCurtainHookStateSensor(EnumSensor): - """Representation of a ZHA curtain mode configuration entity.""" - - _attribute_name = "hooks_state" - _enum = AqaraE1HookState - _unique_id_suffix 
= "hooks_state" - _attr_translation_key: str = "hooks_state" - _attr_icon: str = "mdi:hook" - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class BitMapSensor(Sensor): - """A sensor with only state attributes. - - The sensor value will be an aggregate of the state attributes. - """ - - _bitmap: types.bitmap8 | types.bitmap16 - - def formatter(self, _value: int) -> str: - """Summary of all attributes.""" - binary_state_attributes = [ - key for (key, elem) in self.extra_state_attributes.items() if elem - ] - - return "something" if binary_state_attributes else "nothing" - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Bitmap.""" - value = self._cluster_handler.cluster.get(self._attribute_name) - - state_attr = {} - - for bit in list(self._bitmap): - if value is None: - state_attr[bit.name] = False - else: - state_attr[bit.name] = bit in self._bitmap(value) - - return state_attr - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossOpenWindowDetection(EnumSensor): - """Danfoss proprietary attribute. - - Sensor that displays whether the TRV detects an open window using the temperature sensor. 
- """ - - _unique_id_suffix = "open_window_detection" - _attribute_name = "open_window_detection" - _attr_translation_key: str = "open_window_detected" - _attr_icon: str = "mdi:window-open" - _enum = danfoss_thermostat.DanfossOpenWindowDetectionEnum - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossLoadEstimate(Sensor): - """Danfoss proprietary attribute for communicating its estimate of the radiator load.""" - - _unique_id_suffix = "load_estimate" - _attribute_name = "load_estimate" - _attr_translation_key: str = "load_estimate" - _attr_icon: str = "mdi:scale-balance" - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossAdaptationRunStatus(BitMapSensor): - """Danfoss proprietary attribute for showing the status of the adaptation run.""" - - _unique_id_suffix = "adaptation_run_status" - _attribute_name = "adaptation_run_status" - _attr_translation_key: str = "adaptation_run_status" - _attr_entity_category = EntityCategory.DIAGNOSTIC - _bitmap = danfoss_thermostat.DanfossAdaptationRunStatusBitmap - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossPreheatTime(Sensor): - """Danfoss proprietary attribute for communicating the time when it starts pre-heating.""" - - _unique_id_suffix = "preheat_time" - _attribute_name = "preheat_time" - _attr_translation_key: str = "preheat_time" - _attr_icon: str = "mdi:radiator" - _attr_entity_registry_enabled_default = False - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@CONFIG_DIAGNOSTIC_MATCH( - 
cluster_handler_names="diagnostic", - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossSoftwareErrorCode(BitMapSensor): - """Danfoss proprietary attribute for communicating the error code.""" - - _unique_id_suffix = "sw_error_code" - _attribute_name = "sw_error_code" - _attr_translation_key: str = "software_error" - _attr_entity_category = EntityCategory.DIAGNOSTIC - _bitmap = danfoss_thermostat.DanfossSoftwareErrorCodeBitmap - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="diagnostic", - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossMotorStepCounter(Sensor): - """Danfoss proprietary attribute for communicating the motor step counter.""" - - _unique_id_suffix = "motor_step_counter" - _attribute_name = "motor_step_counter" - _attr_translation_key: str = "motor_stepcount" - _attr_entity_category = EntityCategory.DIAGNOSTIC diff --git a/homeassistant/components/zha/siren.py b/homeassistant/components/zha/siren.py index 3aab332f746..9d876d9ca4d 100644 --- a/homeassistant/components/zha/siren.py +++ b/homeassistant/components/zha/siren.py @@ -2,11 +2,18 @@ from __future__ import annotations -from collections.abc import Callable import functools -from typing import TYPE_CHECKING, Any, cast +from typing import Any -from zigpy.zcl.clusters.security import IasWd as WD +from zha.application.const import ( + WARNING_DEVICE_MODE_BURGLAR, + WARNING_DEVICE_MODE_EMERGENCY, + WARNING_DEVICE_MODE_EMERGENCY_PANIC, + WARNING_DEVICE_MODE_FIRE, + WARNING_DEVICE_MODE_FIRE_PANIC, + WARNING_DEVICE_MODE_POLICE_PANIC, +) +from zha.application.platforms.siren import SirenEntityFeature as ZHASirenEntityFeature from homeassistant.components.siren import ( ATTR_DURATION, @@ -17,38 +24,18 @@ from homeassistant.components.siren import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from 
homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later -from .core import discovery -from .core.cluster_handlers.security import IasWdClusterHandler -from .core.const import ( - CLUSTER_HANDLER_IAS_WD, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - WARNING_DEVICE_MODE_BURGLAR, - WARNING_DEVICE_MODE_EMERGENCY, - WARNING_DEVICE_MODE_EMERGENCY_PANIC, - WARNING_DEVICE_MODE_FIRE, - WARNING_DEVICE_MODE_FIRE_PANIC, - WARNING_DEVICE_MODE_POLICE_PANIC, - WARNING_DEVICE_MODE_STOP, - WARNING_DEVICE_SOUND_HIGH, - WARNING_DEVICE_STROBE_HIGH, - WARNING_DEVICE_STROBE_NO, - Strobe, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.SIREN) -DEFAULT_DURATION = 5 # seconds async def async_setup_entry( @@ -64,115 +51,61 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, ZHASiren, entities_to_create ), ) config_entry.async_on_unload(unsub) -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHASiren(ZhaEntity, SirenEntity): +class ZHASiren(ZHAEntity, SirenEntity): """Representation of a ZHA siren.""" - _attr_name: str = "Siren" + _attr_available_tones: list[int | str] | dict[int, str] | None = { + WARNING_DEVICE_MODE_BURGLAR: "Burglar", + WARNING_DEVICE_MODE_FIRE: "Fire", + WARNING_DEVICE_MODE_EMERGENCY: 
"Emergency", + WARNING_DEVICE_MODE_POLICE_PANIC: "Police Panic", + WARNING_DEVICE_MODE_FIRE_PANIC: "Fire Panic", + WARNING_DEVICE_MODE_EMERGENCY_PANIC: "Emergency Panic", + } - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs, - ) -> None: - """Init this siren.""" - self._attr_supported_features = ( - SirenEntityFeature.TURN_ON - | SirenEntityFeature.TURN_OFF - | SirenEntityFeature.DURATION - | SirenEntityFeature.VOLUME_SET - | SirenEntityFeature.TONES - ) - self._attr_available_tones: list[int | str] | dict[int, str] | None = { - WARNING_DEVICE_MODE_BURGLAR: "Burglar", - WARNING_DEVICE_MODE_FIRE: "Fire", - WARNING_DEVICE_MODE_EMERGENCY: "Emergency", - WARNING_DEVICE_MODE_POLICE_PANIC: "Police Panic", - WARNING_DEVICE_MODE_FIRE_PANIC: "Fire Panic", - WARNING_DEVICE_MODE_EMERGENCY_PANIC: "Emergency Panic", - } - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._cluster_handler: IasWdClusterHandler = cast( - IasWdClusterHandler, cluster_handlers[0] - ) - self._attr_is_on: bool = False - self._off_listener: Callable[[], None] | None = None + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA siren.""" + super().__init__(entity_data, **kwargs) + features: SirenEntityFeature = SirenEntityFeature(0) + zha_features: ZHASirenEntityFeature = self.entity_data.entity.supported_features + + if ZHASirenEntityFeature.TURN_ON in zha_features: + features |= SirenEntityFeature.TURN_ON + if ZHASirenEntityFeature.TURN_OFF in zha_features: + features |= SirenEntityFeature.TURN_OFF + if ZHASirenEntityFeature.TONES in zha_features: + features |= SirenEntityFeature.TONES + if ZHASirenEntityFeature.VOLUME_SET in zha_features: + features |= SirenEntityFeature.VOLUME_SET + if ZHASirenEntityFeature.DURATION in zha_features: + features |= SirenEntityFeature.DURATION + + self._attr_supported_features = features + + @property + def is_on(self) -> bool: + 
"""Return True if entity is on.""" + return self.entity_data.entity.is_on + + @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn on siren.""" - if self._off_listener: - self._off_listener() - self._off_listener = None - tone_cache = self._cluster_handler.data_cache.get( - WD.Warning.WarningMode.__name__ - ) - siren_tone = ( - tone_cache.value - if tone_cache is not None - else WARNING_DEVICE_MODE_EMERGENCY - ) - siren_duration = DEFAULT_DURATION - level_cache = self._cluster_handler.data_cache.get( - WD.Warning.SirenLevel.__name__ - ) - siren_level = ( - level_cache.value if level_cache is not None else WARNING_DEVICE_SOUND_HIGH - ) - strobe_cache = self._cluster_handler.data_cache.get(Strobe.__name__) - should_strobe = ( - strobe_cache.value if strobe_cache is not None else Strobe.No_Strobe - ) - strobe_level_cache = self._cluster_handler.data_cache.get( - WD.StrobeLevel.__name__ - ) - strobe_level = ( - strobe_level_cache.value - if strobe_level_cache is not None - else WARNING_DEVICE_STROBE_HIGH - ) - if (duration := kwargs.get(ATTR_DURATION)) is not None: - siren_duration = duration - if (tone := kwargs.get(ATTR_TONE)) is not None: - siren_tone = tone - if (level := kwargs.get(ATTR_VOLUME_LEVEL)) is not None: - siren_level = int(level) - await self._cluster_handler.issue_start_warning( - mode=siren_tone, - warning_duration=siren_duration, - siren_level=siren_level, - strobe=should_strobe, - strobe_duty_cycle=50 if should_strobe else 0, - strobe_intensity=strobe_level, - ) - self._attr_is_on = True - self._off_listener = async_call_later( - self._zha_device.hass, siren_duration, self.async_set_off + await self.entity_data.entity.async_turn_on( + duration=kwargs.get(ATTR_DURATION), + tone=kwargs.get(ATTR_TONE), + volume_level=kwargs.get(ATTR_VOLUME_LEVEL), ) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn off siren.""" - await 
self._cluster_handler.issue_start_warning( - mode=WARNING_DEVICE_MODE_STOP, strobe=WARNING_DEVICE_STROBE_NO - ) - self._attr_is_on = False - self.async_write_ha_state() - - @callback - def async_set_off(self, _) -> None: - """Set is_on to False and write HA state.""" - self._attr_is_on = False - if self._off_listener: - self._off_listener() - self._off_listener = None + await self.entity_data.entity.async_turn_off() self.async_write_ha_state() diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index f25fdf1ebe4..5d81556564a 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -413,7 +413,7 @@ }, "warning_device_squawk": { "name": "Warning device squawk", - "description": "This service uses the WD capabilities to emit a quick audible/visible pulse called a \"squawk\". The squawk command has no effect if the WD is currently active (warning in progress).", + "description": "This action uses the WD capabilities to emit a quick audible/visible pulse called a \"squawk\". The squawk command has no effect if the WD is currently active (warning in progress).", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", @@ -435,7 +435,7 @@ }, "warning_device_warn": { "name": "Warning device starts alert", - "description": "This service starts the operation of the warning device. The warning device alerts the surrounding area by audible (siren) and visual (strobe) signals.", + "description": "This action starts the operation of the warning device. 
The warning device alerts the surrounding area by audible (siren) and visual (strobe) signals.", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", diff --git a/homeassistant/components/zha/switch.py b/homeassistant/components/zha/switch.py index f07d3d4c8e3..cb0268f98e0 100644 --- a/homeassistant/components/zha/switch.py +++ b/homeassistant/components/zha/switch.py @@ -4,44 +4,21 @@ from __future__ import annotations import functools import logging -from typing import TYPE_CHECKING, Any, Self - -from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT, TUYA_PLUG_ONOFF -from zigpy.quirks.v2 import SwitchMetadata -from zigpy.zcl.clusters.closures import ConfigStatus, WindowCovering, WindowCoveringMode -from zigpy.zcl.clusters.general import OnOff -from zigpy.zcl.foundation import Status +from typing import Any from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, EntityCategory, Platform -from homeassistant.core import HomeAssistant, State, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_BASIC, - CLUSTER_HANDLER_COVER, - CLUSTER_HANDLER_INOVELLI, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_THERMOSTAT, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, -) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity, ZhaGroupEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.SWITCH) -GROUP_MATCH = 
functools.partial(ZHA_ENTITIES.group_match, Platform.SWITCH) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.SWITCH + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) _LOGGER = logging.getLogger(__name__) @@ -60,752 +37,28 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, Switch, entities_to_create ), ) config_entry.async_on_unload(unsub) -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) -class Switch(ZhaEntity, SwitchEntity): +class Switch(ZHAEntity, SwitchEntity): """ZHA switch.""" - _attr_translation_key = "switch" - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Initialize the ZHA switch.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] - @property def is_on(self) -> bool: """Return if the switch is on based on the statemachine.""" - if self._on_off_cluster_handler.on_off is None: - return False - return self._on_off_cluster_handler.on_off + return self.entity_data.entity.is_on + @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - await self._on_off_cluster_handler.turn_on() + await self.entity_data.entity.async_turn_on() self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - await self._on_off_cluster_handler.turn_off() + await self.entity_data.entity.async_turn_off() self.async_write_ha_state() - - @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any): - """Handle state update from cluster handler.""" - self.async_write_ha_state() - - async 
def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._on_off_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - async def async_update(self) -> None: - """Attempt to retrieve on off state from the switch.""" - self.debug("Polling current state") - await self._on_off_cluster_handler.get_attribute_value( - "on_off", from_cache=False - ) - - -@GROUP_MATCH() -class SwitchGroup(ZhaGroupEntity, SwitchEntity): - """Representation of a switch group.""" - - def __init__( - self, - entity_ids: list[str], - unique_id: str, - group_id: int, - zha_device: ZHADevice, - **kwargs: Any, - ) -> None: - """Initialize a switch group.""" - super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) - self._available: bool - self._state: bool - group = self.zha_device.gateway.get_group(self._group_id) - self._on_off_cluster_handler = group.endpoint[OnOff.cluster_id] - - @property - def is_on(self) -> bool: - """Return if the switch is on based on the statemachine.""" - return bool(self._state) - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - result = await self._on_off_cluster_handler.on() - if result[1] is not Status.SUCCESS: - return - self._state = True - self.async_write_ha_state() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - result = await self._on_off_cluster_handler.off() - if result[1] is not Status.SUCCESS: - return - self._state = False - self.async_write_ha_state() - - async def async_update(self) -> None: - """Query all members and determine the switch group state.""" - all_states = [self.hass.states.get(x) for x in self._entity_ids] - states: list[State] = list(filter(None, all_states)) - on_states = [state for state in states if state.state == STATE_ON] - - self._state = len(on_states) > 0 - self._available = any(state.state != STATE_UNAVAILABLE for state in 
states) - - -class ZHASwitchConfigurationEntity(ZhaEntity, SwitchEntity): - """Representation of a ZHA switch configuration entity.""" - - _attr_entity_category = EntityCategory.CONFIG - _attribute_name: str - _inverter_attribute_name: str | None = None - _force_inverted: bool = False - _off_value: int = 0 - _on_value: int = 1 - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. - - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - if ENTITY_METADATA not in kwargs and ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(cls._attribute_name) is None - ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, - ) - return None - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this number configuration entity.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: SwitchMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - if entity_metadata.invert_attribute_name: - self._inverter_attribute_name = entity_metadata.invert_attribute_name - if entity_metadata.force_inverted: - self._force_inverted = entity_metadata.force_inverted - self._off_value = entity_metadata.off_value 
- self._on_value = entity_metadata.on_value - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any): - """Handle state update from cluster handler.""" - self.async_write_ha_state() - - @property - def inverted(self) -> bool: - """Return True if the switch is inverted.""" - if self._inverter_attribute_name: - return bool( - self._cluster_handler.cluster.get(self._inverter_attribute_name) - ) - return self._force_inverted - - @property - def is_on(self) -> bool: - """Return if the switch is on based on the statemachine.""" - if self._on_value != 1: - val = self._cluster_handler.cluster.get(self._attribute_name) - val = val == self._on_value - else: - val = bool(self._cluster_handler.cluster.get(self._attribute_name)) - return (not val) if self.inverted else val - - async def async_turn_on_off(self, state: bool) -> None: - """Turn the entity on or off.""" - if self.inverted: - state = not state - if state: - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: self._on_value} - ) - else: - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: self._off_value} - ) - self.async_write_ha_state() - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - await self.async_turn_on_off(True) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - await self.async_turn_on_off(False) - - async def async_update(self) -> None: - """Attempt to retrieve the state of the entity.""" - self.debug("Polling current state") - value = await self._cluster_handler.get_attribute_value( - self._attribute_name, from_cache=False - ) - await self._cluster_handler.get_attribute_value( - self._inverter_attribute_name, from_cache=False - 
) - self.debug("read value=%s, inverted=%s", value, self.inverted) - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_b6wax7g0", - }, -) -class OnOffWindowDetectionFunctionConfigurationEntity(ZHASwitchConfigurationEntity): - """Representation of a ZHA window detection configuration entity.""" - - _unique_id_suffix = "on_off_window_opened_detection" - _attribute_name = "window_detection_function" - _inverter_attribute_name = "window_detection_function_inverter" - _attr_translation_key = "window_detection_function" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.motion.ac02"} -) -class P1MotionTriggerIndicatorSwitch(ZHASwitchConfigurationEntity): - """Representation of a ZHA motion triggering configuration entity.""" - - _unique_id_suffix = "trigger_indicator" - _attribute_name = "trigger_indicator" - _attr_translation_key = "trigger_indicator" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", - models={"lumi.plug.mmeu01", "lumi.plug.maeu01"}, -) -class XiaomiPlugPowerOutageMemorySwitch(ZHASwitchConfigurationEntity): - """Representation of a ZHA power outage memory configuration entity.""" - - _unique_id_suffix = "power_outage_memory" - _attribute_name = "power_outage_memory" - _attr_translation_key = "power_outage_memory" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_BASIC, - manufacturers={"Philips", "Signify Netherlands B.V."}, - models={"SML001", "SML002", "SML003", "SML004"}, -) -class HueMotionTriggerIndicatorSwitch(ZHASwitchConfigurationEntity): - """Representation of a ZHA motion triggering configuration entity.""" - - _unique_id_suffix = "trigger_indicator" - _attribute_name = "trigger_indicator" - _attr_translation_key = "trigger_indicator" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="ikea_airpurifier", - models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, -) -class 
ChildLock(ZHASwitchConfigurationEntity): - """ZHA BinarySensor.""" - - _unique_id_suffix = "child_lock" - _attribute_name = "child_lock" - _attr_translation_key = "child_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="ikea_airpurifier", - models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, -) -class DisableLed(ZHASwitchConfigurationEntity): - """ZHA BinarySensor.""" - - _unique_id_suffix = "disable_led" - _attribute_name = "disable_led" - _attr_translation_key = "disable_led" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliInvertSwitch(ZHASwitchConfigurationEntity): - """Inovelli invert switch control.""" - - _unique_id_suffix = "invert_switch" - _attribute_name = "invert_switch" - _attr_translation_key = "invert_switch" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliSmartBulbMode(ZHASwitchConfigurationEntity): - """Inovelli smart bulb mode control.""" - - _unique_id_suffix = "smart_bulb_mode" - _attribute_name = "smart_bulb_mode" - _attr_translation_key = "smart_bulb_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} -) -class InovelliSmartFanMode(ZHASwitchConfigurationEntity): - """Inovelli smart fan mode control.""" - - _unique_id_suffix = "smart_fan_mode" - _attribute_name = "smart_fan_mode" - _attr_translation_key = "smart_fan_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliDoubleTapUpEnabled(ZHASwitchConfigurationEntity): - """Inovelli double tap up enabled.""" - - _unique_id_suffix = "double_tap_up_enabled" - _attribute_name = "double_tap_up_enabled" - _attr_translation_key = "double_tap_up_enabled" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliDoubleTapDownEnabled(ZHASwitchConfigurationEntity): - """Inovelli double tap down enabled.""" - - _unique_id_suffix = 
"double_tap_down_enabled" - _attribute_name = "double_tap_down_enabled" - _attr_translation_key = "double_tap_down_enabled" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliAuxSwitchScenes(ZHASwitchConfigurationEntity): - """Inovelli unique aux switch scenes.""" - - _unique_id_suffix = "aux_switch_scenes" - _attribute_name = "aux_switch_scenes" - _attr_translation_key = "aux_switch_scenes" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliBindingOffToOnSyncLevel(ZHASwitchConfigurationEntity): - """Inovelli send move to level with on/off to bound devices.""" - - _unique_id_suffix = "binding_off_to_on_sync_level" - _attribute_name = "binding_off_to_on_sync_level" - _attr_translation_key = "binding_off_to_on_sync_level" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliLocalProtection(ZHASwitchConfigurationEntity): - """Inovelli local protection control.""" - - _unique_id_suffix = "local_protection" - _attribute_name = "local_protection" - _attr_translation_key = "local_protection" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliOnOffLEDMode(ZHASwitchConfigurationEntity): - """Inovelli only 1 LED mode control.""" - - _unique_id_suffix = "on_off_led_mode" - _attribute_name = "on_off_led_mode" - _attr_translation_key = "one_led_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliFirmwareProgressLED(ZHASwitchConfigurationEntity): - """Inovelli firmware progress LED control.""" - - _unique_id_suffix = "firmware_progress_led" - _attribute_name = "firmware_progress_led" - _attr_translation_key = "firmware_progress_led" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliRelayClickInOnOffMode(ZHASwitchConfigurationEntity): - """Inovelli relay click in on off mode control.""" - - 
_unique_id_suffix = "relay_click_in_on_off_mode" - _attribute_name = "relay_click_in_on_off_mode" - _attr_translation_key = "relay_click_in_on_off_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliDisableDoubleTapClearNotificationsMode(ZHASwitchConfigurationEntity): - """Inovelli disable clear notifications double tap control.""" - - _unique_id_suffix = "disable_clear_notifications_double_tap" - _attribute_name = "disable_clear_notifications_double_tap" - _attr_translation_key = "disable_clear_notifications_double_tap" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -class AqaraPetFeederLEDIndicator(ZHASwitchConfigurationEntity): - """Representation of a LED indicator configuration entity.""" - - _unique_id_suffix = "disable_led_indicator" - _attribute_name = "disable_led_indicator" - _attr_translation_key = "led_indicator" - _force_inverted = True - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -class AqaraPetFeederChildLock(ZHASwitchConfigurationEntity): - """Representation of a child lock configuration entity.""" - - _unique_id_suffix = "child_lock" - _attribute_name = "child_lock" - _attr_translation_key = "child_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF -) -class TuyaChildLockSwitch(ZHASwitchConfigurationEntity): - """Representation of a child lock configuration entity.""" - - _unique_id_suffix = "child_lock" - _attribute_name = "child_lock" - _attr_translation_key = "child_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatWindowDetection(ZHASwitchConfigurationEntity): - """Representation of an Aqara thermostat window detection configuration entity.""" - - _unique_id_suffix = "window_detection" - _attribute_name = "window_detection" - 
_attr_translation_key = "window_detection" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatValveDetection(ZHASwitchConfigurationEntity): - """Representation of an Aqara thermostat valve detection configuration entity.""" - - _unique_id_suffix = "valve_detection" - _attribute_name = "valve_detection" - _attr_translation_key = "valve_detection" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatChildLock(ZHASwitchConfigurationEntity): - """Representation of an Aqara thermostat child lock configuration entity.""" - - _unique_id_suffix = "child_lock" - _attribute_name = "child_lock" - _attr_translation_key = "child_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraHeartbeatIndicator(ZHASwitchConfigurationEntity): - """Representation of a heartbeat indicator configuration entity for Aqara smoke sensors.""" - - _unique_id_suffix = "heartbeat_indicator" - _attribute_name = "heartbeat_indicator" - _attr_translation_key = "heartbeat_indicator" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraLinkageAlarm(ZHASwitchConfigurationEntity): - """Representation of a linkage alarm configuration entity for Aqara smoke sensors.""" - - _unique_id_suffix = "linkage_alarm" - _attribute_name = "linkage_alarm" - _attr_translation_key = "linkage_alarm" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraBuzzerManualMute(ZHASwitchConfigurationEntity): - """Representation of a buzzer manual mute configuration entity for Aqara smoke sensors.""" - - _unique_id_suffix = "buzzer_manual_mute" - _attribute_name = "buzzer_manual_mute" - _attr_translation_key = "buzzer_manual_mute" - - -@CONFIG_DIAGNOSTIC_MATCH( - 
cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraBuzzerManualAlarm(ZHASwitchConfigurationEntity): - """Representation of a buzzer manual mute configuration entity for Aqara smoke sensors.""" - - _unique_id_suffix = "buzzer_manual_alarm" - _attribute_name = "buzzer_manual_alarm" - _attr_translation_key = "buzzer_manual_alarm" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) -class WindowCoveringInversionSwitch(ZHASwitchConfigurationEntity): - """Representation of a switch that controls inversion for window covering devices. - - This is necessary because this cluster uses 2 attributes to control inversion. - """ - - _unique_id_suffix = "inverted" - _attribute_name = WindowCovering.AttributeDefs.config_status.name - _attr_translation_key = "inverted" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - window_covering_mode_attr = ( - WindowCovering.AttributeDefs.window_covering_mode.name - ) - # this entity needs 2 attributes to function - if ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(cls._attribute_name) is None - or window_covering_mode_attr - in cluster_handler.cluster.unsupported_attributes - or window_covering_mode_attr - not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(window_covering_mode_attr) is None - ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, - ) - return None - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - @property - def is_on(self) -> bool: - """Return if the switch is on based on the statemachine.""" - config_status = ConfigStatus( - self._cluster_handler.cluster.get(self._attribute_name) - ) - return ConfigStatus.Open_up_commands_reversed in config_status - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - await self._async_on_off(True) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - await self._async_on_off(False) - - async def async_update(self) -> None: - """Attempt to retrieve the state of the entity.""" - self.debug("Polling current state") - await self._cluster_handler.get_attributes( - [ - self._attribute_name, - WindowCovering.AttributeDefs.window_covering_mode.name, - ], - from_cache=False, - only_cache=False, - ) - self.async_write_ha_state() - - async def _async_on_off(self, invert: bool) -> None: - """Turn the entity on or off.""" - name: str = WindowCovering.AttributeDefs.window_covering_mode.name - current_mode: WindowCoveringMode = WindowCoveringMode( - 
self._cluster_handler.cluster.get(name) - ) - send_command: bool = False - if invert and WindowCoveringMode.Motor_direction_reversed not in current_mode: - current_mode |= WindowCoveringMode.Motor_direction_reversed - send_command = True - elif not invert and WindowCoveringMode.Motor_direction_reversed in current_mode: - current_mode &= ~WindowCoveringMode.Motor_direction_reversed - send_command = True - if send_command: - await self._cluster_handler.write_attributes_safe({name: current_mode}) - await self.async_update() - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} -) -class AqaraE1CurtainMotorHooksLockedSwitch(ZHASwitchConfigurationEntity): - """Representation of a switch that controls whether the curtain motor hooks are locked.""" - - _unique_id_suffix = "hooks_lock" - _attribute_name = "hooks_lock" - _attr_translation_key = "hooks_locked" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossExternalOpenWindowDetected(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for communicating an open window.""" - - _unique_id_suffix = "external_open_window_detected" - _attribute_name: str = "external_open_window_detected" - _attr_translation_key: str = "external_window_sensor" - _attr_icon: str = "mdi:window-open" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossWindowOpenFeature(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute enabling open window detection.""" - - _unique_id_suffix = "window_open_feature" - _attribute_name: str = "window_open_feature" - _attr_translation_key: str = "use_internal_window_detection" - _attr_icon: str = "mdi:window-open" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class 
DanfossMountingModeControl(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for switching to mounting mode.""" - - _unique_id_suffix = "mounting_mode_control" - _attribute_name: str = "mounting_mode_control" - _attr_translation_key: str = "mounting_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossRadiatorCovered(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for communicating full usage of the external temperature sensor.""" - - _unique_id_suffix = "radiator_covered" - _attribute_name: str = "radiator_covered" - _attr_translation_key: str = "prioritize_external_temperature_sensor" - _attr_icon: str = "mdi:thermometer" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossHeatAvailable(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for communicating available heat.""" - - _unique_id_suffix = "heat_available" - _attribute_name: str = "heat_available" - _attr_translation_key: str = "heat_available" - _attr_icon: str = "mdi:water-boiler" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossLoadBalancingEnable(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for enabling load balancing.""" - - _unique_id_suffix = "load_balancing_enable" - _attribute_name: str = "load_balancing_enable" - _attr_translation_key: str = "use_load_balancing" - _attr_icon: str = "mdi:scale-balance" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossAdaptationRunSettings(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for enabling daily adaptation run. - - Actually a bitmap, but only the first bit is used. 
- """ - - _unique_id_suffix = "adaptation_run_settings" - _attribute_name: str = "adaptation_run_settings" - _attr_translation_key: str = "adaptation_run_enabled" diff --git a/homeassistant/components/zha/update.py b/homeassistant/components/zha/update.py index 0cb80d13119..e12d048b190 100644 --- a/homeassistant/components/zha/update.py +++ b/homeassistant/components/zha/update.py @@ -5,11 +5,10 @@ from __future__ import annotations import functools import logging import math -from typing import TYPE_CHECKING, Any +from typing import Any -from zigpy.ota import OtaImageWithMetadata -from zigpy.zcl.clusters.general import Ota -from zigpy.zcl.foundation import Status +from zha.exceptions import ZHAException +from zigpy.application import ControllerApplication from homeassistant.components.update import ( UpdateDeviceClass, @@ -17,8 +16,8 @@ from homeassistant.components.update import ( UpdateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -27,24 +26,17 @@ from homeassistant.helpers.update_coordinator import ( DataUpdateCoordinator, ) -from .core import discovery -from .core.const import CLUSTER_HANDLER_OTA, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED -from .core.helpers import get_zha_data, get_zha_gateway -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from zigpy.application import ControllerApplication - - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice +from .entity import ZHAEntity +from .helpers import ( + SIGNAL_ADD_ENTITIES, + EntityData, + async_add_entities as 
zha_async_add_entities, + get_zha_data, + get_zha_gateway, +) _LOGGER = logging.getLogger(__name__) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.UPDATE -) - async def async_setup_entry( hass: HomeAssistant, @@ -53,20 +45,20 @@ async def async_setup_entry( ) -> None: """Set up the Zigbee Home Automation update from config entry.""" zha_data = get_zha_data(hass) + if zha_data.update_coordinator is None: + zha_data.update_coordinator = ZHAFirmwareUpdateCoordinator( + hass, get_zha_gateway(hass).application_controller + ) entities_to_create = zha_data.platforms[Platform.UPDATE] - coordinator = ZHAFirmwareUpdateCoordinator( - hass, get_zha_gateway(hass).application_controller - ) - unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, + zha_async_add_entities, async_add_entities, + ZHAFirmwareUpdateEntity, entities_to_create, - coordinator=coordinator, ), ) config_entry.async_on_unload(unsub) @@ -93,14 +85,11 @@ class ZHAFirmwareUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disa await self.controller_application.ota.broadcast_notify(jitter=100) -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_OTA) class ZHAFirmwareUpdateEntity( - ZhaEntity, CoordinatorEntity[ZHAFirmwareUpdateCoordinator], UpdateEntity + ZHAEntity, CoordinatorEntity[ZHAFirmwareUpdateCoordinator], UpdateEntity ): """Representation of a ZHA firmware update entity.""" - _unique_id_suffix = "firmware_update" - _attr_entity_category = EntityCategory.CONFIG _attr_device_class = UpdateDeviceClass.FIRMWARE _attr_supported_features = ( UpdateEntityFeature.INSTALL @@ -108,113 +97,70 @@ class ZHAFirmwareUpdateEntity( | UpdateEntityFeature.SPECIFIC_VERSION ) - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - channels: list[ClusterHandler], - coordinator: ZHAFirmwareUpdateCoordinator, - **kwargs: Any, - ) -> None: - """Initialize the ZHA update entity.""" - 
super().__init__(unique_id, zha_device, channels, **kwargs) - CoordinatorEntity.__init__(self, coordinator) + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA firmware update entity.""" + zha_data = get_zha_data(entity_data.device_proxy.gateway_proxy.hass) + assert zha_data.update_coordinator is not None - self._ota_cluster_handler: ClusterHandler = self.cluster_handlers[ - CLUSTER_HANDLER_OTA - ] - self._attr_installed_version: str | None = self._get_cluster_version() - self._attr_latest_version = self._attr_installed_version - self._latest_firmware: OtaImageWithMetadata | None = None + super().__init__(entity_data, coordinator=zha_data.update_coordinator, **kwargs) + CoordinatorEntity.__init__(self, zha_data.update_coordinator) - def _get_cluster_version(self) -> str | None: - """Synchronize current file version with the cluster.""" + @property + def installed_version(self) -> str | None: + """Version installed and in use.""" + return self.entity_data.entity.installed_version - if self._ota_cluster_handler.current_file_version is not None: - return f"0x{self._ota_cluster_handler.current_file_version:08x}" + @property + def in_progress(self) -> bool | int | None: + """Update installation progress. - return None + Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. - @callback - def attribute_updated(self, attrid: int, name: str, value: Any) -> None: - """Handle attribute updates on the OTA cluster.""" - if attrid == Ota.AttributeDefs.current_file_version.id: - self._attr_installed_version = f"0x{value:08x}" - self.async_write_ha_state() + Can either return a boolean (True if in progress, False if not) + or an integer to indicate the progress in from 0 to 100%.
+ """ + if not self.entity_data.entity.in_progress: + return self.entity_data.entity.in_progress - @callback - def device_ota_update_available( - self, image: OtaImageWithMetadata, current_file_version: int - ) -> None: - """Handle ota update available signal from Zigpy.""" - self._latest_firmware = image - self._attr_latest_version = f"0x{image.version:08x}" - self._attr_installed_version = f"0x{current_file_version:08x}" + # Stay in an indeterminate state until we actually send something + if self.entity_data.entity.progress == 0: + return True - if image.metadata.changelog: - self._attr_release_summary = image.metadata.changelog + # Rescale 0-100% to 2-100% to avoid 0 and 1 colliding with None, False, and True + return int(math.ceil(2 + 98 * self.entity_data.entity.progress / 100)) - self.async_write_ha_state() + @property + def latest_version(self) -> str | None: + """Latest version available for install.""" + return self.entity_data.entity.latest_version - @callback - def _update_progress(self, current: int, total: int, progress: float) -> None: - """Update install progress on event.""" - # If we are not supposed to be updating, do nothing - if self._attr_in_progress is False: - return + @property + def release_summary(self) -> str | None: + """Summary of the release notes or changelog. - # Remap progress to 2-100 to avoid 0 and 1 - self._attr_in_progress = int(math.ceil(2 + 98 * progress / 100)) - self.async_write_ha_state() + This is not suitable for long changelogs, but merely suitable + for a short excerpt update description of max 255 characters. + """ + return self.entity_data.entity.release_summary + @property + def release_url(self) -> str | None: + """URL to the full release notes of the latest version available.""" + return self.entity_data.entity.release_url + + # We explicitly convert ZHA exceptions to HA exceptions here so there is no need to + # use the `@convert_zha_error_to_ha_error` decorator. 
async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: """Install an update.""" - assert self._latest_firmware is not None - - # Set the progress to an indeterminate state - self._attr_in_progress = True - self.async_write_ha_state() - try: - result = await self.zha_device.device.update_firmware( - image=self._latest_firmware, - progress_callback=self._update_progress, - ) - except Exception as ex: - raise HomeAssistantError(f"Update was not successful: {ex}") from ex - - # If we tried to install firmware that is no longer compatible with the device, - # bail out - if result == Status.NO_IMAGE_AVAILABLE: - self._attr_latest_version = self._attr_installed_version + await self.entity_data.entity.async_install(version=version, backup=backup) + except ZHAException as exc: + raise HomeAssistantError(exc) from exc + finally: self.async_write_ha_state() - # If the update finished but was not successful, we should also throw an error - if result != Status.SUCCESS: - raise HomeAssistantError(f"Update was not successful: {result}") - - # Clear the state - self._latest_firmware = None - self._attr_in_progress = False - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Call when entity is added.""" - await super().async_added_to_hass() - - # OTA events are sent by the device - self.zha_device.device.add_listener(self) - self.async_accept_signal( - self._ota_cluster_handler, SIGNAL_ATTR_UPDATED, self.attribute_updated - ) - - async def async_will_remove_from_hass(self) -> None: - """Call when entity will be removed.""" - await super().async_will_remove_from_hass() - self._attr_in_progress = False - async def async_update(self) -> None: """Update the entity.""" await CoordinatorEntity.async_update(self) diff --git a/homeassistant/components/zha/websocket_api.py b/homeassistant/components/zha/websocket_api.py index 1a51a06243e..0d4296e4b22 100644 --- a/homeassistant/components/zha/websocket_api.py +++ 
b/homeassistant/components/zha/websocket_api.py @@ -7,27 +7,7 @@ import logging from typing import TYPE_CHECKING, Any, Literal, NamedTuple, cast import voluptuous as vol -import zigpy.backups -from zigpy.config import CONF_DEVICE -from zigpy.config.validators import cv_boolean -from zigpy.types.named import EUI64, KeyData -from zigpy.zcl.clusters.security import IasAce -import zigpy.zdo.types as zdo_types - -from homeassistant.components import websocket_api -from homeassistant.const import ATTR_COMMAND, ATTR_ID, ATTR_NAME -from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.helpers import entity_registry as er -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.service import async_register_admin_service - -from .api import ( - async_change_channel, - async_get_active_network_settings, - async_get_radio_type, -) -from .core.const import ( +from zha.application.const import ( ATTR_ARGS, ATTR_ATTRIBUTE, ATTR_CLUSTER_ID, @@ -46,13 +26,51 @@ from .core.const import ( ATTR_WARNING_DEVICE_STROBE, ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE, ATTR_WARNING_DEVICE_STROBE_INTENSITY, - BINDINGS, CLUSTER_COMMAND_SERVER, CLUSTER_COMMANDS_CLIENT, CLUSTER_COMMANDS_SERVER, - CLUSTER_HANDLER_IAS_WD, CLUSTER_TYPE_IN, CLUSTER_TYPE_OUT, + WARNING_DEVICE_MODE_EMERGENCY, + WARNING_DEVICE_SOUND_HIGH, + WARNING_DEVICE_SQUAWK_MODE_ARMED, + WARNING_DEVICE_STROBE_HIGH, + WARNING_DEVICE_STROBE_YES, + ZHA_CLUSTER_HANDLER_MSG, +) +from zha.application.gateway import Gateway +from zha.application.helpers import ( + async_is_bindable_target, + convert_install_code, + get_matched_clusters, + qr_to_install_code, +) +from zha.zigbee.cluster_handlers.const import CLUSTER_HANDLER_IAS_WD +from zha.zigbee.device import Device +from zha.zigbee.group import GroupMemberReference +import zigpy.backups +from zigpy.config import CONF_DEVICE +from zigpy.config.validators import 
cv_boolean +from zigpy.types.named import EUI64, KeyData +from zigpy.zcl.clusters.security import IasAce +import zigpy.zdo.types as zdo_types + +from homeassistant.components import websocket_api +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_COMMAND, ATTR_ID, ATTR_NAME +from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.service import async_register_admin_service +from homeassistant.helpers.typing import VolDictType, VolSchemaType + +from .api import ( + async_change_channel, + async_get_active_network_settings, + async_get_radio_type, +) +from .const import ( CUSTOM_CONFIGURATION, DOMAIN, EZSP_OVERWRITE_EUI64, @@ -60,33 +78,24 @@ from .core.const import ( GROUP_IDS, GROUP_NAME, MFG_CLUSTER_ID_START, - WARNING_DEVICE_MODE_EMERGENCY, - WARNING_DEVICE_SOUND_HIGH, - WARNING_DEVICE_SQUAWK_MODE_ARMED, - WARNING_DEVICE_STROBE_HIGH, - WARNING_DEVICE_STROBE_YES, ZHA_ALARM_OPTIONS, - ZHA_CLUSTER_HANDLER_MSG, - ZHA_CONFIG_SCHEMAS, + ZHA_OPTIONS, ) -from .core.gateway import EntityReference -from .core.group import GroupMember -from .core.helpers import ( +from .helpers import ( + CONF_ZHA_ALARM_SCHEMA, + CONF_ZHA_OPTIONS_SCHEMA, + EntityReference, + ZHAGatewayProxy, async_cluster_exists, - async_is_bindable_target, cluster_command_schema_to_vol_schema, - convert_install_code, - get_matched_clusters, + get_config_entry, get_zha_gateway, - qr_to_install_code, + get_zha_gateway_proxy, ) if TYPE_CHECKING: from homeassistant.components.websocket_api.connection import ActiveConnection - from .core.device import ZHADevice - from .core.gateway import ZHAGateway - _LOGGER = logging.getLogger(__name__) TYPE = "type" @@ -104,6 +113,8 @@ ATTR_SOURCE_IEEE = "source_ieee" ATTR_TARGET_IEEE = "target_ieee" ATTR_QR_CODE = 
"qr_code" +BINDINGS = "bindings" + SERVICE_PERMIT = "permit" SERVICE_REMOVE = "remove" SERVICE_SET_ZIGBEE_CLUSTER_ATTRIBUTE = "set_zigbee_cluster_attribute" @@ -126,7 +137,7 @@ def _ensure_list_if_present[_T](value: _T | None) -> list[_T] | list[Any] | None return cast("list[_T]", value) if isinstance(value, list) else [value] -SERVICE_PERMIT_PARAMS = { +SERVICE_PERMIT_PARAMS: VolDictType = { vol.Optional(ATTR_IEEE): IEEE_SCHEMA, vol.Optional(ATTR_DURATION, default=60): vol.All( vol.Coerce(int), vol.Range(0, 254) @@ -138,7 +149,7 @@ SERVICE_PERMIT_PARAMS = { vol.Exclusive(ATTR_QR_CODE, "install_code"): vol.All(cv.string, qr_to_install_code), } -SERVICE_SCHEMAS = { +SERVICE_SCHEMAS: dict[str, VolSchemaType] = { SERVICE_PERMIT: vol.Schema( vol.All( cv.deprecated(ATTR_IEEE_ADDRESS, replacement_key=ATTR_IEEE), @@ -233,6 +244,12 @@ SERVICE_SCHEMAS = { } +ZHA_CONFIG_SCHEMAS = { + ZHA_OPTIONS: CONF_ZHA_OPTIONS_SCHEMA, + ZHA_ALARM_OPTIONS: CONF_ZHA_ALARM_SCHEMA, +} + + class ClusterBinding(NamedTuple): """Describes a cluster binding.""" @@ -242,9 +259,9 @@ class ClusterBinding(NamedTuple): endpoint_id: int -def _cv_group_member(value: dict[str, Any]) -> GroupMember: +def _cv_group_member(value: dict[str, Any]) -> GroupMemberReference: """Transform a group member.""" - return GroupMember( + return GroupMemberReference( ieee=value[ATTR_IEEE], endpoint_id=value[ATTR_ENDPOINT_ID], ) @@ -305,7 +322,7 @@ async def websocket_permit_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Permit ZHA zigbee devices.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) duration: int = msg[ATTR_DURATION] ieee: EUI64 | None = msg.get(ATTR_IEEE) @@ -320,28 +337,30 @@ async def websocket_permit_devices( @callback def async_cleanup() -> None: """Remove signal listener and turn off debug mode.""" - zha_gateway.async_disable_debug_mode() + zha_gateway_proxy.async_disable_debug_mode() remove_dispatcher_function() 
connection.subscriptions[msg["id"]] = async_cleanup - zha_gateway.async_enable_debug_mode() + zha_gateway_proxy.async_enable_debug_mode() src_ieee: EUI64 link_key: KeyData if ATTR_SOURCE_IEEE in msg: src_ieee = msg[ATTR_SOURCE_IEEE] link_key = msg[ATTR_INSTALL_CODE] _LOGGER.debug("Allowing join for %s device with link key", src_ieee) - await zha_gateway.application_controller.permit_with_link_key( + await zha_gateway_proxy.gateway.application_controller.permit_with_link_key( time_s=duration, node=src_ieee, link_key=link_key ) elif ATTR_QR_CODE in msg: src_ieee, link_key = msg[ATTR_QR_CODE] _LOGGER.debug("Allowing join for %s device with link key", src_ieee) - await zha_gateway.application_controller.permit_with_link_key( + await zha_gateway_proxy.gateway.application_controller.permit_with_link_key( time_s=duration, node=src_ieee, link_key=link_key ) else: - await zha_gateway.application_controller.permit(time_s=duration, node=ieee) + await zha_gateway_proxy.gateway.application_controller.permit( + time_s=duration, node=ieee + ) connection.send_result(msg[ID]) @@ -352,26 +371,26 @@ async def websocket_get_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices.""" - zha_gateway = get_zha_gateway(hass) - devices = [device.zha_device_info for device in zha_gateway.devices.values()] + zha_gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + devices = [ + device.zha_device_info for device in zha_gateway_proxy.device_proxies.values() + ] connection.send_result(msg[ID], devices) @callback -def _get_entity_name( - zha_gateway: ZHAGateway, entity_ref: EntityReference -) -> str | None: +def _get_entity_name(zha_gateway: Gateway, entity_ref: EntityReference) -> str | None: entity_registry = er.async_get(zha_gateway.hass) - entry = entity_registry.async_get(entity_ref.reference_id) + entry = entity_registry.async_get(entity_ref.ha_entity_id) return entry.name if entry else None @callback def 
_get_entity_original_name( - zha_gateway: ZHAGateway, entity_ref: EntityReference + zha_gateway: Gateway, entity_ref: EntityReference ) -> str | None: entity_registry = er.async_get(zha_gateway.hass) - entry = entity_registry.async_get(entity_ref.reference_id) + entry = entity_registry.async_get(entity_ref.ha_entity_id) return entry.original_name if entry else None @@ -382,32 +401,36 @@ async def websocket_get_groupable_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices that can be grouped.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) - devices = [device for device in zha_gateway.devices.values() if device.is_groupable] + devices = [ + device + for device in zha_gateway_proxy.device_proxies.values() + if device.device.is_groupable + ] groupable_devices: list[dict[str, Any]] = [] for device in devices: - entity_refs = zha_gateway.device_registry[device.ieee] + entity_refs = zha_gateway_proxy.ha_entity_refs[device.device.ieee] groupable_devices.extend( { "endpoint_id": ep_id, "entities": [ { - "name": _get_entity_name(zha_gateway, entity_ref), + "name": _get_entity_name(zha_gateway_proxy, entity_ref), "original_name": _get_entity_original_name( - zha_gateway, entity_ref + zha_gateway_proxy, entity_ref ), } for entity_ref in entity_refs - if list(entity_ref.cluster_handlers.values())[ + if list(entity_ref.entity_data.entity.cluster_handlers.values())[ 0 ].cluster.endpoint.endpoint_id == ep_id ], "device": device.zha_device_info, } - for ep_id in device.async_get_groupable_endpoints() + for ep_id in device.device.async_get_groupable_endpoints() ) connection.send_result(msg[ID], groupable_devices) @@ -420,8 +443,8 @@ async def websocket_get_groups( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA groups.""" - zha_gateway = get_zha_gateway(hass) - groups = [group.group_info for group in zha_gateway.groups.values()] + 
zha_gateway_proxy = get_zha_gateway_proxy(hass) + groups = [group.group_info for group in zha_gateway_proxy.group_proxies.values()] connection.send_result(msg[ID], groups) @@ -437,10 +460,10 @@ async def websocket_get_device( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) ieee: EUI64 = msg[ATTR_IEEE] - if not (zha_device := zha_gateway.devices.get(ieee)): + if not (zha_device := zha_gateway_proxy.device_proxies.get(ieee)): connection.send_message( websocket_api.error_message( msg[ID], websocket_api.ERR_NOT_FOUND, "ZHA Device not found" @@ -464,10 +487,10 @@ async def websocket_get_group( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA group.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - if not (zha_group := zha_gateway.groups.get(group_id)): + if not (zha_group := zha_gateway_proxy.group_proxies.get(group_id)): connection.send_message( websocket_api.error_message( msg[ID], websocket_api.ERR_NOT_FOUND, "ZHA Group not found" @@ -493,13 +516,17 @@ async def websocket_add_group( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Add a new ZHA group.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway = get_zha_gateway_proxy(hass) group_name: str = msg[GROUP_NAME] group_id: int | None = msg.get(GROUP_ID) - members: list[GroupMember] | None = msg.get(ATTR_MEMBERS) - group = await zha_gateway.async_create_zigpy_group(group_name, members, group_id) + members: list[GroupMemberReference] | None = msg.get(ATTR_MEMBERS) + group = await zha_gateway.gateway.async_create_zigpy_group( + group_name, members, group_id + ) assert group - connection.send_result(msg[ID], group.group_info) + connection.send_result( + msg[ID], zha_gateway.group_proxies[group.group_id].group_info + ) 
@websocket_api.require_admin @@ -514,17 +541,18 @@ async def websocket_remove_groups( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Remove the specified ZHA groups.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway = get_zha_gateway_proxy(hass) group_ids: list[int] = msg[GROUP_IDS] if len(group_ids) > 1: tasks = [ - zha_gateway.async_remove_zigpy_group(group_id) for group_id in group_ids + zha_gateway.gateway.async_remove_zigpy_group(group_id) + for group_id in group_ids ] await asyncio.gather(*tasks) else: - await zha_gateway.async_remove_zigpy_group(group_ids[0]) - ret_groups = [group.group_info for group in zha_gateway.groups.values()] + await zha_gateway.gateway.async_remove_zigpy_group(group_ids[0]) + ret_groups = [group.group_info for group in zha_gateway.group_proxies.values()] connection.send_result(msg[ID], ret_groups) @@ -542,8 +570,9 @@ async def websocket_add_group_members( ) -> None: """Add members to a ZHA group.""" zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - members: list[GroupMember] = msg[ATTR_MEMBERS] + members: list[GroupMemberReference] = msg[ATTR_MEMBERS] if not (zha_group := zha_gateway.groups.get(group_id)): connection.send_message( @@ -554,8 +583,9 @@ async def websocket_add_group_members( return await zha_group.async_add_members(members) - ret_group = zha_group.group_info - connection.send_result(msg[ID], ret_group) + ret_group = zha_gateway_proxy.get_group_proxy(group_id) + assert ret_group + connection.send_result(msg[ID], ret_group.group_info) @websocket_api.require_admin @@ -572,8 +602,9 @@ async def websocket_remove_group_members( ) -> None: """Remove members from a ZHA group.""" zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - members: list[GroupMember] = msg[ATTR_MEMBERS] + members: list[GroupMemberReference] = msg[ATTR_MEMBERS] if not (zha_group := 
zha_gateway.groups.get(group_id)): connection.send_message( @@ -584,8 +615,9 @@ async def websocket_remove_group_members( return await zha_group.async_remove_members(members) - ret_group = zha_group.group_info - connection.send_result(msg[ID], ret_group) + ret_group = zha_gateway_proxy.get_group_proxy(group_id) + assert ret_group + connection.send_result(msg[ID], ret_group.group_info) @websocket_api.require_admin @@ -602,7 +634,7 @@ async def websocket_reconfigure_node( """Reconfigure a ZHA nodes entities by its ieee address.""" zha_gateway = get_zha_gateway(hass) ieee: EUI64 = msg[ATTR_IEEE] - device: ZHADevice | None = zha_gateway.get_device(ieee) + device: Device | None = zha_gateway.get_device(ieee) async def forward_messages(data): """Forward events to websocket.""" @@ -864,14 +896,15 @@ async def websocket_get_bindable_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Directly bind devices.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) source_ieee: EUI64 = msg[ATTR_IEEE] - source_device = zha_gateway.get_device(source_ieee) + source_device = zha_gateway_proxy.device_proxies.get(source_ieee) + assert source_device is not None devices = [ device.zha_device_info - for device in zha_gateway.devices.values() - if async_is_bindable_target(source_device, device) + for device in zha_gateway_proxy.device_proxies.values() + if async_is_bindable_target(source_device.device, device.device) ] _LOGGER.debug( @@ -992,7 +1025,7 @@ async def websocket_unbind_group( async def async_binding_operation( - zha_gateway: ZHAGateway, + zha_gateway: Gateway, source_ieee: EUI64, target_ieee: EUI64, operation: zdo_types.ZDOCmd, @@ -1046,7 +1079,7 @@ async def websocket_get_configuration( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA configuration.""" - zha_gateway = get_zha_gateway(hass) + config_entry: ConfigEntry = get_config_entry(hass) import 
voluptuous_serialize # pylint: disable=import-outside-toplevel def custom_serializer(schema: Any) -> Any: @@ -1069,9 +1102,9 @@ async def websocket_get_configuration( data["schemas"][section] = voluptuous_serialize.convert( schema, custom_serializer=custom_serializer ) - data["data"][section] = zha_gateway.config_entry.options.get( - CUSTOM_CONFIGURATION, {} - ).get(section, {}) + data["data"][section] = config_entry.options.get(CUSTOM_CONFIGURATION, {}).get( + section, {} + ) # send default values for unconfigured options for entry in data["schemas"][section]: @@ -1093,8 +1126,8 @@ async def websocket_update_zha_configuration( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Update the ZHA configuration.""" - zha_gateway = get_zha_gateway(hass) - options = zha_gateway.config_entry.options + config_entry: ConfigEntry = get_config_entry(hass) + options = config_entry.options data_to_save = {**options, CUSTOM_CONFIGURATION: msg["data"]} for section, schema in ZHA_CONFIG_SCHEMAS.items(): @@ -1125,10 +1158,8 @@ async def websocket_update_zha_configuration( data_to_save, ) - hass.config_entries.async_update_entry( - zha_gateway.config_entry, options=data_to_save - ) - status = await hass.config_entries.async_reload(zha_gateway.config_entry.entry_id) + hass.config_entries.async_update_entry(config_entry, options=data_to_save) + status = await hass.config_entries.async_reload(config_entry.entry_id) connection.send_result(msg[ID], status) @@ -1141,10 +1172,11 @@ async def websocket_get_network_settings( """Get ZHA network settings.""" backup = async_get_active_network_settings(hass) zha_gateway = get_zha_gateway(hass) + config_entry: ConfigEntry = get_config_entry(hass) connection.send_result( msg[ID], { - "radio_type": async_get_radio_type(hass, zha_gateway.config_entry).name, + "radio_type": async_get_radio_type(hass, config_entry).name, "device": zha_gateway.application_controller.config[CONF_DEVICE], "settings": backup.as_dict(), }, 
@@ -1279,12 +1311,8 @@ def async_load_api(hass: HomeAssistant) -> None: """Remove a node from the network.""" zha_gateway = get_zha_gateway(hass) ieee: EUI64 = service.data[ATTR_IEEE] - zha_device: ZHADevice | None = zha_gateway.get_device(ieee) - if zha_device is not None and zha_device.is_active_coordinator: - _LOGGER.info("Removing the coordinator (%s) is not allowed", ieee) - return _LOGGER.info("Removing node %s", ieee) - await application_controller.remove(ieee) + await zha_gateway.async_remove_device(ieee) async_register_admin_service( hass, DOMAIN, SERVICE_REMOVE, remove, schema=SERVICE_SCHEMAS[IEEE_SERVICE] diff --git a/homeassistant/components/zhong_hong/climate.py b/homeassistant/components/zhong_hong/climate.py index b0a8f02a2f3..eaf00b5432f 100644 --- a/homeassistant/components/zhong_hong/climate.py +++ b/homeassistant/components/zhong_hong/climate.py @@ -11,7 +11,7 @@ from zhong_hong_hvac.hvac import HVAC as ZhongHongHVAC from homeassistant.components.climate import ( ATTR_HVAC_MODE, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as CLIMATE_PLATFORM_SCHEMA, ClimateEntity, ClimateEntityFeature, HVACMode, @@ -42,7 +42,7 @@ DEFAULT_GATEWAY_ADDRRESS = 1 SIGNAL_DEVICE_ADDED = "zhong_hong_device_added" SIGNAL_ZHONG_HONG_HUB_START = "zhong_hong_hub_start" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = CLIMATE_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, diff --git a/homeassistant/components/ziggo_mediabox_xl/media_player.py b/homeassistant/components/ziggo_mediabox_xl/media_player.py index 7c97d38cff3..a81a206b5b2 100644 --- a/homeassistant/components/ziggo_mediabox_xl/media_player.py +++ b/homeassistant/components/ziggo_mediabox_xl/media_player.py @@ -9,7 +9,7 @@ import voluptuous as vol from ziggo_mediabox_xl import ZiggoMediaboxXL from homeassistant.components.media_player import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, 
MediaPlayerEntityFeature, MediaPlayerState, @@ -24,7 +24,7 @@ _LOGGER = logging.getLogger(__name__) DATA_KNOWN_DEVICES = "ziggo_mediabox_xl_known_devices" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( {vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string} ) diff --git a/homeassistant/components/zone/__init__.py b/homeassistant/components/zone/__init__.py index 0fef9961679..1c43a79e10e 100644 --- a/homeassistant/components/zone/__init__.py +++ b/homeassistant/components/zone/__init__.py @@ -45,7 +45,7 @@ from homeassistant.helpers import ( service, storage, ) -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.loader import bind_hass from homeassistant.util.location import distance @@ -62,7 +62,7 @@ ENTITY_ID_HOME = ENTITY_ID_FORMAT.format(HOME_ZONE) ICON_HOME = "mdi:home" ICON_IMPORT = "mdi:import" -CREATE_FIELDS = { +CREATE_FIELDS: VolDictType = { vol.Required(CONF_NAME): cv.string, vol.Required(CONF_LATITUDE): cv.latitude, vol.Required(CONF_LONGITUDE): cv.longitude, @@ -72,7 +72,7 @@ CREATE_FIELDS = { } -UPDATE_FIELDS = { +UPDATE_FIELDS: VolDictType = { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_LATITUDE): cv.latitude, vol.Optional(CONF_LONGITUDE): cv.longitude, diff --git a/homeassistant/components/zoneminder/sensor.py b/homeassistant/components/zoneminder/sensor.py index 700344f44da..75769d9fd98 100644 --- a/homeassistant/components/zoneminder/sensor.py +++ b/homeassistant/components/zoneminder/sensor.py @@ -9,7 +9,7 @@ from zoneminder.monitor import Monitor, TimePeriod from zoneminder.zm import ZoneMinder from homeassistant.components.sensor import ( - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorEntity, SensorEntityDescription, ) @@ -53,7 +53,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( SENSOR_KEYS: list[str] = [desc.key for desc in SENSOR_TYPES] -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Optional( CONF_INCLUDE_ARCHIVED, default=DEFAULT_INCLUDE_ARCHIVED diff --git a/homeassistant/components/zoneminder/switch.py b/homeassistant/components/zoneminder/switch.py index 48cbe58a876..23adf2f4c88 100644 --- a/homeassistant/components/zoneminder/switch.py +++ b/homeassistant/components/zoneminder/switch.py @@ -9,7 +9,10 @@ import voluptuous as vol from zoneminder.monitor import Monitor, MonitorState from zoneminder.zm import ZoneMinder -from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity +from homeassistant.components.switch import ( + PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA, + SwitchEntity, +) from homeassistant.const import CONF_COMMAND_OFF, CONF_COMMAND_ON from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady @@ -21,7 +24,7 @@ from . import DOMAIN as ZONEMINDER_DOMAIN _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( +PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { vol.Required(CONF_COMMAND_ON): cv.string, vol.Required(CONF_COMMAND_OFF): cv.string, diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index fee828c9fd8..8f81790708f 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -166,65 +166,6 @@ STRATEGY = "strategy" MINIMUM_QR_STRING_LENGTH = 52 -def convert_planned_provisioning_entry(info: dict) -> ProvisioningEntry: - """Handle provisioning entry dict to ProvisioningEntry.""" - return ProvisioningEntry( - dsk=info[DSK], - security_classes=info[SECURITY_CLASSES], - status=info[STATUS], - requested_security_classes=info.get(REQUESTED_SECURITY_CLASSES), - additional_properties={ - k: v - for k, v in info.items() - if k not in (DSK, SECURITY_CLASSES, STATUS, REQUESTED_SECURITY_CLASSES) - }, - ) - 
- -def convert_qr_provisioning_information(info: dict) -> QRProvisioningInformation: - """Convert QR provisioning information dict to QRProvisioningInformation.""" - ## Remove this when we have fix for QRProvisioningInformation.from_dict() - return QRProvisioningInformation( - version=info[VERSION], - security_classes=info[SECURITY_CLASSES], - dsk=info[DSK], - generic_device_class=info[GENERIC_DEVICE_CLASS], - specific_device_class=info[SPECIFIC_DEVICE_CLASS], - installer_icon_type=info[INSTALLER_ICON_TYPE], - manufacturer_id=info[MANUFACTURER_ID], - product_type=info[PRODUCT_TYPE], - product_id=info[PRODUCT_ID], - application_version=info[APPLICATION_VERSION], - max_inclusion_request_interval=info.get(MAX_INCLUSION_REQUEST_INTERVAL), - uuid=info.get(UUID), - supported_protocols=info.get(SUPPORTED_PROTOCOLS), - status=info[STATUS], - requested_security_classes=info.get(REQUESTED_SECURITY_CLASSES), - additional_properties={ - k: v - for k, v in info.items() - if k - not in ( - VERSION, - SECURITY_CLASSES, - DSK, - GENERIC_DEVICE_CLASS, - SPECIFIC_DEVICE_CLASS, - INSTALLER_ICON_TYPE, - MANUFACTURER_ID, - PRODUCT_TYPE, - PRODUCT_ID, - APPLICATION_VERSION, - MAX_INCLUSION_REQUEST_INTERVAL, - UUID, - SUPPORTED_PROTOCOLS, - STATUS, - REQUESTED_SECURITY_CLASSES, - ) - }, - ) - - # Helper schemas PLANNED_PROVISIONING_ENTRY_SCHEMA = vol.All( vol.Schema( @@ -244,7 +185,7 @@ PLANNED_PROVISIONING_ENTRY_SCHEMA = vol.All( # Provisioning entries can have extra keys for SmartStart extra=vol.ALLOW_EXTRA, ), - convert_planned_provisioning_entry, + ProvisioningEntry.from_dict, ) QR_PROVISIONING_INFORMATION_SCHEMA = vol.All( @@ -278,7 +219,7 @@ QR_PROVISIONING_INFORMATION_SCHEMA = vol.All( }, extra=vol.ALLOW_EXTRA, ), - convert_qr_provisioning_information, + QRProvisioningInformation.from_dict, ) QR_CODE_STRING_SCHEMA = vol.All(str, vol.Length(min=MINIMUM_QR_STRING_LENGTH)) diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py 
index dff582558b1..e73fa9fc3a7 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -38,6 +38,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import AbortFlow, FlowManager from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import VolDictType from . import disconnect_client from .addon import get_addon_manager @@ -639,7 +640,7 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): CONF_ADDON_LR_S2_AUTHENTICATED_KEY, self.lr_s2_authenticated_key or "" ) - schema = { + schema: VolDictType = { vol.Optional(CONF_S0_LEGACY_KEY, default=s0_legacy_key): str, vol.Optional( CONF_S2_ACCESS_CONTROL_KEY, default=s2_access_control_key diff --git a/homeassistant/components/zwave_js/discovery.py b/homeassistant/components/zwave_js/discovery.py index 0b66567c036..6e750ee8b2d 100644 --- a/homeassistant/components/zwave_js/discovery.py +++ b/homeassistant/components/zwave_js/discovery.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from dataclasses import asdict, dataclass, field from enum import StrEnum from typing import TYPE_CHECKING, Any, cast from awesomeversion import AwesomeVersion -from typing_extensions import Generator from zwave_js_server.const import ( CURRENT_STATE_PROPERTY, CURRENT_VALUE_PROPERTY, @@ -579,6 +579,15 @@ DISCOVERY_SCHEMAS = [ ), entity_registry_enabled_default=False, ), + # ZVIDAR Z-CM-V01 (SmartWings/Deyi WM25L/V Z-Wave Motor for Roller Shade) + ZWaveDiscoverySchema( + platform=Platform.COVER, + hint="shade", + manufacturer_id={0x045A}, + product_id={0x0507}, + product_type={0x0904}, + primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, + ), # Vision Security ZL7432 In Wall Dual Relay Switch ZWaveDiscoverySchema( platform=Platform.SWITCH, diff --git 
a/homeassistant/components/zwave_js/fan.py b/homeassistant/components/zwave_js/fan.py index 925a48512d8..37d3fc57886 100644 --- a/homeassistant/components/zwave_js/fan.py +++ b/homeassistant/components/zwave_js/fan.py @@ -78,7 +78,12 @@ async def async_setup_entry( class ZwaveFan(ZWaveBaseEntity, FanEntity): """Representation of a Z-Wave fan.""" - _attr_supported_features = FanEntityFeature.SET_SPEED + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + _enable_turn_on_off_backwards_compatibility = False def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo @@ -249,7 +254,11 @@ class ValueMappingZwaveFan(ZwaveFan): @property def supported_features(self) -> FanEntityFeature: """Flag supported features.""" - flags = FanEntityFeature.SET_SPEED + flags = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) if self.has_fan_value_mapping and self.fan_value_mapping.presets: flags |= FanEntityFeature.PRESET_MODE @@ -382,7 +391,13 @@ class ZwaveThermostatFan(ZWaveBaseEntity, FanEntity): @property def supported_features(self) -> FanEntityFeature: """Flag supported features.""" - return FanEntityFeature.PRESET_MODE + if not self._fan_off: + return FanEntityFeature.PRESET_MODE + return ( + FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + ) @property def fan_state(self) -> str | None: diff --git a/homeassistant/components/zwave_js/helpers.py b/homeassistant/components/zwave_js/helpers.py index 598cf2f78f6..737b8deff34 100644 --- a/homeassistant/components/zwave_js/helpers.py +++ b/homeassistant/components/zwave_js/helpers.py @@ -40,7 +40,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.group import expand_entity_ids 
-from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers.typing import ConfigType, VolSchemaType from .const import ( ATTR_COMMAND_CLASS, @@ -479,7 +479,9 @@ def copy_available_params( ) -def get_value_state_schema(value: ZwaveValue) -> vol.Schema | None: +def get_value_state_schema( + value: ZwaveValue, +) -> VolSchemaType | vol.Coerce | vol.In | None: """Return device automation schema for a config entry.""" if isinstance(value, ConfigurationValue): min_ = value.metadata.min diff --git a/homeassistant/components/zwave_js/lock.py b/homeassistant/components/zwave_js/lock.py index 5eb89e17402..b16c1090ef3 100644 --- a/homeassistant/components/zwave_js/lock.py +++ b/homeassistant/components/zwave_js/lock.py @@ -196,15 +196,19 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity): ) -> None: """Set the lock configuration.""" params: dict[str, Any] = {"operation_type": operation_type} - for attr, val in ( - ("lock_timeout_configuration", lock_timeout), - ("auto_relock_time", auto_relock_time), - ("hold_and_release_time", hold_and_release_time), - ("twist_assist", twist_assist), - ("block_to_block", block_to_block), - ): - if val is not None: - params[attr] = val + params.update( + { + attr: val + for attr, val in ( + ("lock_timeout_configuration", lock_timeout), + ("auto_relock_time", auto_relock_time), + ("hold_and_release_time", hold_and_release_time), + ("twist_assist", twist_assist), + ("block_to_block", block_to_block), + ) + if val is not None + } + ) configuration = DoorLockCCConfigurationSetOptions(**params) result = await set_configuration( self.info.node.endpoints[self.info.primary_value.endpoint or 0], diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index 66d09714723..e5c0bd64781 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -3,12 +3,11 @@ from __future__ import annotations import asyncio -from collections.abc 
import Collection, Sequence +from collections.abc import Collection, Generator, Sequence import logging import math from typing import Any -from typing_extensions import Generator import voluptuous as vol from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import SET_VALUE_SUCCESS, CommandClass, CommandStatus diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 7c65f1804b1..4bba3e0538c 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -291,7 +291,7 @@ "name": "Clear lock user code" }, "invoke_cc_api": { - "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` service and require direct calls to the Command Class API.", + "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", "fields": { "command_class": { "description": "The ID of the command class that you want to issue a command to.", @@ -313,7 +313,7 @@ "name": "Invoke a Command Class API on a node (advanced)" }, "multicast_set_value": { - "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This service has minimal validation so only use this service if you know what you are doing.", + "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. 
This action has minimal validation so only use this action if you know what you are doing.", "fields": { "broadcast": { "description": "Whether command should be broadcast to all devices on the network.", @@ -475,7 +475,7 @@ "name": "Set lock user code" }, "set_value": { - "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This service has minimal validation so only use this service if you know what you are doing.", + "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", "fields": { "command_class": { "description": "The ID of the command class for the value.", @@ -502,7 +502,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::value::name%]" }, "wait_for_result": { - "description": "Whether or not to wait for a response from the node. If not included in the payload, the integration will decide whether to wait or not. If set to `true`, note that the service call can take a while if setting a value on an asleep battery device.", + "description": "Whether or not to wait for a response from the node. If not included in the payload, the integration will decide whether to wait or not. If set to `true`, note that the action can take a while if setting a value on an asleep battery device.", "name": "Wait for result?" 
} }, diff --git a/homeassistant/components/zwave_js/triggers/event.py b/homeassistant/components/zwave_js/triggers/event.py index 921cae19b3a..9938d08408c 100644 --- a/homeassistant/components/zwave_js/triggers/event.py +++ b/homeassistant/components/zwave_js/triggers/event.py @@ -80,10 +80,8 @@ def validate_event_data(obj: dict) -> dict: except ValidationError as exc: # Filter out required field errors if keys can be missing, and if there are # still errors, raise an exception - if errors := [ - error for error in exc.errors() if error["type"] != "value_error.missing" - ]: - raise vol.MultipleInvalid(errors) from exc + if [error for error in exc.errors() if error["type"] != "value_error.missing"]: + raise vol.MultipleInvalid from exc return obj diff --git a/homeassistant/components/zwave_js/triggers/value_updated.py b/homeassistant/components/zwave_js/triggers/value_updated.py index 4814eba0757..d8c5702ce5d 100644 --- a/homeassistant/components/zwave_js/triggers/value_updated.py +++ b/homeassistant/components/zwave_js/triggers/value_updated.py @@ -128,14 +128,9 @@ async def async_attach_trigger( (prev_value, prev_value_raw, from_value), (curr_value, curr_value_raw, to_value), ): - if ( - match != MATCH_ALL - and value_to_eval != match - and not ( - isinstance(match, list) - and (value_to_eval in match or raw_value_to_eval in match) - ) - and raw_value_to_eval != match + if match not in (MATCH_ALL, value_to_eval, raw_value_to_eval) and not ( + isinstance(match, list) + and (value_to_eval in match or raw_value_to_eval in match) ): return diff --git a/homeassistant/components/zwave_me/fan.py b/homeassistant/components/zwave_me/fan.py index 25ccec9a0fb..b8a4b5e4ad2 100644 --- a/homeassistant/components/zwave_me/fan.py +++ b/homeassistant/components/zwave_me/fan.py @@ -44,7 +44,12 @@ async def async_setup_entry( class ZWaveMeFan(ZWaveMeEntity, FanEntity): """Representation of a ZWaveMe Fan.""" - _attr_supported_features = FanEntityFeature.SET_SPEED + 
_attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int: diff --git a/homeassistant/components/zwave_me/light.py b/homeassistant/components/zwave_me/light.py index b1065d45160..2289fe7b115 100644 --- a/homeassistant/components/zwave_me/light.py +++ b/homeassistant/components/zwave_me/light.py @@ -84,9 +84,8 @@ class ZWaveMeRGB(ZWaveMeEntity, LightEntity): self.device.id, f"exact?level={round(brightness / 2.55)}" ) return - cmd = "exact?red={}&green={}&blue={}".format( - *color if any(color) else 255, 255, 255 - ) + cmd = "exact?red={}&green={}&blue={}" + cmd = cmd.format(*color) if any(color) else cmd.format(*(255, 255, 255)) self.controller.zwave_api.send_command(self.device.id, cmd) @property diff --git a/homeassistant/config.py b/homeassistant/config.py index 8e22f2051f0..948ab342e79 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -60,7 +60,7 @@ from .const import ( LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, __version__, ) -from .core import DOMAIN as HA_DOMAIN, ConfigSource, HomeAssistant, callback +from .core import DOMAIN as HOMEASSISTANT_DOMAIN, ConfigSource, HomeAssistant, callback from .exceptions import ConfigValidationError, HomeAssistantError from .generated.currencies import HISTORIC_CURRENCIES from .helpers import config_validation as cv, issue_registry as ir @@ -261,12 +261,12 @@ CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: if currency not in HISTORIC_CURRENCIES: - ir.async_delete_issue(hass, HA_DOMAIN, "historic_currency") + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "historic_currency") return ir.async_create_issue( hass, - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, "historic_currency", is_fixable=False, learn_more_url="homeassistant://config/general", @@ -278,12 +278,12 @@ def 
_raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> Non def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) -> None: if country is not None: - ir.async_delete_issue(hass, HA_DOMAIN, "country_not_configured") + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "country_not_configured") return ir.async_create_issue( hass, - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, "country_not_configured", is_fixable=False, learn_more_url="homeassistant://config/general", @@ -481,12 +481,14 @@ async def async_hass_config_yaml(hass: HomeAssistant) -> dict: for invalid_domain in invalid_domains: config.pop(invalid_domain) - core_config = config.get(HA_DOMAIN, {}) + core_config = config.get(HOMEASSISTANT_DOMAIN, {}) try: await merge_packages_config(hass, config, core_config.get(CONF_PACKAGES, {})) except vol.Invalid as exc: suffix = "" - if annotation := find_annotation(config, [HA_DOMAIN, CONF_PACKAGES, *exc.path]): + if annotation := find_annotation( + config, [HOMEASSISTANT_DOMAIN, CONF_PACKAGES, *exc.path] + ): suffix = f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" _LOGGER.error( "Invalid package configuration '%s'%s: %s", CONF_PACKAGES, suffix, exc @@ -614,7 +616,7 @@ def _get_annotation(item: Any) -> tuple[str, int | str] | None: return (getattr(item, "__config_file__"), getattr(item, "__line__", "?")) -def _get_by_path(data: dict | list, items: list[str | int]) -> Any: +def _get_by_path(data: dict | list, items: list[Hashable]) -> Any: """Access a nested object in root by item sequence. Returns None in case of error. @@ -626,7 +628,7 @@ def _get_by_path(data: dict | list, items: list[str | int]) -> Any: def find_annotation( - config: dict | list, path: list[str | int] + config: dict | list, path: list[Hashable] ) -> tuple[str, int | str] | None: """Find file/line annotation for a node in config pointed to by path. 
@@ -636,7 +638,7 @@ def find_annotation( """ def find_annotation_for_key( - item: dict, path: list[str | int], tail: str | int + item: dict, path: list[Hashable], tail: Hashable ) -> tuple[str, int | str] | None: for key in item: if key == tail: @@ -646,7 +648,7 @@ def find_annotation( return None def find_annotation_rec( - config: dict | list, path: list[str | int], tail: str | int | None + config: dict | list, path: list[Hashable], tail: Hashable | None ) -> tuple[str, int | str] | None: item = _get_by_path(config, path) if isinstance(item, dict) and tail is not None: @@ -709,7 +711,7 @@ def stringify_invalid( ) else: message_prefix = f"Invalid config for '{domain}'" - if domain != HA_DOMAIN and link: + if domain != HOMEASSISTANT_DOMAIN and link: message_suffix = f", please check the docs at {link}" else: message_suffix = "" @@ -792,7 +794,7 @@ def format_homeassistant_error( if annotation := find_annotation(config, [domain]): message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" message = f"{message_prefix}: {str(exc) or repr(exc)}" - if domain != HA_DOMAIN and link: + if domain != HOMEASSISTANT_DOMAIN and link: message += f", please check the docs at {link}" return message @@ -914,7 +916,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB]) for name, pkg in config[CONF_PACKAGES].items(): - if (pkg_cust := pkg.get(HA_DOMAIN)) is None: + if (pkg_cust := pkg.get(HOMEASSISTANT_DOMAIN)) is None: continue try: @@ -938,7 +940,9 @@ def _log_pkg_error( ) -> None: """Log an error while merging packages.""" message_prefix = f"Setup of package '{package}'" - if annotation := find_annotation(config, [HA_DOMAIN, CONF_PACKAGES, package]): + if annotation := find_annotation( + config, [HOMEASSISTANT_DOMAIN, CONF_PACKAGES, package] + ): message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" _LOGGER.error("%s failed: %s", message_prefix, message) 
@@ -947,7 +951,7 @@ def _log_pkg_error( def _identify_config_schema(module: ComponentProtocol) -> str | None: """Extract the schema and identify list or dict based.""" if not isinstance(module.CONFIG_SCHEMA, vol.Schema): - return None + return None # type: ignore[unreachable] schema = module.CONFIG_SCHEMA.schema @@ -1053,7 +1057,7 @@ async def merge_packages_config( continue for comp_name, comp_conf in pack_conf.items(): - if comp_name == HA_DOMAIN: + if comp_name == HOMEASSISTANT_DOMAIN: continue try: domain = cv.domain_key(comp_name) @@ -1198,7 +1202,7 @@ def _get_log_message_and_stack_print_pref( # Generate the log message from the English translations log_message = async_get_exception_message( - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, platform_exception.translation_key, translation_placeholders=placeholders, ) @@ -1259,7 +1263,7 @@ def async_drop_config_annotations( # Don't drop annotations from the homeassistant integration because it may # have configuration for other integrations as packages. 
- if integration.domain in config and integration.domain != HA_DOMAIN: + if integration.domain in config and integration.domain != HOMEASSISTANT_DOMAIN: drop_config_annotations_rec(config[integration.domain]) return config @@ -1311,7 +1315,7 @@ def async_handle_component_errors( raise ConfigValidationError( translation_key, [platform_exception.exception for platform_exception in config_exception_info], - translation_domain=HA_DOMAIN, + translation_domain=HOMEASSISTANT_DOMAIN, translation_placeholders=placeholders, ) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index c8d671e1fe1..e48313cab33 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -3,10 +3,19 @@ from __future__ import annotations import asyncio -from collections import UserDict -from collections.abc import Callable, Coroutine, Hashable, Iterable, Mapping, ValuesView +from collections import UserDict, defaultdict +from collections.abc import ( + Callable, + Coroutine, + Generator, + Hashable, + Iterable, + Mapping, + ValuesView, +) from contextvars import ContextVar from copy import deepcopy +from datetime import datetime from enum import Enum, StrEnum import functools from functools import cached_property @@ -16,14 +25,14 @@ from types import MappingProxyType from typing import TYPE_CHECKING, Any, Generic, Self, cast from async_interrupt import interrupt -from typing_extensions import Generator, TypeVar +from typing_extensions import TypeVar from . 
import data_entry_flow, loader from .components import persistent_notification from .const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, Platform from .core import ( CALLBACK_TYPE, - DOMAIN as HA_DOMAIN, + DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, Event, HassJob, @@ -61,6 +70,7 @@ from .setup import ( from .util import ulid as ulid_util from .util.async_ import create_eager_task from .util.decorator import Registry +from .util.dt import utc_from_timestamp, utcnow from .util.enum import try_parse_enum if TYPE_CHECKING: @@ -110,7 +120,7 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 -STORAGE_VERSION_MINOR = 2 +STORAGE_VERSION_MINOR = 3 SAVE_DELAY = 1 @@ -295,15 +305,19 @@ class ConfigEntry(Generic[_DataT]): _background_tasks: set[asyncio.Future[Any]] _integration_for_domain: loader.Integration | None _tries: int + created_at: datetime + modified_at: datetime def __init__( self, *, + created_at: datetime | None = None, data: Mapping[str, Any], disabled_by: ConfigEntryDisabler | None = None, domain: str, entry_id: str | None = None, minor_version: int, + modified_at: datetime | None = None, options: Mapping[str, Any] | None, pref_disable_new_entities: bool | None = None, pref_disable_polling: bool | None = None, @@ -407,6 +421,8 @@ class ConfigEntry(Generic[_DataT]): _setter(self, "_integration_for_domain", None) _setter(self, "_tries", 0) + _setter(self, "created_at", created_at or utcnow()) + _setter(self, "modified_at", modified_at or utcnow()) def __repr__(self) -> str: """Representation of ConfigEntry.""" @@ -475,8 +491,10 @@ class ConfigEntry(Generic[_DataT]): def as_json_fragment(self) -> json_fragment: """Return JSON fragment of a config entry.""" json_repr = { + "created_at": self.created_at.timestamp(), "entry_id": self.entry_id, "domain": self.domain, + "modified_at": self.modified_at.timestamp(), "title": self.title, "source": self.source, "state": self.state.value, @@ 
-823,6 +841,10 @@ class ConfigEntry(Generic[_DataT]): async def async_remove(self, hass: HomeAssistant) -> None: """Invoke remove callback on component.""" + old_modified_at = self.modified_at + object.__setattr__(self, "modified_at", utcnow()) + self.clear_cache() + if self.source == SOURCE_IGNORE: return @@ -854,6 +876,8 @@ class ConfigEntry(Generic[_DataT]): self.title, integration.domain, ) + # Restore modified_at + object.__setattr__(self, "modified_at", old_modified_at) @callback def _async_set_state( @@ -942,11 +966,13 @@ class ConfigEntry(Generic[_DataT]): def as_dict(self) -> dict[str, Any]: """Return dictionary version of this entry.""" return { + "created_at": self.created_at.isoformat(), "data": dict(self.data), "disabled_by": self.disabled_by, "domain": self.domain, "entry_id": self.entry_id, "minor_version": self.minor_version, + "modified_at": self.modified_at.isoformat(), "options": dict(self.options), "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, @@ -1041,7 +1067,7 @@ class ConfigEntry(Generic[_DataT]): issue_id = f"config_entry_reauth_{self.domain}_{self.entry_id}" ir.async_create_issue( hass, - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, issue_id, data={"flow_id": result["flow_id"]}, is_fixable=False, @@ -1198,8 +1224,12 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): super().__init__(hass) self.config_entries = config_entries self._hass_config = hass_config - self._pending_import_flows: dict[str, dict[str, asyncio.Future[None]]] = {} - self._initialize_futures: dict[str, list[asyncio.Future[None]]] = {} + self._pending_import_flows: defaultdict[ + str, dict[str, asyncio.Future[None]] + ] = defaultdict(dict) + self._initialize_futures: defaultdict[str, set[asyncio.Future[None]]] = ( + defaultdict(set) + ) self._discovery_debouncer = Debouncer[None]( hass, _LOGGER, @@ -1219,10 +1249,10 @@ class 
ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): @callback def _async_has_other_discovery_flows(self, flow_id: str) -> bool: """Check if there are any other discovery flows in progress.""" - return any( - flow.context["source"] in DISCOVERY_SOURCES and flow.flow_id != flow_id - for flow in self._progress.values() - ) + for flow in self._progress.values(): + if flow.flow_id != flow_id and flow.context["source"] in DISCOVERY_SOURCES: + return True + return False async def async_init( self, handler: str, *, context: dict[str, Any] | None = None, data: Any = None @@ -1246,18 +1276,17 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): flow_id=flow_id, handler=handler, reason="single_instance_allowed", - translation_domain=HA_DOMAIN, + translation_domain=HOMEASSISTANT_DOMAIN, ) loop = self.hass.loop if context["source"] == SOURCE_IMPORT: - self._pending_import_flows.setdefault(handler, {})[flow_id] = ( - loop.create_future() - ) + self._pending_import_flows[handler][flow_id] = loop.create_future() cancel_init_future = loop.create_future() - self._initialize_futures.setdefault(handler, []).append(cancel_init_future) + handler_init_futures = self._initialize_futures[handler] + handler_init_futures.add(cancel_init_future) try: async with interrupt( cancel_init_future, @@ -1268,8 +1297,13 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): except FlowCancelledError as ex: raise asyncio.CancelledError from ex finally: - self._initialize_futures[handler].remove(cancel_init_future) - self._pending_import_flows.get(handler, {}).pop(flow_id, None) + handler_init_futures.remove(cancel_init_future) + if not handler_init_futures: + del self._initialize_futures[handler] + if handler in self._pending_import_flows: + self._pending_import_flows[handler].pop(flow_id, None) + if not self._pending_import_flows[handler]: + del self._pending_import_flows[handler] if result["type"] != 
data_entry_flow.FlowResultType.ABORT: await self.async_post_init(flow, result) @@ -1296,11 +1330,18 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): try: result = await self._async_handle_step(flow, flow.init_step, data) finally: - init_done = self._pending_import_flows.get(handler, {}).get(flow_id) - if init_done and not init_done.done(): - init_done.set_result(None) + self._set_pending_import_done(flow) return flow, result + def _set_pending_import_done(self, flow: ConfigFlow) -> None: + """Set pending import flow as done.""" + if ( + (handler_import_flows := self._pending_import_flows.get(flow.handler)) + and (init_done := handler_import_flows.get(flow.flow_id)) + and not init_done.done() + ): + init_done.set_result(None) + @callback def async_shutdown(self) -> None: """Cancel any initializing flows.""" @@ -1321,9 +1362,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): # We do this to avoid a circular dependency where async_finish_flow sets up a # new entry, which needs the integration to be set up, which is waiting for # init to be done. 
- init_done = self._pending_import_flows.get(flow.handler, {}).get(flow.flow_id) - if init_done and not init_done.done(): - init_done.set_result(None) + self._set_pending_import_done(flow) # Remove notification if no other discovery config entries in progress if not self._async_has_other_discovery_flows(flow.flow_id): @@ -1335,7 +1374,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): entry := self.config_entries.async_get_entry(entry_id) ) is not None: issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}" - ir.async_delete_issue(self.hass, HA_DOMAIN, issue_id) + ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: return result @@ -1352,7 +1391,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): flow_id=flow.flow_id, handler=flow.handler, reason="single_instance_allowed", - translation_domain=HA_DOMAIN, + translation_domain=HOMEASSISTANT_DOMAIN, ) # Check if config entry exists with unique ID. Unload it. 
@@ -1591,25 +1630,34 @@ class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]): ) -> dict[str, Any]: """Migrate to the new version.""" data = old_data - if old_major_version == 1 and old_minor_version < 2: - # Version 1.2 implements migration and freezes the available keys - for entry in data["entries"]: - # Populate keys which were introduced before version 1.2 + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and freezes the available keys + for entry in data["entries"]: + # Populate keys which were introduced before version 1.2 - pref_disable_new_entities = entry.get("pref_disable_new_entities") - if pref_disable_new_entities is None and "system_options" in entry: - pref_disable_new_entities = entry.get("system_options", {}).get( - "disable_new_entities" + pref_disable_new_entities = entry.get("pref_disable_new_entities") + if pref_disable_new_entities is None and "system_options" in entry: + pref_disable_new_entities = entry.get("system_options", {}).get( + "disable_new_entities" + ) + + entry.setdefault("disabled_by", entry.get("disabled_by")) + entry.setdefault("minor_version", entry.get("minor_version", 1)) + entry.setdefault("options", entry.get("options", {})) + entry.setdefault( + "pref_disable_new_entities", pref_disable_new_entities ) + entry.setdefault( + "pref_disable_polling", entry.get("pref_disable_polling") + ) + entry.setdefault("unique_id", entry.get("unique_id")) - entry.setdefault("disabled_by", entry.get("disabled_by")) - entry.setdefault("minor_version", entry.get("minor_version", 1)) - entry.setdefault("options", entry.get("options", {})) - entry.setdefault("pref_disable_new_entities", pref_disable_new_entities) - entry.setdefault( - "pref_disable_polling", entry.get("pref_disable_polling") - ) - entry.setdefault("unique_id", entry.get("unique_id")) + if old_minor_version < 3: + # Version 1.3 adds the created_at and modified_at fields + created_at = 
utc_from_timestamp(0).isoformat() + for entry in data["entries"]: + entry["created_at"] = entry["modified_at"] = created_at if old_major_version > 1: raise NotImplementedError @@ -1664,12 +1712,12 @@ class ConfigEntries: entries = self._entries.get_entries_for_domain(domain) if include_ignore and include_disabled: return bool(entries) - return any( - entry - for entry in entries - if (include_ignore or entry.source != SOURCE_IGNORE) - and (include_disabled or not entry.disabled_by) - ) + for entry in entries: + if (include_ignore or entry.source != SOURCE_IGNORE) and ( + include_disabled or not entry.disabled_by + ): + return True + return False @callback def async_entries( @@ -1744,7 +1792,7 @@ class ConfigEntries: if "flow_id" in progress_flow: self.hass.config_entries.flow.async_abort(progress_flow["flow_id"]) issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}" - ir.async_delete_issue(self.hass, HA_DOMAIN, issue_id) + ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) # After we have fully removed an "ignore" config entry we can try and rediscover # it so that a user is able to immediately start configuring it. 
We do this by @@ -1785,11 +1833,13 @@ class ConfigEntries: entry_id = entry["entry_id"] config_entry = ConfigEntry( + created_at=datetime.fromisoformat(entry["created_at"]), data=entry["data"], disabled_by=try_parse_enum(ConfigEntryDisabler, entry["disabled_by"]), domain=entry["domain"], entry_id=entry_id, minor_version=entry["minor_version"], + modified_at=datetime.fromisoformat(entry["modified_at"]), options=entry["options"], pref_disable_new_entities=entry["pref_disable_new_entities"], pref_disable_polling=entry["pref_disable_polling"], @@ -2006,6 +2056,8 @@ class ConfigEntries: if not changed: return False + _setter(entry, "modified_at", utcnow()) + for listener in entry.update_listeners: self.hass.async_create_task( listener(self.hass, entry), diff --git a/homeassistant/const.py b/homeassistant/const.py index 577e8df6f39..891cc0cc023 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -18,12 +18,12 @@ from .util.hass_dict import HassKey from .util.signal_type import SignalType if TYPE_CHECKING: - from .core import EventStateChangedData + from .core import EventStateChangedData, EventStateReportedData from .helpers.typing import NoEventData APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 7 +MINOR_VERSION: Final = 9 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" @@ -113,6 +113,7 @@ SUN_EVENT_SUNRISE: Final = "sunrise" # #### CONFIG #### CONF_ABOVE: Final = "above" CONF_ACCESS_TOKEN: Final = "access_token" +CONF_ACTION: Final = "action" CONF_ADDRESS: Final = "address" CONF_AFTER: Final = "after" CONF_ALIAS: Final = "alias" @@ -221,6 +222,7 @@ CONF_METHOD: Final = "method" CONF_MINIMUM: Final = "minimum" CONF_MODE: Final = "mode" CONF_MODEL: Final = "model" +CONF_MODEL_ID: Final = "model_id" CONF_MONITORED_CONDITIONS: Final = "monitored_conditions" CONF_MONITORED_VARIABLES: Final = 
"monitored_variables" CONF_NAME: Final = "name" @@ -321,7 +323,7 @@ EVENT_LOGGING_CHANGED: Final = "logging_changed" EVENT_SERVICE_REGISTERED: Final = "service_registered" EVENT_SERVICE_REMOVED: Final = "service_removed" EVENT_STATE_CHANGED: EventType[EventStateChangedData] = EventType("state_changed") -EVENT_STATE_REPORTED: Final = "state_reported" +EVENT_STATE_REPORTED: EventType[EventStateReportedData] = EventType("state_reported") EVENT_THEMES_UPDATED: Final = "themes_updated" EVENT_PANELS_UPDATED: Final = "panels_updated" EVENT_LOVELACE_UPDATED: Final = "lovelace_updated" @@ -564,6 +566,7 @@ ATTR_CONNECTIONS: Final = "connections" ATTR_DEFAULT_NAME: Final = "default_name" ATTR_MANUFACTURER: Final = "manufacturer" ATTR_MODEL: Final = "model" +ATTR_MODEL_ID: Final = "model_id" ATTR_SERIAL_NUMBER: Final = "serial_number" ATTR_SUGGESTED_AREA: Final = "suggested_area" ATTR_SW_VERSION: Final = "sw_version" diff --git a/homeassistant/core.py b/homeassistant/core.py index ac287fb2d5f..1050d25ee71 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -96,11 +96,12 @@ from .helpers.deprecation import ( dir_with_deprecated_constants, ) from .helpers.json import json_bytes, json_fragment -from .helpers.typing import UNDEFINED, UndefinedType +from .helpers.typing import UNDEFINED, UndefinedType, VolSchemaType from .util import dt as dt_util, location from .util.async_ import ( cancelling, create_eager_task, + get_scheduled_timer_handles, run_callback_threadsafe, shutdown_run_callback_threadsafe, ) @@ -158,14 +159,31 @@ class ConfigSource(enum.StrEnum): YAML = "yaml" -class EventStateChangedData(TypedDict): - """EventStateChanged data.""" +class EventStateEventData(TypedDict): + """Base class for EVENT_STATE_CHANGED and EVENT_STATE_REPORTED data.""" entity_id: str - old_state: State | None new_state: State | None +class EventStateChangedData(EventStateEventData): + """EVENT_STATE_CHANGED data. 
+ + A state changed event is fired when on state write the state is changed. + """ + + old_state: State | None + + +class EventStateReportedData(EventStateEventData): + """EVENT_STATE_REPORTED data. + + A state reported event is fired when on state write the state is unchanged. + """ + + old_last_reported: datetime.datetime + + # SOURCE_* are deprecated as of Home Assistant 2022.2, use ConfigSource instead _DEPRECATED_SOURCE_DISCOVERED = DeprecatedConstantEnum( ConfigSource.DISCOVERED, "2025.1" @@ -1210,8 +1228,7 @@ class HomeAssistant: def _cancel_cancellable_timers(self) -> None: """Cancel timer handles marked as cancellable.""" - handles: Iterable[asyncio.TimerHandle] = self.loop._scheduled # type: ignore[attr-defined] # noqa: SLF001 - for handle in handles: + for handle in get_scheduled_timer_handles(self.loop): if ( not handle.cancelled() and (args := handle._args) # noqa: SLF001 @@ -1291,6 +1308,11 @@ class EventOrigin(enum.Enum): """Return the event.""" return self.value + @cached_property + def idx(self) -> int: + """Return the index of the origin.""" + return next((idx for idx, origin in enumerate(EventOrigin) if origin is self)) + class Event(Generic[_DataT]): """Representation of an event within the bus.""" @@ -1604,27 +1626,8 @@ class EventBus: raise HomeAssistantError( f"Event filter is required for event {event_type}" ) - # Special case for EVENT_STATE_REPORTED, we also want to listen to - # EVENT_STATE_CHANGED - self._listeners[EVENT_STATE_REPORTED].append(filterable_job) - self._listeners[EVENT_STATE_CHANGED].append(filterable_job) - return functools.partial( - self._async_remove_multiple_listeners, - (EVENT_STATE_REPORTED, EVENT_STATE_CHANGED), - filterable_job, - ) return self._async_listen_filterable_job(event_type, filterable_job) - @callback - def _async_remove_multiple_listeners( - self, - keys: Iterable[EventType[_DataT] | str], - filterable_job: _FilterableJobType[Any], - ) -> None: - """Remove multiple listeners for specific event_types.""" 
- for key in keys: - self._async_remove_listener(key, filterable_job) - @callback def _async_listen_filterable_job( self, @@ -2240,16 +2243,45 @@ class StateMachine: This method must be run in the event loop. """ - new_state = str(new_state) - attributes = attributes or {} - old_state = self._states_data.get(entity_id) - if old_state is None: - # If the state is missing, try to convert the entity_id to lowercase - # and try again. - entity_id = entity_id.lower() - old_state = self._states_data.get(entity_id) + self.async_set_internal( + entity_id.lower(), + str(new_state), + attributes or {}, + force_update, + context, + state_info, + timestamp or time.time(), + ) - if old_state is None: + @callback + def async_set_internal( + self, + entity_id: str, + new_state: str, + attributes: Mapping[str, Any] | None, + force_update: bool, + context: Context | None, + state_info: StateInfo | None, + timestamp: float, + ) -> None: + """Set the state of an entity, add entity if it does not exist. + + This method is intended to only be used by core internally + and should not be considered a stable API. We will make + breaking changes to this function in the future and it + should not be used in integrations. + + This method must be run in the event loop. + """ + # Most cases the key will be in the dict + # so we optimize for the happy path as + # python 3.11+ has near zero overhead for + # try when it does not raise an exception. 
+ old_state: State | None + try: + old_state = self._states_data[entity_id] + except KeyError: + old_state = None same_state = False same_attr = False last_changed = None @@ -2269,16 +2301,18 @@ class StateMachine: # timestamp implementation: # https://github.com/python/cpython/blob/c90a862cdcf55dc1753c6466e5fa4a467a13ae24/Modules/_datetimemodule.c#L6387 # https://github.com/python/cpython/blob/c90a862cdcf55dc1753c6466e5fa4a467a13ae24/Modules/_datetimemodule.c#L6323 - if timestamp is None: - timestamp = time.time() now = dt_util.utc_from_timestamp(timestamp) + if context is None: + context = Context(id=ulid_at_time(timestamp)) + if same_state and same_attr: # mypy does not understand this is only possible if old_state is not None old_last_reported = old_state.last_reported # type: ignore[union-attr] old_state.last_reported = now # type: ignore[union-attr] old_state.last_reported_timestamp = timestamp # type: ignore[union-attr] - self._bus.async_fire_internal( + # Avoid creating an EventStateReportedData + self._bus.async_fire_internal( # type: ignore[misc] EVENT_STATE_REPORTED, { "entity_id": entity_id, @@ -2290,9 +2324,6 @@ class StateMachine: ) return - if context is None: - context = Context(id=ulid_at_time(timestamp)) - if same_attr: if TYPE_CHECKING: assert old_state is not None @@ -2355,7 +2386,7 @@ class Service: | EntityServiceResponse | None, ], - schema: vol.Schema | None, + schema: VolSchemaType | None, domain: str, service: str, context: Context | None = None, @@ -2503,7 +2534,7 @@ class ServiceRegistry: | EntityServiceResponse | None, ], - schema: vol.Schema | None = None, + schema: VolSchemaType | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, job_type: HassJobType | None = None, ) -> None: @@ -2530,7 +2561,7 @@ class ServiceRegistry: | EntityServiceResponse | None, ], - schema: vol.Schema | None = None, + schema: VolSchemaType | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, job_type: HassJobType 
| None = None, ) -> None: diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index de45702ad95..b8e8f269b82 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations import abc import asyncio from collections import defaultdict -from collections.abc import Callable, Container, Iterable, Mapping +from collections.abc import Callable, Container, Hashable, Iterable, Mapping from contextlib import suppress import copy from dataclasses import dataclass @@ -13,7 +13,7 @@ from enum import StrEnum from functools import partial import logging from types import MappingProxyType -from typing import Any, Generic, Required, TypedDict +from typing import Any, Generic, Required, TypedDict, cast from typing_extensions import TypeVar import voluptuous as vol @@ -46,7 +46,7 @@ class FlowResultType(StrEnum): MENU = "menu" -# RESULT_TYPE_* is deprecated, to be removed in 2022.9 +# RESULT_TYPE_* is deprecated, to be removed in 2025.1 _DEPRECATED_RESULT_TYPE_FORM = DeprecatedConstantEnum(FlowResultType.FORM, "2025.1") _DEPRECATED_RESULT_TYPE_CREATE_ENTRY = DeprecatedConstantEnum( FlowResultType.CREATE_ENTRY, "2025.1" @@ -112,15 +112,13 @@ class UnknownStep(FlowError): """Unknown step specified.""" -# ignore misc is required as vol.Invalid is not typed -# mypy error: Class cannot subclass "Invalid" (has type "Any") -class InvalidData(vol.Invalid): # type: ignore[misc] +class InvalidData(vol.Invalid): """Invalid data provided.""" def __init__( self, message: str, - path: list[str | vol.Marker] | None, + path: list[Hashable] | None, error_message: str | None, schema_errors: dict[str, Any], **kwargs: Any, @@ -384,8 +382,9 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): if ( data_schema := cur_step.get("data_schema") ) is not None and user_input is not None: + data_schema = cast(vol.Schema, data_schema) try: - user_input = data_schema(user_input) # type: 
ignore[operator] + user_input = data_schema(user_input) except vol.Invalid as ex: raised_errors = [ex] if isinstance(ex, vol.MultipleInvalid): @@ -533,7 +532,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): report( ( "does not use FlowResultType enum for data entry flow result type. " - "This is deprecated and will stop working in Home Assistant 2022.9" + "This is deprecated and will stop working in Home Assistant 2025.1" ), error_if_core=False, ) @@ -694,7 +693,7 @@ class FlowHandler(Generic[_FlowResultT, _HandlerT]): ): # Copy the marker to not modify the flow schema new_key = copy.copy(key) - new_key.description = {"suggested_value": suggested_values[key]} + new_key.description = {"suggested_value": suggested_values[key.schema]} schema[new_key] = val return vol.Schema(schema) @@ -906,6 +905,33 @@ class FlowHandler(Generic[_FlowResultT, _HandlerT]): self.__progress_task = progress_task +class SectionConfig(TypedDict, total=False): + """Class to represent a section config.""" + + collapsed: bool + + +class section: + """Data entry flow section.""" + + CONFIG_SCHEMA = vol.Schema( + { + vol.Optional("collapsed", default=False): bool, + }, + ) + + def __init__( + self, schema: vol.Schema, options: SectionConfig | None = None + ) -> None: + """Initialize.""" + self.schema = schema + self.options: SectionConfig = self.CONFIG_SCHEMA(options or {}) + + def __call__(self, value: Any) -> Any: + """Validate input.""" + return self.schema(value) + + # These can be removed if no deprecated constant are in this module anymore __getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) __dir__ = partial( diff --git a/homeassistant/exceptions.py b/homeassistant/exceptions.py index 01e22d16e79..f308cbc5cd8 100644 --- a/homeassistant/exceptions.py +++ b/homeassistant/exceptions.py @@ -2,12 +2,10 @@ from __future__ import annotations -from collections.abc import Callable, Sequence +from collections.abc import Callable, Generator, Sequence from 
dataclasses import dataclass from typing import TYPE_CHECKING, Any -from typing_extensions import Generator - from .util.event_type import EventType if TYPE_CHECKING: diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index bc6b29e4c23..dc30f9d76f0 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -4,7 +4,6 @@ To update, run python3 -m script.hassfest """ APPLICATION_CREDENTIALS = [ - "aladdin_connect", "electric_kiwi", "fitbit", "geocaching", @@ -15,6 +14,7 @@ APPLICATION_CREDENTIALS = [ "google_tasks", "home_connect", "husqvarna_automower", + "iotty", "lametric", "lyric", "microbees", @@ -25,6 +25,7 @@ APPLICATION_CREDENTIALS = [ "netatmo", "senz", "spotify", + "tesla_fleet", "twitch", "withings", "xbox", diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index 17461225851..2ea604a91a2 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -137,6 +137,41 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "govee_ble", "local_name": "B5178*", }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5121*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5122*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5123*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5124*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5125*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5126*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GVH5127*", + }, { "connectable": False, "domain": "govee_ble", @@ -221,6 +256,22 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "manufacturer_id": 19506, "service_uuid": 
"00001801-0000-1000-8000-00805f9b34fb", }, + { + "connectable": False, + "domain": "govee_ble", + "manufacturer_id": 61320, + }, + { + "connectable": False, + "domain": "govee_ble", + "manufacturer_data_start": [ + 236, + 0, + 0, + 1, + ], + "manufacturer_id": 34819, + }, { "domain": "homekit_controller", "manufacturer_data_start": [ @@ -270,6 +321,11 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "inkbird", "local_name": "tps", }, + { + "connectable": True, + "domain": "iron_os", + "service_uuid": "9eae1000-9d0d-48c5-aa55-33e27f9bc533", + }, { "connectable": False, "domain": "kegtron", @@ -624,6 +680,15 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "manufacturer_id": 27, "service_uuid": "0000fff0-0000-1000-8000-00805f9b34fb", }, + { + "connectable": False, + "domain": "thermobeacon", + "manufacturer_data_start": [ + 0, + ], + "manufacturer_id": 48, + "service_uuid": "0000fff0-0000-1000-8000-00805f9b34fb", + }, { "connectable": False, "domain": "thermobeacon", diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index cf6e2bb4fa7..c3fe4af4a76 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -6,11 +6,14 @@ To update, run python3 -m script.hassfest FLOWS = { "helper": [ "derivative", + "generic_hygrostat", "generic_thermostat", "group", + "history_stats", "integration", "min_max", "random", + "statistics", "switch_as_x", "template", "threshold", @@ -40,7 +43,6 @@ FLOWS = { "airvisual_pro", "airzone", "airzone_cloud", - "aladdin_connect", "alarmdecoder", "amberelectric", "ambient_network", @@ -51,6 +53,7 @@ FLOWS = { "androidtv_remote", "anova", "anthemav", + "anthropic", "aosmith", "apcupsd", "apple_tv", @@ -67,6 +70,7 @@ FLOWS = { "aurora", "aurora_abb_powerone", "aussie_broadband", + "autarco", "awair", "axis", "azure_data_explorer", @@ -79,6 +83,7 @@ FLOWS = { "blink", "blue_current", "bluemaestro", + 
"bluesound", "bluetooth", "bmw_connected_drive", "bond", @@ -89,6 +94,7 @@ FLOWS = { "brother", "brottsplatskartan", "brunt", + "bryant_evolution", "bsblan", "bthome", "buienradar", @@ -97,6 +103,7 @@ FLOWS = { "cast", "ccm15", "cert_expiry", + "chacon_dio", "cloudflare", "co2signal", "coinbase", @@ -144,6 +151,7 @@ FLOWS = { "efergy", "electrasmart", "electric_kiwi", + "elevenlabs", "elgato", "elkm1", "elmax", @@ -198,6 +206,7 @@ FLOWS = { "gardena_bluetooth", "gdacs", "generic", + "geniushub", "geo_json_events", "geocaching", "geofency", @@ -265,10 +274,13 @@ FLOWS = { "intellifire", "ios", "iotawatt", + "iotty", "ipma", "ipp", "iqvia", + "iron_os", "islamic_prayer_times", + "israel_rail", "iss", "ista_ecotrend", "isy994", @@ -305,6 +317,7 @@ FLOWS = { "lidarr", "lifx", "linear_garage_door", + "linkplay", "litejet", "litterrobot", "livisi", @@ -312,7 +325,6 @@ FLOWS = { "local_ip", "local_todo", "locative", - "logi_circle", "lookin", "loqed", "luftdaten", @@ -320,7 +332,9 @@ FLOWS = { "lutron", "lutron_caseta", "lyric", + "madvr", "mailgun", + "mastodon", "matter", "mealie", "meater", @@ -351,6 +365,7 @@ FLOWS = { "motionblinds_ble", "motioneye", "motionmount", + "mpd", "mqtt", "mullvad", "mutesync", @@ -370,6 +385,7 @@ FLOWS = { "nextdns", "nfandroidtv", "nibe_heatpump", + "nice_go", "nightscout", "nina", "nmap_tracker", @@ -435,6 +451,7 @@ FLOWS = { "pushover", "pvoutput", "pvpc_hourly_pricing", + "pyload", "qbittorrent", "qingping", "qnap", @@ -470,6 +487,7 @@ FLOWS = { "rpi_power", "rtsp_to_webrtc", "ruckus_unleashed", + "russound_rio", "ruuvi_gateway", "ruuvitag_ble", "rympro", @@ -493,6 +511,7 @@ FLOWS = { "shelly", "shopping_list", "sia", + "simplefin", "simplepush", "simplisafe", "skybell", @@ -555,6 +574,7 @@ FLOWS = { "technove", "tedee", "tellduslive", + "tesla_fleet", "tesla_wall_connector", "teslemetry", "tessie", @@ -613,6 +633,7 @@ FLOWS = { "volumio", "volvooncall", "vulcan", + "wake_on_lan", "wallbox", "waqi", "watttime", @@ -632,6 +653,7 @@ 
FLOWS = { "wled", "wolflink", "workday", + "worldclock", "ws66i", "wyoming", "xbox", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 3b5fe9843f2..f6df799d01e 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -650,6 +650,11 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "k[lps]*", "macaddress": "5091E3*", }, + { + "domain": "tplink", + "hostname": "p1*", + "macaddress": "5091E3*", + }, { "domain": "tplink", "hostname": "k[lps]*", @@ -822,7 +827,7 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "tplink", - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5CE931*", }, { @@ -832,9 +837,14 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "tplink", - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5C628B*", }, + { + "domain": "tplink", + "hostname": "l[59]*", + "macaddress": "14EBB6*", + }, { "domain": "tplink", "hostname": "tp*", @@ -870,16 +880,31 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "s5*", "macaddress": "3C52A1*", }, + { + "domain": "tplink", + "hostname": "h1*", + "macaddress": "3C52A1*", + }, { "domain": "tplink", "hostname": "l9*", "macaddress": "A842A1*", }, + { + "domain": "tplink", + "hostname": "p1*", + "macaddress": "A842A1*", + }, { "domain": "tplink", "hostname": "l9*", "macaddress": "3460F9*", }, + { + "domain": "tplink", + "hostname": "p1*", + "macaddress": "3460F9*", + }, { "domain": "tplink", "hostname": "hs*", @@ -890,6 +915,11 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "k[lps]*", "macaddress": "74DA88*", }, + { + "domain": "tplink", + "hostname": "p1*", + "macaddress": "74DA88*", + }, { "domain": "tplink", "hostname": "p3*", @@ -930,6 +960,16 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "l9*", "macaddress": "F0A731*", }, + { + "domain": "tplink", + "hostname": "ks2*", + "macaddress": "F0A731*", + }, + { + "domain": "tplink", + "hostname": "kh1*", + "macaddress": 
"F0A731*", + }, { "domain": "tuya", "macaddress": "105A17*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index bbf96e4461b..7df27aa5e68 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -180,12 +180,6 @@ } } }, - "aladdin_connect": { - "name": "Aladdin Connect", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "alarmdecoder": { "name": "AlarmDecoder", "integration_type": "device", @@ -206,12 +200,6 @@ "amazon": { "name": "Amazon", "integrations": { - "alexa": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push", - "name": "Amazon Alexa" - }, "amazon_polly": { "integration_type": "hub", "config_flow": false, @@ -321,6 +309,12 @@ "config_flow": true, "iot_class": "local_push" }, + "anthropic": { + "name": "Anthropic Conversation", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "anwb_energie": { "name": "ANWB Energie", "integration_type": "virtual", @@ -402,7 +396,7 @@ "iot_class": "cloud_push" }, "aprilaire": { - "name": "Aprilaire", + "name": "AprilAire", "integration_type": "device", "config_flow": true, "iot_class": "local_push" @@ -461,6 +455,11 @@ "config_flow": false, "iot_class": "local_polling" }, + "artsound": { + "name": "ArtSound", + "integration_type": "virtual", + "supported_by": "linkplay" + }, "aruba": { "name": "Aruba", "integrations": { @@ -496,29 +495,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "assist_pipeline": { - "name": "Assist pipeline", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" - }, - "asterisk": { - "name": "Asterisk", - "integrations": { - "asterisk_cdr": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling", - "name": "Asterisk Call Detail Records" - }, - "asterisk_mbox": { - "integration_type": "hub", - "config_flow": false, - 
"iot_class": "local_push", - "name": "Asterisk Voicemail" - } - } - }, "asuswrt": { "name": "ASUSWRT", "integration_type": "hub", @@ -587,6 +563,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "autarco": { + "name": "Autarco", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "avion": { "name": "Avi-on", "integration_type": "hub", @@ -635,12 +617,6 @@ "config_flow": true, "iot_class": "local_push" }, - "bayesian": { - "name": "Bayesian", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "bbox": { "name": "Bbox", "integration_type": "hub", @@ -731,7 +707,7 @@ "bluesound": { "name": "Bluesound", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "bluetooth": { @@ -816,6 +792,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "bryant_evolution": { + "name": "Bryant Evolution", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "bsblan": { "name": "BSB-Lan", "integration_type": "device", @@ -884,6 +866,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "chacon_dio": { + "name": "Chacon DiO", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push" + }, "channels": { "name": "Channels", "integration_type": "hub", @@ -1345,7 +1333,7 @@ "iot_class": "local_push" }, "dsmr": { - "name": "DSMR Slimme Meter", + "name": "DSMR Smart Meter", "integration_type": "hub", "config_flow": true, "iot_class": "local_push" @@ -1510,6 +1498,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "elevenlabs": { + "name": "ElevenLabs", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "elgato": { "name": "Elgato", "integrations": { @@ -1798,11 +1792,6 @@ "ffmpeg": { "name": "FFmpeg", "integrations": { - "ffmpeg": { - "integration_type": "hub", - "config_flow": false, - "name": "FFmpeg" - }, "ffmpeg_motion": { 
"integration_type": "hub", "config_flow": false, @@ -1840,12 +1829,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "filter": { - "name": "Filter", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" - }, "fints": { "name": "FinTS", "integration_type": "service", @@ -2127,16 +2110,10 @@ "config_flow": true, "iot_class": "local_push" }, - "generic_hygrostat": { - "name": "Generic hygrostat", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "geniushub": { "name": "Genius Hub", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "geo_json_events": { @@ -2248,12 +2225,6 @@ "google": { "name": "Google", "integrations": { - "google_assistant": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push", - "name": "Google Assistant" - }, "google_assistant_sdk": { "integration_type": "service", "config_flow": true, @@ -2530,12 +2501,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "history_stats": { - "name": "History Stats", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "hitron_coda": { "name": "Rogers Hitron CODA", "integration_type": "hub", @@ -2875,6 +2840,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "iotty": { + "name": "iotty", + "integration_type": "device", + "config_flow": true, + "iot_class": "cloud_polling" + }, "iperf3": { "name": "Iperf3", "integration_type": "hub", @@ -2905,6 +2876,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "iron_os": { + "name": "IronOS", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "islamic_prayer_times": { "integration_type": "hub", "config_flow": true, @@ -2915,6 +2892,12 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, + "israel_rail": { + "name": "Israel Railways", + "integration_type": "hub", + "config_flow": true, + "iot_class": 
"cloud_polling" + }, "iss": { "name": "International Space Station (ISS)", "integration_type": "service", @@ -3274,6 +3257,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "linkplay": { + "name": "LinkPlay", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "linksys_smart": { "name": "Linksys Smart Wi-Fi", "integration_type": "hub", @@ -3355,12 +3344,6 @@ "config_flow": false, "iot_class": "cloud_push" }, - "logi_circle": { - "name": "Logi Circle", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "logitech": { "name": "Logitech", "integrations": { @@ -3370,12 +3353,6 @@ "iot_class": "local_push", "name": "Logitech Harmony Hub" }, - "ue_smart_radio": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_polling", - "name": "Logitech UE Smart Radio" - }, "squeezebox": { "integration_type": "hub", "config_flow": true, @@ -3459,18 +3436,18 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, + "madvr": { + "name": "madVR Envy", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_push" + }, "mailgun": { "name": "Mailgun", "integration_type": "hub", "config_flow": true, "iot_class": "cloud_push" }, - "manual": { - "name": "Manual Alarm Control Panel", - "integration_type": "hub", - "config_flow": false, - "iot_class": "calculated" - }, "marantz": { "name": "Marantz", "integration_type": "virtual", @@ -3489,8 +3466,8 @@ }, "mastodon": { "name": "Mastodon", - "integration_type": "hub", - "config_flow": false, + "integration_type": "service", + "config_flow": true, "iot_class": "cloud_push" }, "matrix": { @@ -3571,6 +3548,11 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "mercury_nz": { + "name": "Mercury NZ Limited", + "integration_type": "virtual", + "supported_by": "opower" + }, "message_bird": { "name": "MessageBird", "integration_type": "hub", @@ -3707,6 +3689,11 @@ "config_flow": true, "iot_class": 
"local_polling" }, + "mini_connected": { + "name": "MINI Connected", + "integration_type": "virtual", + "supported_by": "bmw_connected_drive" + }, "minio": { "name": "Minio", "integration_type": "hub", @@ -3826,7 +3813,7 @@ "mpd": { "name": "Music Player Daemon (MPD)", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "mqtt": { @@ -4042,6 +4029,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "nice_go": { + "name": "Nice G.O.", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push" + }, "nightscout": { "name": "Nightscout", "integration_type": "hub", @@ -4249,7 +4242,7 @@ "name": "Onkyo", "integration_type": "hub", "config_flow": false, - "iot_class": "local_polling" + "iot_class": "local_push" }, "onvif": { "name": "ONVIF", @@ -4577,6 +4570,11 @@ "config_flow": false, "iot_class": "local_push" }, + "pinecil": { + "name": "Pinecil", + "integration_type": "virtual", + "supported_by": "iron_os" + }, "ping": { "name": "Ping (ICMP)", "integration_type": "hub", @@ -4781,7 +4779,7 @@ "pyload": { "name": "pyLoad", "integration_type": "service", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "python_script": { @@ -5159,7 +5157,7 @@ "integrations": { "russound_rio": { "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push", "name": "Russound RIO" }, @@ -5433,6 +5431,12 @@ "config_flow": false, "iot_class": "cloud_push" }, + "simplefin": { + "name": "SimpleFin", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "simplepush": { "name": "Simplepush", "integration_type": "hub", @@ -5778,12 +5782,6 @@ "config_flow": false, "iot_class": "cloud_polling" }, - "statistics": { - "name": "Statistics", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "statsd": { "name": "StatsD", "integration_type": "hub", @@ -5980,10 +5978,6 @@ 
"config_flow": true, "iot_class": "cloud_polling" }, - "tag": { - "integration_type": "hub", - "config_flow": false - }, "tailscale": { "name": "Tailscale", "integration_type": "hub", @@ -6122,6 +6116,12 @@ "config_flow": true, "iot_class": "local_polling", "name": "Tesla Wall Connector" + }, + "tesla_fleet": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "Tesla Fleet" } } }, @@ -6732,7 +6732,7 @@ "wake_on_lan": { "name": "Wake on LAN", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push" }, "wallbox": { @@ -6781,11 +6781,6 @@ } } }, - "webhook": { - "name": "Webhook", - "integration_type": "hub", - "config_flow": false - }, "webmin": { "name": "Webmin", "integration_type": "device", @@ -6865,7 +6860,7 @@ "worldclock": { "name": "Worldclock", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push" }, "worldtidesinfo": { @@ -7151,6 +7146,12 @@ } }, "helper": { + "bayesian": { + "name": "Bayesian", + "integration_type": "helper", + "config_flow": false, + "iot_class": "local_polling" + }, "counter": { "integration_type": "helper", "config_flow": false @@ -7160,6 +7161,17 @@ "config_flow": true, "iot_class": "calculated" }, + "filter": { + "name": "Filter", + "integration_type": "helper", + "config_flow": false, + "iot_class": "local_push" + }, + "generic_hygrostat": { + "integration_type": "helper", + "config_flow": true, + "iot_class": "local_polling" + }, "generic_thermostat": { "integration_type": "helper", "config_flow": true, @@ -7170,6 +7182,12 @@ "config_flow": true, "iot_class": "calculated" }, + "history_stats": { + "name": "History Stats", + "integration_type": "helper", + "config_flow": true, + "iot_class": "local_polling" + }, "input_boolean": { "integration_type": "helper", "config_flow": false @@ -7199,6 +7217,12 @@ "config_flow": true, "iot_class": "local_push" }, + "manual": { + "name": "Manual Alarm Control 
Panel", + "integration_type": "helper", + "config_flow": false, + "iot_class": "calculated" + }, "min_max": { "integration_type": "helper", "config_flow": true, @@ -7214,6 +7238,12 @@ "integration_type": "helper", "config_flow": false }, + "statistics": { + "name": "Statistics", + "integration_type": "helper", + "config_flow": true, + "iot_class": "local_polling" + }, "switch_as_x": { "integration_type": "helper", "config_flow": true, @@ -7265,6 +7295,7 @@ "filesize", "garages_amsterdam", "generic", + "generic_hygrostat", "generic_thermostat", "google_travel_time", "group", @@ -7296,7 +7327,6 @@ "shopping_list", "sun", "switch_as_x", - "tag", "threshold", "time_date", "tod", diff --git a/homeassistant/generated/languages.py b/homeassistant/generated/languages.py index feedd373fd9..78105c76f4c 100644 --- a/homeassistant/generated/languages.py +++ b/homeassistant/generated/languages.py @@ -44,6 +44,7 @@ LANGUAGES = { "lb", "lt", "lv", + "mk", "ml", "nb", "nl", diff --git a/homeassistant/generated/ssdp.py b/homeassistant/generated/ssdp.py index 8e7319917f0..9ed65bab868 100644 --- a/homeassistant/generated/ssdp.py +++ b/homeassistant/generated/ssdp.py @@ -297,6 +297,10 @@ SSDP = { "manufacturer": "Ubiquiti Networks", "modelDescription": "UniFi Dream Machine SE", }, + { + "manufacturer": "Ubiquiti Networks", + "modelDescription": "UniFi Dream Machine Pro Max", + }, ], "unifiprotect": [ { @@ -311,6 +315,10 @@ SSDP = { "manufacturer": "Ubiquiti Networks", "modelDescription": "UniFi Dream Machine SE", }, + { + "manufacturer": "Ubiquiti Networks", + "modelDescription": "UniFi Dream Machine Pro Max", + }, ], "upnp": [ { diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 8efe49b7892..7cd60da2d0e 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -589,6 +589,11 @@ ZEROCONF = { "name": "gateway*", }, ], + "_linkplay._tcp.local.": [ + { + "domain": "linkplay", + }, + ], "_lookin._tcp.local.": [ 
{ "domain": "lookin", @@ -646,6 +651,11 @@ ZEROCONF = { "name": "yeelink-*", }, ], + "_musc._tcp.local.": [ + { + "domain": "bluesound", + }, + ], "_nanoleafapi._tcp.local.": [ { "domain": "nanoleaf", diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index 5c4ead4e611..d61f889d4b5 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -5,6 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Awaitable, Callable from contextlib import suppress +import socket from ssl import SSLContext import sys from types import MappingProxyType @@ -13,6 +14,7 @@ from typing import TYPE_CHECKING, Any import aiohttp from aiohttp import web from aiohttp.hdrs import CONTENT_TYPE, USER_AGENT +from aiohttp.resolver import AsyncResolver from aiohttp.web_exceptions import HTTPBadGateway, HTTPGatewayTimeout from homeassistant import config_entries @@ -23,7 +25,6 @@ from homeassistant.util import ssl as ssl_util from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import json_loads -from .backports.aiohttp_resolver import AsyncResolver from .frame import warn_use from .json import json_dumps @@ -82,7 +83,9 @@ class HassClientResponse(aiohttp.ClientResponse): @callback @bind_hass def async_get_clientsession( - hass: HomeAssistant, verify_ssl: bool = True, family: int = 0 + hass: HomeAssistant, + verify_ssl: bool = True, + family: socket.AddressFamily = socket.AF_UNSPEC, ) -> aiohttp.ClientSession: """Return default aiohttp ClientSession. @@ -111,7 +114,7 @@ def async_create_clientsession( hass: HomeAssistant, verify_ssl: bool = True, auto_cleanup: bool = True, - family: int = 0, + family: socket.AddressFamily = socket.AF_UNSPEC, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. for cookies. 
@@ -142,7 +145,7 @@ def _async_create_clientsession( verify_ssl: bool = True, auto_cleanup_method: Callable[[HomeAssistant, aiohttp.ClientSession], None] | None = None, - family: int = 0, + family: socket.AddressFamily = socket.AF_UNSPEC, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. for cookies.""" @@ -275,14 +278,33 @@ def _async_register_default_clientsession_shutdown( @callback -def _make_key(verify_ssl: bool = True, family: int = 0) -> tuple[bool, int]: +def _make_key( + verify_ssl: bool = True, family: socket.AddressFamily = socket.AF_UNSPEC +) -> tuple[bool, socket.AddressFamily]: """Make a key for connector or session pool.""" return (verify_ssl, family) +class HomeAssistantTCPConnector(aiohttp.TCPConnector): + """Home Assistant TCP Connector. + + Same as aiohttp.TCPConnector but with a longer cleanup_closed timeout. + + By default the cleanup_closed timeout is 2 seconds. This is too short + for Home Assistant since we churn through a lot of connections. We set + it to 60 seconds to reduce the overhead of aborting TLS connections + that are likely already closed. + """ + + # abort transport after 60 seconds (cleanup broken connections) + _cleanup_closed_period = 60.0 + + @callback def _async_get_connector( - hass: HomeAssistant, verify_ssl: bool = True, family: int = 0 + hass: HomeAssistant, + verify_ssl: bool = True, + family: socket.AddressFamily = socket.AF_UNSPEC, ) -> aiohttp.BaseConnector: """Return the connector pool for aiohttp. 
@@ -299,7 +321,7 @@ def _async_get_connector( else: ssl_context = ssl_util.get_default_no_verify_context() - connector = aiohttp.TCPConnector( + connector = HomeAssistantTCPConnector( family=family, enable_cleanup_closed=ENABLE_CLEANUP_CLOSED, ssl=ssl_context, diff --git a/homeassistant/helpers/area_registry.py b/homeassistant/helpers/area_registry.py index 975750ebbdd..3e101f185ed 100644 --- a/homeassistant/helpers/area_registry.py +++ b/homeassistant/helpers/area_registry.py @@ -5,11 +5,13 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Iterable import dataclasses +from datetime import datetime from functools import cached_property from typing import Any, Literal, TypedDict from homeassistant.core import HomeAssistant, callback from homeassistant.util import slugify +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -31,7 +33,7 @@ EVENT_AREA_REGISTRY_UPDATED: EventType[EventAreaRegistryUpdatedData] = EventType ) STORAGE_KEY = "core.area_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 6 +STORAGE_VERSION_MINOR = 7 class _AreaStoreData(TypedDict): @@ -44,6 +46,8 @@ class _AreaStoreData(TypedDict): labels: list[str] name: str picture: str | None + created_at: str + modified_at: str class AreasRegistryStoreData(TypedDict): @@ -83,6 +87,8 @@ class AreaEntry(NormalizedNameBaseRegistryEntry): "labels": list(self.labels), "name": self.name, "picture": self.picture, + "created_at": self.created_at.timestamp(), + "modified_at": self.modified_at.timestamp(), } ) ) @@ -125,6 +131,12 @@ class AreaRegistryStore(Store[AreasRegistryStoreData]): for area in old_data["areas"]: area["labels"] = [] + if old_minor_version < 7: + # Version 1.7 adds created_at and modiefied_at + created_at = utc_from_timestamp(0).isoformat() + for area in old_data["areas"]: + area["created_at"] = area["modified_at"] = 
created_at + if old_major_version > 1: raise NotImplementedError return old_data # type: ignore[return-value] @@ -315,17 +327,17 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): """Update name of area.""" old = self.areas[area_id] - new_values = {} - - for attr_name, value in ( - ("aliases", aliases), - ("icon", icon), - ("labels", labels), - ("picture", picture), - ("floor_id", floor_id), - ): - if value is not UNDEFINED and value != getattr(old, attr_name): - new_values[attr_name] = value + new_values: dict[str, Any] = { + attr_name: value + for attr_name, value in ( + ("aliases", aliases), + ("icon", icon), + ("labels", labels), + ("picture", picture), + ("floor_id", floor_id), + ) + if value is not UNDEFINED and value != getattr(old, attr_name) + } if name is not UNDEFINED and name != old.name: new_values["name"] = name @@ -334,8 +346,10 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): if not new_values: return old + new_values["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("area_registry.async_update") - new = self.areas[area_id] = dataclasses.replace(old, **new_values) # type: ignore[arg-type] + new = self.areas[area_id] = dataclasses.replace(old, **new_values) self.async_schedule_save() return new @@ -361,6 +375,8 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): name=area["name"], normalized_name=normalized_name, picture=area["picture"], + created_at=datetime.fromisoformat(area["created_at"]), + modified_at=datetime.fromisoformat(area["modified_at"]), ) self.areas = areas @@ -379,6 +395,8 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): "labels": list(entry.labels), "name": entry.name, "picture": entry.picture, + "created_at": entry.created_at.isoformat(), + "modified_at": entry.modified_at.isoformat(), } for entry in self.areas.values() ] diff --git a/homeassistant/helpers/backports/__init__.py b/homeassistant/helpers/backports/__init__.py deleted file mode 100644 index e672fe1d3d2..00000000000 
--- a/homeassistant/helpers/backports/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Backports for helpers.""" diff --git a/homeassistant/helpers/backports/aiohttp_resolver.py b/homeassistant/helpers/backports/aiohttp_resolver.py deleted file mode 100644 index efa4ba4bb85..00000000000 --- a/homeassistant/helpers/backports/aiohttp_resolver.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Backport of aiohttp's AsyncResolver for Home Assistant. - -This is a backport of the AsyncResolver class from aiohttp 3.10. - -Before aiohttp 3.10, on system with IPv6 support, AsyncResolver would not fallback -to providing A records when AAAA records were not available. - -Additionally, unlike the ThreadedResolver, AsyncResolver -did not handle link-local addresses correctly. -""" - -from __future__ import annotations - -import asyncio -import socket -import sys -from typing import Any, TypedDict - -import aiodns -from aiohttp.abc import AbstractResolver - -# This is a backport of https://github.com/aio-libs/aiohttp/pull/8270 -# This can be removed once aiohttp 3.10 is the minimum supported version. - -_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV -_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) - - -class ResolveResult(TypedDict): - """Resolve result. - - This is the result returned from an AbstractResolver's - resolve method. - - :param hostname: The hostname that was provided. - :param host: The IP address that was resolved. - :param port: The port that was resolved. - :param family: The address family that was resolved. - :param proto: The protocol that was resolved. - :param flags: The flags that were resolved. 
- """ - - hostname: str - host: str - port: int - family: int - proto: int - flags: int - - -class AsyncResolver(AbstractResolver): - """Use the `aiodns` package to make asynchronous DNS lookups.""" - - def __init__(self, *args: Any, **kwargs: Any) -> None: - """Initialize the resolver.""" - if aiodns is None: - raise RuntimeError("Resolver requires aiodns library") - - self._loop = asyncio.get_running_loop() - self._resolver = aiodns.DNSResolver(*args, loop=self._loop, **kwargs) # type: ignore[misc] - - async def resolve( # type: ignore[override] - self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> list[ResolveResult]: - """Resolve a host name to an IP address.""" - try: - resp = await self._resolver.getaddrinfo( - host, - port=port, - type=socket.SOCK_STREAM, - family=family, # type: ignore[arg-type] - flags=socket.AI_ADDRCONFIG, - ) - except aiodns.error.DNSError as exc: - msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc - hosts: list[ResolveResult] = [] - for node in resp.nodes: - address: tuple[bytes, int] | tuple[bytes, int, int, int] = node.addr - family = node.family - if family == socket.AF_INET6: - if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: - # This is essential for link-local IPv6 addresses. - # LL IPv6 is a VERY rare case. Strictly speaking, we should use - # getnameinfo() unconditionally, but performance makes sense. 
- result = await self._resolver.getnameinfo( - (address[0].decode("ascii"), *address[1:]), - _NUMERIC_SOCKET_FLAGS, - ) - resolved_host = result.node - else: - resolved_host = address[0].decode("ascii") - port = address[1] - else: # IPv4 - assert family == socket.AF_INET - resolved_host = address[0].decode("ascii") - port = address[1] - hosts.append( - ResolveResult( - hostname=host, - host=resolved_host, - port=port, - family=family, - proto=0, - flags=_NUMERIC_SOCKET_FLAGS, - ) - ) - - if not hosts: - raise OSError("DNS lookup failed") - - return hosts - - async def close(self) -> None: - """Close the resolver.""" - self._resolver.cancel() diff --git a/homeassistant/helpers/category_registry.py b/homeassistant/helpers/category_registry.py index 6498859e2ab..41fa82084b3 100644 --- a/homeassistant/helpers/category_registry.py +++ b/homeassistant/helpers/category_registry.py @@ -5,9 +5,11 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass, field -from typing import Literal, TypedDict +from datetime import datetime +from typing import Any, Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.ulid import ulid_now @@ -23,13 +25,16 @@ EVENT_CATEGORY_REGISTRY_UPDATED: EventType[EventCategoryRegistryUpdatedData] = ( ) STORAGE_KEY = "core.category_registry" STORAGE_VERSION_MAJOR = 1 +STORAGE_VERSION_MINOR = 2 class _CategoryStoreData(TypedDict): """Data type for individual category. 
Used in CategoryRegistryStoreData.""" category_id: str + created_at: str icon: str | None + modified_at: str name: str @@ -55,10 +60,36 @@ class CategoryEntry: """Category registry entry.""" category_id: str = field(default_factory=ulid_now) + created_at: datetime = field(default_factory=utcnow) icon: str | None = None + modified_at: datetime = field(default_factory=utcnow) name: str +class CategoryRegistryStore(Store[CategoryRegistryStoreData]): + """Store category registry data.""" + + async def _async_migrate_func( + self, + old_major_version: int, + old_minor_version: int, + old_data: dict[str, dict[str, list[dict[str, Any]]]], + ) -> CategoryRegistryStoreData: + """Migrate to the new version.""" + if old_major_version > STORAGE_VERSION_MAJOR: + raise ValueError("Can't migrate to future version") + + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for categories in old_data["categories"].values(): + for category in categories: + category["created_at"] = category["modified_at"] = created_at + + return old_data # type: ignore[return-value] + + class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): """Class to hold a registry of categories by scope.""" @@ -66,11 +97,12 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): """Initialize the category registry.""" self.hass = hass self.categories: dict[str, dict[str, CategoryEntry]] = {} - self._store = Store( + self._store = CategoryRegistryStore( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, + minor_version=STORAGE_VERSION_MINOR, ) @callback @@ -145,7 +177,7 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): ) -> CategoryEntry: """Update name or icon of the category.""" old = self.categories[scope][category_id] - changes = {} + changes: dict[str, Any] = {} if icon is not UNDEFINED and icon != old.icon: changes["icon"] = icon @@ 
-157,8 +189,10 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): if not changes: return old + changes["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("category_registry.async_update") - new = self.categories[scope][category_id] = dataclasses.replace(old, **changes) # type: ignore[arg-type] + new = self.categories[scope][category_id] = dataclasses.replace(old, **changes) self.async_schedule_save() self.hass.bus.async_fire_internal( @@ -180,7 +214,9 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): category_entries[scope] = { category["category_id"]: CategoryEntry( category_id=category["category_id"], + created_at=datetime.fromisoformat(category["created_at"]), icon=category["icon"], + modified_at=datetime.fromisoformat(category["modified_at"]), name=category["name"], ) for category in categories @@ -196,7 +232,9 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): scope: [ { "category_id": entry.category_id, + "created_at": entry.created_at.isoformat(), "icon": entry.icon, + "modified_at": entry.modified_at.isoformat(), "name": entry.name, } for entry in entries.values() diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 0626e0033c4..06d836e8c20 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -22,7 +22,7 @@ from homeassistant.config import ( # type: ignore[attr-defined] load_yaml_config_file, merge_packages_config, ) -from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.requirements import ( RequirementsNotFound, @@ -157,10 +157,10 @@ async def async_check_ha_config_file( # noqa: C901 return result.add_error(f"Error loading {config_path}: {err}") # Extract and validate core [homeassistant] config - core_config = config.pop(HA_DOMAIN, {}) + 
core_config = config.pop(HOMEASSISTANT_DOMAIN, {}) try: core_config = CORE_CONFIG_SCHEMA(core_config) - result[HA_DOMAIN] = core_config + result[HOMEASSISTANT_DOMAIN] = core_config # Merge packages await merge_packages_config( @@ -168,8 +168,8 @@ async def async_check_ha_config_file( # noqa: C901 ) except vol.Invalid as err: result.add_error( - format_schema_error(hass, err, HA_DOMAIN, core_config), - HA_DOMAIN, + format_schema_error(hass, err, HOMEASSISTANT_DOMAIN, core_config), + HOMEASSISTANT_DOMAIN, core_config, ) core_config = {} diff --git a/homeassistant/helpers/collection.py b/homeassistant/helpers/collection.py index 1dd94d85f9a..9151a9dfc6b 100644 --- a/homeassistant/helpers/collection.py +++ b/homeassistant/helpers/collection.py @@ -26,7 +26,7 @@ from . import entity_registry from .entity import Entity from .entity_component import EntityComponent from .storage import Store -from .typing import ConfigType +from .typing import ConfigType, VolDictType STORAGE_VERSION = 1 SAVE_DELAY = 10 @@ -515,8 +515,8 @@ class StorageCollectionWebsocket[_StorageCollectionT: StorageCollection]: storage_collection: _StorageCollectionT, api_prefix: str, model_name: str, - create_schema: dict, - update_schema: dict, + create_schema: VolDictType, + update_schema: VolDictType, ) -> None: """Initialize a websocket CRUD.""" self.storage_collection = storage_collection @@ -536,12 +536,7 @@ class StorageCollectionWebsocket[_StorageCollectionT: StorageCollection]: return f"{self.model_name}_id" @callback - def async_setup( - self, - hass: HomeAssistant, - *, - create_create: bool = True, - ) -> None: + def async_setup(self, hass: HomeAssistant) -> None: """Set up the websocket commands.""" websocket_api.async_register_command( hass, @@ -552,20 +547,19 @@ class StorageCollectionWebsocket[_StorageCollectionT: StorageCollection]: ), ) - if create_create: - websocket_api.async_register_command( - hass, - f"{self.api_prefix}/create", - websocket_api.require_admin( - 
websocket_api.async_response(self.ws_create_item) - ), - websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend( - { - **self.create_schema, - vol.Required("type"): f"{self.api_prefix}/create", - } - ), - ) + websocket_api.async_register_command( + hass, + f"{self.api_prefix}/create", + websocket_api.require_admin( + websocket_api.async_response(self.ws_create_item) + ), + websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend( + { + **self.create_schema, + vol.Required("type"): f"{self.api_prefix}/create", + } + ), + ) websocket_api.async_register_command( hass, @@ -648,8 +642,8 @@ class StorageCollectionWebsocket[_StorageCollectionT: StorageCollection]: } for change in change_set ] - for connection, msg_id in self._subscribers: - connection.send_message(websocket_api.event_message(msg_id, json_msg)) + for conn, msg_id in self._subscribers: + conn.send_message(websocket_api.event_message(msg_id, json_msg)) if not self._subscribers: self._remove_subscription = ( diff --git a/homeassistant/helpers/condition.py b/homeassistant/helpers/condition.py index e15b40a78df..629cdeef942 100644 --- a/homeassistant/helpers/condition.py +++ b/homeassistant/helpers/condition.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from collections import deque -from collections.abc import Callable, Container +from collections.abc import Callable, Container, Generator from contextlib import contextmanager from datetime import datetime, time as dt_time, timedelta import functools as ft @@ -12,7 +12,6 @@ import re import sys from typing import Any, Protocol, cast -from typing_extensions import Generator import voluptuous as vol from homeassistant.components import zone as zone_cmp @@ -61,7 +60,7 @@ import homeassistant.util.dt as dt_util from . 
import config_validation as cv, entity_registry as er from .sun import get_astral_event_date -from .template import Template, attach as template_attach, render_complex +from .template import Template, render_complex from .trace import ( TraceElement, trace_append_element, @@ -511,9 +510,6 @@ def async_numeric_state_from_config(config: ConfigType) -> ConditionCheckerType: hass: HomeAssistant, variables: TemplateVarsType = None ) -> bool: """Test numeric state condition.""" - if value_template is not None: - value_template.hass = hass - errors = [] for index, entity_id in enumerate(entity_ids): try: @@ -631,7 +627,6 @@ def state_from_config(config: ConfigType) -> ConditionCheckerType: @trace_condition_function def if_state(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Test if condition.""" - template_attach(hass, for_period) errors = [] result: bool = match != ENTITY_MATCH_ANY for index, entity_id in enumerate(entity_ids): @@ -793,8 +788,6 @@ def async_template_from_config(config: ConfigType) -> ConditionCheckerType: @trace_condition_function def template_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Validate template based if-condition.""" - value_template.hass = hass - return async_template(hass, value_template, variables) return template_if diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 295cd13fed4..6e9a6d5a69d 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -34,6 +34,7 @@ from homeassistant.const import ( ATTR_FLOOR_ID, ATTR_LABEL_ID, CONF_ABOVE, + CONF_ACTION, CONF_ALIAS, CONF_ATTRIBUTE, CONF_BELOW, @@ -108,6 +109,7 @@ from homeassistant.util.yaml.objects import NodeStrClass from . 
import script_variables as script_variables_helper, template as template_helper from .frame import get_integration_logger +from .typing import VolDictType, VolSchemaType TIME_PERIOD_ERROR = "offset {} should be format 'HH:MM', 'HH:MM:SS' or 'HH:MM:SS.F'" @@ -768,9 +770,9 @@ def socket_timeout(value: Any | None) -> object: float_value = float(value) if float_value > 0.0: return float_value - raise vol.Invalid("Invalid socket timeout value. float > 0.0 required.") except Exception as err: raise vol.Invalid(f"Invalid socket timeout: {err}") from err + raise vol.Invalid("Invalid socket timeout value. float > 0.0 required.") def url( @@ -980,8 +982,8 @@ def removed( def key_value_schemas( key: str, - value_schemas: dict[Hashable, vol.Schema], - default_schema: vol.Schema | None = None, + value_schemas: dict[Hashable, VolSchemaType | Callable[[Any], dict[str, Any]]], + default_schema: VolSchemaType | None = None, default_description: str | None = None, ) -> Callable[[Any], dict[Hashable, Any]]: """Create a validator that validates based on a value for specific key. @@ -1015,12 +1017,12 @@ def key_value_schemas( # Validator helpers -def key_dependency( +def key_dependency[_KT: Hashable, _VT]( key: Hashable, dependency: Hashable -) -> Callable[[dict[Hashable, Any]], dict[Hashable, Any]]: +) -> Callable[[dict[_KT, _VT]], dict[_KT, _VT]]: """Validate that all dependencies exist for key.""" - def validator(value: dict[Hashable, Any]) -> dict[Hashable, Any]: + def validator(value: dict[_KT, _VT]) -> dict[_KT, _VT]: """Test dependencies.""" if not isinstance(value, dict): raise vol.Invalid("key dependencies require a dict") @@ -1037,6 +1039,7 @@ def key_dependency( def custom_serializer(schema: Any) -> Any: """Serialize additional types for voluptuous_serialize.""" + from .. import data_entry_flow # pylint: disable=import-outside-toplevel from . 
import selector # pylint: disable=import-outside-toplevel if schema is positive_time_period_dict: @@ -1048,6 +1051,15 @@ def custom_serializer(schema: Any) -> Any: if schema is boolean: return {"type": "boolean"} + if isinstance(schema, data_entry_flow.section): + return { + "type": "expandable", + "schema": voluptuous_serialize.convert( + schema.schema, custom_serializer=custom_serializer + ), + "expanded": not schema.options["collapsed"], + } + if isinstance(schema, multi_select): return {"type": "multi_select", "options": schema.options} @@ -1193,7 +1205,7 @@ PLATFORM_SCHEMA = vol.Schema( PLATFORM_SCHEMA_BASE = PLATFORM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) -ENTITY_SERVICE_FIELDS = { +ENTITY_SERVICE_FIELDS: VolDictType = { # Either accept static entity IDs, a single dynamic template or a mixed list # of static and dynamic templates. While this could be solved with a single # complex template, handling it like this, keeps config validation useful. @@ -1241,21 +1253,19 @@ TARGET_SERVICE_FIELDS = { _HAS_ENTITY_SERVICE_FIELD = has_at_least_one_key(*ENTITY_SERVICE_FIELDS) -def _make_entity_service_schema(schema: dict, extra: int) -> vol.Schema: +def _make_entity_service_schema(schema: dict, extra: int) -> VolSchemaType: """Create an entity service schema.""" - return vol.Schema( - vol.All( - vol.Schema( - { - # The frontend stores data here. Don't use in core. - vol.Remove("metadata"): dict, - **schema, - **ENTITY_SERVICE_FIELDS, - }, - extra=extra, - ), - _HAS_ENTITY_SERVICE_FIELD, - ) + return vol.All( + vol.Schema( + { + # The frontend stores data here. Don't use in core. 
+ vol.Remove("metadata"): dict, + **schema, + **ENTITY_SERVICE_FIELDS, + }, + extra=extra, + ), + _HAS_ENTITY_SERVICE_FIELD, ) @@ -1263,15 +1273,15 @@ BASE_ENTITY_SCHEMA = _make_entity_service_schema({}, vol.PREVENT_EXTRA) def make_entity_service_schema( - schema: dict, *, extra: int = vol.PREVENT_EXTRA -) -> vol.Schema: + schema: dict | None, *, extra: int = vol.PREVENT_EXTRA +) -> VolSchemaType: """Create an entity service schema.""" if not schema and extra == vol.PREVENT_EXTRA: # If the schema is empty and we don't allow extra keys, we can return # the base schema and avoid compiling a new schema which is the case # for ~50% of services. return BASE_ENTITY_SCHEMA - return _make_entity_service_schema(schema, extra) + return _make_entity_service_schema(schema or {}, extra) SCRIPT_CONVERSATION_RESPONSE_SCHEMA = vol.Any(template, None) @@ -1299,7 +1309,7 @@ def script_action(value: Any) -> dict: SCRIPT_SCHEMA = vol.All(ensure_list, [script_action]) -SCRIPT_ACTION_BASE_SCHEMA = { +SCRIPT_ACTION_BASE_SCHEMA: VolDictType = { vol.Optional(CONF_ALIAS): string, vol.Optional(CONF_CONTINUE_ON_ERROR): boolean, vol.Optional(CONF_ENABLED): vol.Any(boolean, template), @@ -1314,11 +1324,30 @@ EVENT_SCHEMA = vol.Schema( } ) + +def _backward_compat_service_schema(value: Any | None) -> Any: + """Backward compatibility for service schemas.""" + + if not isinstance(value, dict): + return value + + # `service` has been renamed to `action` + if CONF_SERVICE in value: + if CONF_ACTION in value: + raise vol.Invalid( + "Cannot specify both 'service' and 'action'. Please use 'action' only." 
+ ) + value[CONF_ACTION] = value.pop(CONF_SERVICE) + + return value + + SERVICE_SCHEMA = vol.All( + _backward_compat_service_schema, vol.Schema( { **SCRIPT_ACTION_BASE_SCHEMA, - vol.Exclusive(CONF_SERVICE, "service name"): vol.Any( + vol.Exclusive(CONF_ACTION, "service name"): vol.Any( service, dynamic_template ), vol.Exclusive(CONF_SERVICE_TEMPLATE, "service name"): vol.Any( @@ -1337,7 +1366,7 @@ SERVICE_SCHEMA = vol.All( vol.Remove("metadata"): dict, } ), - has_at_least_one_key(CONF_SERVICE, CONF_SERVICE_TEMPLATE), + has_at_least_one_key(CONF_ACTION, CONF_SERVICE_TEMPLATE), ) NUMERIC_STATE_THRESHOLD_SCHEMA = vol.Any( @@ -1345,7 +1374,7 @@ NUMERIC_STATE_THRESHOLD_SCHEMA = vol.Any( vol.All(str, entity_domain(["input_number", "number", "sensor", "zone"])), ) -CONDITION_BASE_SCHEMA = { +CONDITION_BASE_SCHEMA: VolDictType = { vol.Optional(CONF_ALIAS): string, vol.Optional(CONF_ENABLED): vol.Any(boolean, template), } @@ -1394,13 +1423,13 @@ STATE_CONDITION_ATTRIBUTE_SCHEMA = vol.Schema( ) -def STATE_CONDITION_SCHEMA(value: Any) -> dict: +def STATE_CONDITION_SCHEMA(value: Any) -> dict[str, Any]: """Validate a state condition.""" if not isinstance(value, dict): raise vol.Invalid("Expected a dictionary") if CONF_ATTRIBUTE in value: - validated: dict = STATE_CONDITION_ATTRIBUTE_SCHEMA(value) + validated: dict[str, Any] = STATE_CONDITION_ATTRIBUTE_SCHEMA(value) else: validated = STATE_CONDITION_STATE_SCHEMA(value) @@ -1833,6 +1862,7 @@ ACTIONS_MAP = { CONF_WAIT_FOR_TRIGGER: SCRIPT_ACTION_WAIT_FOR_TRIGGER, CONF_VARIABLES: SCRIPT_ACTION_VARIABLES, CONF_IF: SCRIPT_ACTION_IF, + CONF_ACTION: SCRIPT_ACTION_CALL_SERVICE, CONF_SERVICE: SCRIPT_ACTION_CALL_SERVICE, CONF_SERVICE_TEMPLATE: SCRIPT_ACTION_CALL_SERVICE, CONF_STOP: SCRIPT_ACTION_STOP, diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index 2adab32195b..b2cad292e3d 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -47,7 
+47,7 @@ class _BaseFlowManagerView(HomeAssistantView, Generic[_FlowManagerT]): data = result.copy() if (schema := data["data_schema"]) is None: - data["data_schema"] = [] + data["data_schema"] = [] # type: ignore[typeddict-item] # json result type else: data["data_schema"] = voluptuous_serialize.convert( schema, custom_serializer=cv.custom_serializer diff --git a/homeassistant/helpers/device.py b/homeassistant/helpers/device.py index e1b9ded5723..16212422236 100644 --- a/homeassistant/helpers/device.py +++ b/homeassistant/helpers/device.py @@ -26,7 +26,10 @@ def async_device_info_to_link_from_entity( hass: HomeAssistant, entity_id_or_uuid: str, ) -> dr.DeviceInfo | None: - """DeviceInfo with information to link a device to a configuration entry in the link category from a entity id or entity uuid.""" + """DeviceInfo with information to link a device from an entity. + + DeviceInfo will only return information to categorize as a link. + """ return async_device_info_to_link_from_device_id( hass, @@ -39,7 +42,10 @@ def async_device_info_to_link_from_device_id( hass: HomeAssistant, device_id: str | None, ) -> dr.DeviceInfo | None: - """DeviceInfo with information to link a device to a configuration entry in the link category from a device id.""" + """DeviceInfo with information to link a device from a device id. + + DeviceInfo will only return information to categorize as a link. + """ dev_reg = dr.async_get(hass) @@ -58,7 +64,11 @@ def async_remove_stale_devices_links_keep_entity_device( entry_id: str, source_entity_id_or_uuid: str, ) -> None: - """Remove the link between stales devices and a configuration entry, keeping only the device that the informed entity is linked to.""" + """Remove the link between stale devices and a configuration entry. + + Only the device passed in the source_entity_id_or_uuid parameter + linked to the configuration entry will be maintained. 
+ """ async_remove_stale_devices_links_keep_current_device( hass=hass, @@ -73,9 +83,10 @@ def async_remove_stale_devices_links_keep_current_device( entry_id: str, current_device_id: str | None, ) -> None: - """Remove the link between stales devices and a configuration entry, keeping only the device informed. + """Remove the link between stale devices and a configuration entry. - Device passed in the current_device_id parameter will be kept linked to the configuration entry. + Only the device passed in the current_device_id parameter linked to + the configuration entry will be maintained. """ dev_reg = dr.async_get(hass) diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 2a90d885d70..30001a64474 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Mapping +from datetime import datetime from enum import StrEnum from functools import cached_property, lru_cache, partial import logging @@ -23,6 +24,7 @@ from homeassistant.core import ( ) from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import async_suggest_report_issue +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import format_unserializable_data @@ -55,7 +57,7 @@ EVENT_DEVICE_REGISTRY_UPDATED: EventType[EventDeviceRegistryUpdatedData] = Event ) STORAGE_KEY = "core.device_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 5 +STORAGE_VERSION_MINOR = 8 CLEANUP_DELAY = 10 @@ -94,6 +96,7 @@ class DeviceInfo(TypedDict, total=False): configuration_url: str | URL | None connections: set[tuple[str, str]] + created_at: str default_manufacturer: str default_model: str default_name: str @@ -101,6 +104,8 @@ class DeviceInfo(TypedDict, 
total=False): identifiers: set[tuple[str, str]] manufacturer: str | None model: str | None + model_id: str | None + modified_at: str name: str | None serial_number: str | None suggested_area: str | None @@ -127,6 +132,7 @@ DEVICE_INFO_TYPES = { "identifiers", "manufacturer", "model", + "model_id", "name", "serial_number", "suggested_area", @@ -145,6 +151,9 @@ DEVICE_INFO_TYPES = { DEVICE_INFO_KEYS = set.union(*(itm for itm in DEVICE_INFO_TYPES.values())) +# Integrations which may share a device with a native integration +LOW_PRIO_CONFIG_ENTRY_DOMAINS = {"homekit_controller", "matter", "mqtt", "upnp"} + class _EventDeviceRegistryUpdatedData_CreateRemove(TypedDict): """EventDeviceRegistryUpdated data for action type 'create' and 'remove'.""" @@ -185,6 +194,35 @@ class DeviceInfoError(HomeAssistantError): self.domain = domain +class DeviceCollisionError(HomeAssistantError): + """Raised when a device collision is detected.""" + + +class DeviceIdentifierCollisionError(DeviceCollisionError): + """Raised when a device identifier collision is detected.""" + + def __init__( + self, identifiers: set[tuple[str, str]], existing_device: DeviceEntry + ) -> None: + """Initialize error.""" + super().__init__( + f"Identifiers {identifiers} already registered with {existing_device}" + ) + + +class DeviceConnectionCollisionError(DeviceCollisionError): + """Raised when a device connection collision is detected.""" + + def __init__( + self, normalized_connections: set[tuple[str, str]], existing_device: DeviceEntry + ) -> None: + """Initialize error.""" + super().__init__( + f"Connections {normalized_connections} " + f"already registered with {existing_device}" + ) + + def _validate_device_info( config_entry: ConfigEntry, device_info: DeviceInfo, @@ -244,9 +282,10 @@ class DeviceEntry: """Device Registry Entry.""" area_id: str | None = attr.ib(default=None) - config_entries: list[str] = attr.ib(factory=list) + config_entries: set[str] = attr.ib(converter=set, factory=set) 
configuration_url: str | None = attr.ib(default=None) connections: set[tuple[str, str]] = attr.ib(converter=set, factory=set) + created_at: datetime = attr.ib(factory=utcnow) disabled_by: DeviceEntryDisabler | None = attr.ib(default=None) entry_type: DeviceEntryType | None = attr.ib(default=None) hw_version: str | None = attr.ib(default=None) @@ -255,8 +294,11 @@ class DeviceEntry: labels: set[str] = attr.ib(converter=set, factory=set) manufacturer: str | None = attr.ib(default=None) model: str | None = attr.ib(default=None) + model_id: str | None = attr.ib(default=None) + modified_at: datetime = attr.ib(factory=utcnow) name_by_user: str | None = attr.ib(default=None) name: str | None = attr.ib(default=None) + primary_config_entry: str | None = attr.ib(default=None) serial_number: str | None = attr.ib(default=None) suggested_area: str | None = attr.ib(default=None) sw_version: str | None = attr.ib(default=None) @@ -278,8 +320,9 @@ class DeviceEntry: return { "area_id": self.area_id, "configuration_url": self.configuration_url, - "config_entries": self.config_entries, + "config_entries": list(self.config_entries), "connections": list(self.connections), + "created_at": self.created_at.timestamp(), "disabled_by": self.disabled_by, "entry_type": self.entry_type, "hw_version": self.hw_version, @@ -288,8 +331,11 @@ class DeviceEntry: "labels": list(self.labels), "manufacturer": self.manufacturer, "model": self.model, + "model_id": self.model_id, + "modified_at": self.modified_at.timestamp(), "name_by_user": self.name_by_user, "name": self.name, + "primary_config_entry": self.primary_config_entry, "serial_number": self.serial_number, "sw_version": self.sw_version, "via_device_id": self.via_device_id, @@ -318,9 +364,10 @@ class DeviceEntry: json_bytes( { "area_id": self.area_id, - "config_entries": self.config_entries, + "config_entries": list(self.config_entries), "configuration_url": self.configuration_url, "connections": list(self.connections), + "created_at": 
self.created_at.isoformat(), "disabled_by": self.disabled_by, "entry_type": self.entry_type, "hw_version": self.hw_version, @@ -329,8 +376,11 @@ class DeviceEntry: "labels": list(self.labels), "manufacturer": self.manufacturer, "model": self.model, + "model_id": self.model_id, + "modified_at": self.modified_at.isoformat(), "name_by_user": self.name_by_user, "name": self.name, + "primary_config_entry": self.primary_config_entry, "serial_number": self.serial_number, "sw_version": self.sw_version, "via_device_id": self.via_device_id, @@ -343,11 +393,13 @@ class DeviceEntry: class DeletedDeviceEntry: """Deleted Device Registry Entry.""" - config_entries: list[str] = attr.ib() + config_entries: set[str] = attr.ib() connections: set[tuple[str, str]] = attr.ib() identifiers: set[tuple[str, str]] = attr.ib() id: str = attr.ib() orphaned_timestamp: float | None = attr.ib() + created_at: datetime = attr.ib(factory=utcnow) + modified_at: datetime = attr.ib(factory=utcnow) def to_device_entry( self, @@ -358,8 +410,9 @@ class DeletedDeviceEntry: """Create DeviceEntry from DeletedDeviceEntry.""" return DeviceEntry( # type ignores: likely https://github.com/python/mypy/issues/8625 - config_entries=[config_entry_id], + config_entries={config_entry_id}, # type: ignore[arg-type] connections=self.connections & connections, # type: ignore[arg-type] + created_at=self.created_at, identifiers=self.identifiers & identifiers, # type: ignore[arg-type] id=self.id, is_new=True, @@ -371,11 +424,13 @@ class DeletedDeviceEntry: return json_fragment( json_bytes( { - "config_entries": self.config_entries, + "config_entries": list(self.config_entries), "connections": list(self.connections), + "created_at": self.created_at.isoformat(), "identifiers": list(self.identifiers), "id": self.id, "orphaned_timestamp": self.orphaned_timestamp, + "modified_at": self.modified_at.isoformat(), } ) ) @@ -443,7 +498,22 @@ class DeviceRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]): if 
old_minor_version < 5: # Introduced in 2024.3 for device in old_data["devices"]: - device["labels"] = device.get("labels", []) + device["labels"] = [] + if old_minor_version < 6: + # Introduced in 2024.7 + for device in old_data["devices"]: + device["primary_config_entry"] = None + if old_minor_version < 7: + # Introduced in 2024.8 + for device in old_data["devices"]: + device["model_id"] = None + if old_minor_version < 8: + # Introduced in 2024.8 + created_at = utc_from_timestamp(0).isoformat() + for device in old_data["devices"]: + device["created_at"] = device["modified_at"] = created_at + for device in old_data["deleted_devices"]: + device["created_at"] = device["modified_at"] = created_at if old_major_version > 1: raise NotImplementedError @@ -640,6 +710,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): config_entry_id: str, configuration_url: str | URL | None | UndefinedType = UNDEFINED, connections: set[tuple[str, str]] | None | UndefinedType = UNDEFINED, + created_at: str | datetime | UndefinedType = UNDEFINED, # will be ignored default_manufacturer: str | None | UndefinedType = UNDEFINED, default_model: str | None | UndefinedType = UNDEFINED, default_name: str | None | UndefinedType = UNDEFINED, @@ -650,6 +721,8 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): identifiers: set[tuple[str, str]] | None | UndefinedType = UNDEFINED, manufacturer: str | None | UndefinedType = UNDEFINED, model: str | None | UndefinedType = UNDEFINED, + model_id: str | None | UndefinedType = UNDEFINED, + modified_at: str | datetime | UndefinedType = UNDEFINED, # will be ignored name: str | None | UndefinedType = UNDEFINED, serial_number: str | None | UndefinedType = UNDEFINED, suggested_area: str | None | UndefinedType = UNDEFINED, @@ -696,6 +769,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): ("identifiers", identifiers), ("manufacturer", manufacturer), ("model", model), + ("model_id", model_id), ("name", 
name), ("serial_number", serial_number), ("suggested_area", suggested_area), @@ -759,7 +833,9 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): device = self.async_update_device( device.id, + allow_collisions=True, add_config_entry_id=config_entry_id, + add_config_entry=config_entry, configuration_url=configuration_url, device_info_type=device_info_type, disabled_by=disabled_by, @@ -769,6 +845,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): merge_connections=connections or UNDEFINED, merge_identifiers=identifiers or UNDEFINED, model=model, + model_id=model_id, name=name, serial_number=serial_number, suggested_area=suggested_area, @@ -782,11 +859,15 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): return device @callback - def async_update_device( + def async_update_device( # noqa: C901 self, device_id: str, *, + add_config_entry: ConfigEntry | UndefinedType = UNDEFINED, add_config_entry_id: str | UndefinedType = UNDEFINED, + # Temporary flag so we don't blow up when collisions are implicitly introduced + # by calls to async_get_or_create. Must not be set by integrations. 
+ allow_collisions: bool = False, area_id: str | None | UndefinedType = UNDEFINED, configuration_url: str | URL | None | UndefinedType = UNDEFINED, device_info_type: str | UndefinedType = UNDEFINED, @@ -798,6 +879,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): merge_connections: set[tuple[str, str]] | UndefinedType = UNDEFINED, merge_identifiers: set[tuple[str, str]] | UndefinedType = UNDEFINED, model: str | None | UndefinedType = UNDEFINED, + model_id: str | None | UndefinedType = UNDEFINED, name_by_user: str | None | UndefinedType = UNDEFINED, name: str | None | UndefinedType = UNDEFINED, new_connections: set[tuple[str, str]] | UndefinedType = UNDEFINED, @@ -816,6 +898,19 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): config_entries = old.config_entries + if add_config_entry_id is not UNDEFINED and add_config_entry is UNDEFINED: + config_entry = self.hass.config_entries.async_get_entry(add_config_entry_id) + if config_entry is None: + raise HomeAssistantError( + f"Can't link device to unknown config entry {add_config_entry_id}" + ) + add_config_entry = config_entry + + if not new_connections and not new_identifiers: + raise HomeAssistantError( + "A device must have at least one of identifiers or connections" + ) + if merge_connections is not UNDEFINED and new_connections is not UNDEFINED: raise HomeAssistantError( "Cannot define both merge_connections and new_connections" @@ -853,32 +948,40 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): area = ar.async_get(self.hass).async_get_or_create(suggested_area) area_id = area.id - if add_config_entry_id is not UNDEFINED: - # primary ones have to be at the start. 
- if device_info_type == "primary": - # Move entry to first spot - if not config_entries or config_entries[0] != add_config_entry_id: - config_entries = [add_config_entry_id] + [ - entry - for entry in config_entries - if entry != add_config_entry_id - ] + if add_config_entry is not UNDEFINED: + primary_entry_id = old.primary_config_entry + if ( + device_info_type == "primary" + and add_config_entry.entry_id != primary_entry_id + ): + if ( + primary_entry_id is None + or not ( + primary_entry := self.hass.config_entries.async_get_entry( + primary_entry_id + ) + ) + or primary_entry.domain in LOW_PRIO_CONFIG_ENTRY_DOMAINS + ): + new_values["primary_config_entry"] = add_config_entry.entry_id + old_values["primary_config_entry"] = old.primary_config_entry - # Not primary, append - elif add_config_entry_id not in config_entries: - config_entries = [*config_entries, add_config_entry_id] + if add_config_entry.entry_id not in old.config_entries: + config_entries = old.config_entries | {add_config_entry.entry_id} if ( remove_config_entry_id is not UNDEFINED and remove_config_entry_id in config_entries ): - if config_entries == [remove_config_entry_id]: + if config_entries == {remove_config_entry_id}: self.async_remove_device(device_id) return None - config_entries = [ - entry for entry in config_entries if entry != remove_config_entry_id - ] + if remove_config_entry_id == old.primary_config_entry: + new_values["primary_config_entry"] = None + old_values["primary_config_entry"] = old.primary_config_entry + + config_entries = config_entries - {remove_config_entry_id} if config_entries != old.config_entries: new_values["config_entries"] = config_entries @@ -894,12 +997,36 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): new_values[attr_name] = old_value | setvalue old_values[attr_name] = old_value + if merge_connections is not UNDEFINED: + normalized_connections = self._validate_connections( + device_id, + merge_connections, + allow_collisions, + ) + 
old_connections = old.connections + if not normalized_connections.issubset(old_connections): + new_values["connections"] = old_connections | normalized_connections + old_values["connections"] = old_connections + + if merge_identifiers is not UNDEFINED: + merge_identifiers = self._validate_identifiers( + device_id, merge_identifiers, allow_collisions + ) + old_identifiers = old.identifiers + if not merge_identifiers.issubset(old_identifiers): + new_values["identifiers"] = old_identifiers | merge_identifiers + old_values["identifiers"] = old_identifiers + if new_connections is not UNDEFINED: - new_values["connections"] = _normalize_connections(new_connections) + new_values["connections"] = self._validate_connections( + device_id, new_connections, False + ) old_values["connections"] = old.connections if new_identifiers is not UNDEFINED: - new_values["identifiers"] = new_identifiers + new_values["identifiers"] = self._validate_identifiers( + device_id, new_identifiers, False + ) old_values["identifiers"] = old.identifiers if configuration_url is not UNDEFINED: @@ -914,6 +1041,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): ("labels", labels), ("manufacturer", manufacturer), ("model", model), + ("model_id", model_id), ("name", name), ("name_by_user", name_by_user), ("serial_number", serial_number), @@ -931,6 +1059,10 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): if not new_values: return old + if not RUNTIME_ONLY_ATTRS.issuperset(new_values): + # Change modified_at if we are changing something that we store + new_values["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("device_registry.async_update_device") new = attr.evolve(old, **new_values) self.devices[device_id] = new @@ -955,6 +1087,53 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): return new + @callback + def _validate_connections( + self, + device_id: str, + connections: set[tuple[str, str]], + allow_collisions: bool, + ) 
-> set[tuple[str, str]]: + """Normalize and validate connections, raise on collision with other devices.""" + normalized_connections = _normalize_connections(connections) + if allow_collisions: + return normalized_connections + + for connection in normalized_connections: + # We need to iterate over each connection because if there is a + # conflict, the index will only see the last one and we will not + # be able to tell which one caused the conflict + if ( + existing_device := self.async_get_device(connections={connection}) + ) and existing_device.id != device_id: + raise DeviceConnectionCollisionError( + normalized_connections, existing_device + ) + + return normalized_connections + + @callback + def _validate_identifiers( + self, + device_id: str, + identifiers: set[tuple[str, str]], + allow_collisions: bool, + ) -> set[tuple[str, str]]: + """Validate identifiers, raise on collision with other devices.""" + if allow_collisions: + return identifiers + + for identifier in identifiers: + # We need to iterate over each identifier because if there is a + # conflict, the index will only see the last one and we will not + # be able to tell which one caused the conflict + if ( + existing_device := self.async_get_device(identifiers={identifier}) + ) and existing_device.id != device_id: + raise DeviceIdentifierCollisionError(identifiers, existing_device) + + return identifiers + @callback def async_remove_device(self, device_id: str) -> None: """Remove a device from the device registry.""" @@ -963,6 +1142,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): self.deleted_devices[device_id] = DeletedDeviceEntry( config_entries=device.config_entries, connections=device.connections, + created_at=device.created_at, identifiers=device.identifiers, id=device.id, orphaned_timestamp=None, @@ -991,13 +1171,14 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): for device in data["devices"]: devices[device["id"]] = DeviceEntry( 
area_id=device["area_id"], - config_entries=device["config_entries"], + config_entries=set(device["config_entries"]), configuration_url=device["configuration_url"], # type ignores (if tuple arg was cast): likely https://github.com/python/mypy/issues/8625 connections={ tuple(conn) # type: ignore[misc] for conn in device["connections"] }, + created_at=datetime.fromisoformat(device["created_at"]), disabled_by=( DeviceEntryDisabler(device["disabled_by"]) if device["disabled_by"] @@ -1017,8 +1198,11 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): labels=set(device["labels"]), manufacturer=device["manufacturer"], model=device["model"], + model_id=device["model_id"], + modified_at=datetime.fromisoformat(device["modified_at"]), name_by_user=device["name_by_user"], name=device["name"], + primary_config_entry=device["primary_config_entry"], serial_number=device["serial_number"], sw_version=device["sw_version"], via_device_id=device["via_device_id"], @@ -1026,10 +1210,12 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): # Introduced in 0.111 for device in data["deleted_devices"]: deleted_devices[device["id"]] = DeletedDeviceEntry( - config_entries=device["config_entries"], + config_entries=set(device["config_entries"]), connections={tuple(conn) for conn in device["connections"]}, + created_at=datetime.fromisoformat(device["created_at"]), identifiers={tuple(iden) for iden in device["identifiers"]}, id=device["id"], + modified_at=datetime.fromisoformat(device["modified_at"]), orphaned_timestamp=device["orphaned_timestamp"], ) @@ -1057,15 +1243,13 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): config_entries = deleted_device.config_entries if config_entry_id not in config_entries: continue - if config_entries == [config_entry_id]: + if config_entries == {config_entry_id}: # Add a time stamp when the deleted device became orphaned self.deleted_devices[deleted_device.id] = attr.evolve( - deleted_device, 
orphaned_timestamp=now_time, config_entries=[] + deleted_device, orphaned_timestamp=now_time, config_entries=set() ) else: - config_entries = [ - entry for entry in config_entries if entry != config_entry_id - ] + config_entries = config_entries - {config_entry_id} # No need to reindex here since we currently # do not have a lookup by config entry self.deleted_devices[deleted_device.id] = attr.evolve( @@ -1171,8 +1355,8 @@ def async_config_entry_disabled_by_changed( if device.disabled: # Device already disabled, do not overwrite continue - if len(device.config_entries) > 1 and any( - entry_id in enabled_config_entries for entry_id in device.config_entries + if len(device.config_entries) > 1 and device.config_entries.intersection( + enabled_config_entries ): continue registry.async_update_device( diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index cf910a5cba8..dbc1a036ef6 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -263,8 +263,6 @@ class CalculatedState: attributes: dict[str, Any] # Capability attributes returned by the capability_attributes property capability_attributes: Mapping[str, Any] | None - # Attributes which may be overridden by the entity registry - shadowed_attributes: Mapping[str, Any] class CachedProperties(type): @@ -1042,18 +1040,20 @@ class Entity( @callback def _async_calculate_state(self) -> CalculatedState: """Calculate state string and attribute mapping.""" - return CalculatedState(*self.__async_calculate_state()) + state, attr, capabilities, _, _ = self.__async_calculate_state() + return CalculatedState(state, attr, capabilities) def __async_calculate_state( self, - ) -> tuple[str, dict[str, Any], Mapping[str, Any] | None, Mapping[str, Any]]: + ) -> tuple[str, dict[str, Any], Mapping[str, Any] | None, str | None, int | None]: """Calculate state string and attribute mapping. - Returns a tuple (state, attr, capability_attr, shadowed_attr). 
+ Returns a tuple: state - the stringified state attr - the attribute dictionary capability_attr - a mapping with capability attributes - shadowed_attr - a mapping with attributes which may be overridden + original_device_class - the device class which may be overridden + supported_features - the supported features This method is called when writing the state to avoid the overhead of creating a dataclass object. @@ -1062,7 +1062,6 @@ class Entity( capability_attr = self.capability_attributes attr = capability_attr.copy() if capability_attr else {} - shadowed_attr = {} available = self.available # only call self.available once per update cycle state = self._stringify_state(available) @@ -1081,30 +1080,27 @@ class Entity( if (attribution := self.attribution) is not None: attr[ATTR_ATTRIBUTION] = attribution - shadowed_attr[ATTR_DEVICE_CLASS] = self.device_class + original_device_class = self.device_class if ( - device_class := (entry and entry.device_class) - or shadowed_attr[ATTR_DEVICE_CLASS] + device_class := (entry and entry.device_class) or original_device_class ) is not None: attr[ATTR_DEVICE_CLASS] = str(device_class) if (entity_picture := self.entity_picture) is not None: attr[ATTR_ENTITY_PICTURE] = entity_picture - shadowed_attr[ATTR_ICON] = self.icon - if (icon := (entry and entry.icon) or shadowed_attr[ATTR_ICON]) is not None: + if (icon := (entry and entry.icon) or self.icon) is not None: attr[ATTR_ICON] = icon - shadowed_attr[ATTR_FRIENDLY_NAME] = self._friendly_name_internal() if ( - name := (entry and entry.name) or shadowed_attr[ATTR_FRIENDLY_NAME] + name := (entry and entry.name) or self._friendly_name_internal() ) is not None: attr[ATTR_FRIENDLY_NAME] = name if (supported_features := self.supported_features) is not None: attr[ATTR_SUPPORTED_FEATURES] = supported_features - return (state, attr, capability_attr, shadowed_attr) + return (state, attr, capability_attr, original_device_class, supported_features) @callback def _async_write_ha_state(self) 
-> None: @@ -1130,14 +1126,15 @@ class Entity( return state_calculate_start = timer() - state, attr, capabilities, shadowed_attr = self.__async_calculate_state() + state, attr, capabilities, original_device_class, supported_features = ( + self.__async_calculate_state() + ) time_now = timer() if entry: # Make sure capabilities in the entity registry are up to date. Capabilities # include capability attributes, device class and supported features - original_device_class: str | None = shadowed_attr[ATTR_DEVICE_CLASS] - supported_features: int = attr.get(ATTR_SUPPORTED_FEATURES) or 0 + supported_features = supported_features or 0 if ( capabilities != entry.capabilities or original_device_class != entry.original_device_class @@ -1188,11 +1185,18 @@ class Entity( report_issue, ) - # Overwrite properties that have been set in the config file. - if (customize := hass.data.get(DATA_CUSTOMIZE)) and ( - custom := customize.get(entity_id) - ): - attr.update(custom) + try: + # Most of the time this will already be + # set and since try is near zero cost + # on py3.11+ its faster to assume it is + # set and catch the exception if it is not. + customize = hass.data[DATA_CUSTOMIZE] + except KeyError: + pass + else: + # Overwrite properties that have been set in the config file. 
+ if custom := customize.get(entity_id): + attr.update(custom) if ( self._context_set is not None @@ -1202,7 +1206,7 @@ class Entity( self._context_set = None try: - hass.states.async_set( + hass.states.async_set_internal( entity_id, state, attr, diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index aae0e2058e4..76abb3020d1 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -5,13 +5,11 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Iterable from datetime import timedelta -from functools import partial import logging from types import ModuleType from typing import Any, Generic from typing_extensions import TypeVar -import voluptuous as vol from homeassistant import config as conf_util from homeassistant.config_entries import ConfigEntry @@ -36,7 +34,7 @@ from homeassistant.setup import async_prepare_setup_platform from . import config_validation as cv, discovery, entity, service from .entity_platform import EntityPlatform -from .typing import ConfigType, DiscoveryInfoType +from .typing import ConfigType, DiscoveryInfoType, VolDictType, VolSchemaType DEFAULT_SCAN_INTERVAL = timedelta(seconds=15) DATA_INSTANCES = "entity_components" @@ -222,7 +220,7 @@ class EntityComponent(Generic[_EntityT]): def async_register_legacy_entity_service( self, name: str, - schema: dict[str | vol.Marker, Any] | vol.Schema, + schema: VolDictType | VolSchemaType, func: str | Callable[..., Any], required_features: list[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, @@ -259,31 +257,22 @@ class EntityComponent(Generic[_EntityT]): def async_register_entity_service( self, name: str, - schema: dict[str | vol.Marker, Any] | vol.Schema, + schema: VolDictType | VolSchemaType | None, func: str | Callable[..., Any], required_features: list[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, ) 
-> None: """Register an entity service.""" - if isinstance(schema, dict): - schema = cv.make_entity_service_schema(schema) - - service_func: str | HassJob[..., Any] - service_func = func if isinstance(func, str) else HassJob(func) - - self.hass.services.async_register( + service.async_register_entity_service( + self.hass, self.domain, name, - partial( - service.entity_service_call, - self.hass, - self._entities, - service_func, - required_features=required_features, - ), - schema, - supports_response, + entities=self._entities, + func=func, job_type=HassJobType.Coroutinefunction, + required_features=required_features, + schema=schema, + supports_response=supports_response, ) async def async_setup_platform( diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index 4dbe3ac68d8..ce107d63b73 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -6,12 +6,9 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine, Iterable from contextvars import ContextVar from datetime import timedelta -from functools import partial from logging import Logger, getLogger from typing import TYPE_CHECKING, Any, Protocol -import voluptuous as vol - from homeassistant import config_entries from homeassistant.const import ( ATTR_RESTORED, @@ -22,7 +19,6 @@ from homeassistant.core import ( CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, - HassJob, HomeAssistant, ServiceCall, SupportsResponse, @@ -43,7 +39,6 @@ from homeassistant.util.async_ import create_eager_task from homeassistant.util.hass_dict import HassKey from . import ( - config_validation as cv, device_registry as dev_reg, entity_registry as ent_reg, service, @@ -52,7 +47,7 @@ from . 
import ( from .entity_registry import EntityRegistry, RegistryEntryDisabler, RegistryEntryHider from .event import async_call_later from .issue_registry import IssueSeverity, async_create_issue -from .typing import UNDEFINED, ConfigType, DiscoveryInfoType +from .typing import UNDEFINED, ConfigType, DiscoveryInfoType, VolDictType, VolSchemaType if TYPE_CHECKING: from .entity import Entity @@ -987,7 +982,7 @@ class EntityPlatform: def async_register_entity_service( self, name: str, - schema: dict[str | vol.Marker, Any] | vol.Schema, + schema: VolDictType | VolSchemaType | None, func: str | Callable[..., Any], required_features: Iterable[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, @@ -999,24 +994,16 @@ class EntityPlatform: if self.hass.services.has_service(self.platform_name, name): return - if isinstance(schema, dict): - schema = cv.make_entity_service_schema(schema) - - service_func: str | HassJob[..., Any] - service_func = func if isinstance(func, str) else HassJob(func) - - self.hass.services.async_register( + service.async_register_entity_service( + self.hass, self.platform_name, name, - partial( - service.entity_service_call, - self.hass, - self.domain_platform_entities, - service_func, - required_features=required_features, - ), - schema, - supports_response, + entities=self.domain_platform_entities, + func=func, + job_type=None, + required_features=required_features, + schema=schema, + supports_response=supports_response, ) async def _async_update_entity_states(self) -> None: diff --git a/homeassistant/helpers/entity_registry.py b/homeassistant/helpers/entity_registry.py index dabe2e61917..5d17c0c46b1 100644 --- a/homeassistant/helpers/entity_registry.py +++ b/homeassistant/helpers/entity_registry.py @@ -48,6 +48,7 @@ from homeassistant.core import ( from homeassistant.exceptions import MaxLengthExceeded from homeassistant.loader import async_suggest_report_issue from homeassistant.util import slugify, uuid as uuid_util 
+from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import format_unserializable_data @@ -74,7 +75,7 @@ EVENT_ENTITY_REGISTRY_UPDATED: EventType[EventEntityRegistryUpdatedData] = Event _LOGGER = logging.getLogger(__name__) STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 14 +STORAGE_VERSION_MINOR = 15 STORAGE_KEY = "core.entity_registry" CLEANUP_INTERVAL = 3600 * 24 @@ -174,6 +175,7 @@ class RegistryEntry: categories: dict[str, str] = attr.ib(factory=dict) capabilities: Mapping[str, Any] | None = attr.ib(default=None) config_entry_id: str | None = attr.ib(default=None) + created_at: datetime = attr.ib(factory=utcnow) device_class: str | None = attr.ib(default=None) device_id: str | None = attr.ib(default=None) domain: str = attr.ib(init=False, repr=False) @@ -187,6 +189,7 @@ class RegistryEntry: ) has_entity_name: bool = attr.ib(default=False) labels: set[str] = attr.ib(factory=set) + modified_at: datetime = attr.ib(factory=utcnow) name: str | None = attr.ib(default=None) options: ReadOnlyEntityOptionsType = attr.ib( default=None, converter=_protect_entity_options @@ -271,6 +274,7 @@ class RegistryEntry: "area_id": self.area_id, "categories": self.categories, "config_entry_id": self.config_entry_id, + "created_at": self.created_at.timestamp(), "device_id": self.device_id, "disabled_by": self.disabled_by, "entity_category": self.entity_category, @@ -280,6 +284,7 @@ class RegistryEntry: "icon": self.icon, "id": self.id, "labels": list(self.labels), + "modified_at": self.modified_at.timestamp(), "name": self.name, "options": self.options, "original_name": self.original_name, @@ -330,6 +335,7 @@ class RegistryEntry: "categories": self.categories, "capabilities": self.capabilities, "config_entry_id": self.config_entry_id, + "created_at": self.created_at.isoformat(), "device_class": self.device_class, "device_id": self.device_id, 
"disabled_by": self.disabled_by, @@ -340,6 +346,7 @@ class RegistryEntry: "id": self.id, "has_entity_name": self.has_entity_name, "labels": list(self.labels), + "modified_at": self.modified_at.isoformat(), "name": self.name, "options": self.options, "original_device_class": self.original_device_class, @@ -395,6 +402,8 @@ class DeletedRegistryEntry: domain: str = attr.ib(init=False, repr=False) id: str = attr.ib() orphaned_timestamp: float | None = attr.ib() + created_at: datetime = attr.ib(factory=utcnow) + modified_at: datetime = attr.ib(factory=utcnow) @domain.default def _domain_default(self) -> str: @@ -408,8 +417,10 @@ class DeletedRegistryEntry: json_bytes( { "config_entry_id": self.config_entry_id, + "created_at": self.created_at.isoformat(), "entity_id": self.entity_id, "id": self.id, + "modified_at": self.modified_at.isoformat(), "orphaned_timestamp": self.orphaned_timestamp, "platform": self.platform, "unique_id": self.unique_id, @@ -429,88 +440,97 @@ class EntityRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]): ) -> dict: """Migrate to the new version.""" data = old_data - if old_major_version == 1 and old_minor_version < 2: - # Version 1.2 implements migration and freezes the available keys - for entity in data["entities"]: - # Populate keys which were introduced before version 1.2 - entity.setdefault("area_id", None) - entity.setdefault("capabilities", {}) - entity.setdefault("config_entry_id", None) - entity.setdefault("device_class", None) - entity.setdefault("device_id", None) - entity.setdefault("disabled_by", None) - entity.setdefault("entity_category", None) - entity.setdefault("icon", None) - entity.setdefault("name", None) - entity.setdefault("original_icon", None) - entity.setdefault("original_name", None) - entity.setdefault("supported_features", 0) - entity.setdefault("unit_of_measurement", None) + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and freezes the available keys + for 
entity in data["entities"]: + # Populate keys which were introduced before version 1.2 + entity.setdefault("area_id", None) + entity.setdefault("capabilities", {}) + entity.setdefault("config_entry_id", None) + entity.setdefault("device_class", None) + entity.setdefault("device_id", None) + entity.setdefault("disabled_by", None) + entity.setdefault("entity_category", None) + entity.setdefault("icon", None) + entity.setdefault("name", None) + entity.setdefault("original_icon", None) + entity.setdefault("original_name", None) + entity.setdefault("supported_features", 0) + entity.setdefault("unit_of_measurement", None) - if old_major_version == 1 and old_minor_version < 3: - # Version 1.3 adds original_device_class - for entity in data["entities"]: - # Move device_class to original_device_class - entity["original_device_class"] = entity["device_class"] - entity["device_class"] = None + if old_minor_version < 3: + # Version 1.3 adds original_device_class + for entity in data["entities"]: + # Move device_class to original_device_class + entity["original_device_class"] = entity["device_class"] + entity["device_class"] = None - if old_major_version == 1 and old_minor_version < 4: - # Version 1.4 adds id - for entity in data["entities"]: - entity["id"] = uuid_util.random_uuid_hex() + if old_minor_version < 4: + # Version 1.4 adds id + for entity in data["entities"]: + entity["id"] = uuid_util.random_uuid_hex() - if old_major_version == 1 and old_minor_version < 5: - # Version 1.5 adds entity options - for entity in data["entities"]: - entity["options"] = {} + if old_minor_version < 5: + # Version 1.5 adds entity options + for entity in data["entities"]: + entity["options"] = {} - if old_major_version == 1 and old_minor_version < 6: - # Version 1.6 adds hidden_by - for entity in data["entities"]: - entity["hidden_by"] = None + if old_minor_version < 6: + # Version 1.6 adds hidden_by + for entity in data["entities"]: + entity["hidden_by"] = None - if old_major_version == 1 
and old_minor_version < 7: - # Version 1.7 adds has_entity_name - for entity in data["entities"]: - entity["has_entity_name"] = False + if old_minor_version < 7: + # Version 1.7 adds has_entity_name + for entity in data["entities"]: + entity["has_entity_name"] = False - if old_major_version == 1 and old_minor_version < 8: - # Cleanup after frontend bug which incorrectly updated device_class - # Fixed by frontend PR #13551 - for entity in data["entities"]: - domain = split_entity_id(entity["entity_id"])[0] - if domain in [Platform.BINARY_SENSOR, Platform.COVER]: - continue - entity["device_class"] = None + if old_minor_version < 8: + # Cleanup after frontend bug which incorrectly updated device_class + # Fixed by frontend PR #13551 + for entity in data["entities"]: + domain = split_entity_id(entity["entity_id"])[0] + if domain in [Platform.BINARY_SENSOR, Platform.COVER]: + continue + entity["device_class"] = None - if old_major_version == 1 and old_minor_version < 9: - # Version 1.9 adds translation_key - for entity in data["entities"]: - entity["translation_key"] = None + if old_minor_version < 9: + # Version 1.9 adds translation_key + for entity in data["entities"]: + entity["translation_key"] = None - if old_major_version == 1 and old_minor_version < 10: - # Version 1.10 adds aliases - for entity in data["entities"]: - entity["aliases"] = [] + if old_minor_version < 10: + # Version 1.10 adds aliases + for entity in data["entities"]: + entity["aliases"] = [] - if old_major_version == 1 and old_minor_version < 11: - # Version 1.11 adds deleted_entities - data["deleted_entities"] = data.get("deleted_entities", []) + if old_minor_version < 11: + # Version 1.11 adds deleted_entities + data["deleted_entities"] = data.get("deleted_entities", []) - if old_major_version == 1 and old_minor_version < 12: - # Version 1.12 adds previous_unique_id - for entity in data["entities"]: - entity["previous_unique_id"] = None + if old_minor_version < 12: + # Version 1.12 adds 
previous_unique_id + for entity in data["entities"]: + entity["previous_unique_id"] = None - if old_major_version == 1 and old_minor_version < 13: - # Version 1.13 adds labels - for entity in data["entities"]: - entity["labels"] = [] + if old_minor_version < 13: + # Version 1.13 adds labels + for entity in data["entities"]: + entity["labels"] = [] - if old_major_version == 1 and old_minor_version < 14: - # Version 1.14 adds categories - for entity in data["entities"]: - entity["categories"] = {} + if old_minor_version < 14: + # Version 1.14 adds categories + for entity in data["entities"]: + entity["categories"] = {} + + if old_minor_version < 15: + # Version 1.15 adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for entity in data["entities"]: + entity["created_at"] = entity["modified_at"] = created_at + for entity in data["deleted_entities"]: + entity["created_at"] = entity["modified_at"] = created_at if old_major_version > 1: raise NotImplementedError @@ -837,10 +857,12 @@ class EntityRegistry(BaseRegistry): ) entity_registry_id: str | None = None + created_at = utcnow() deleted_entity = self.deleted_entities.pop((domain, platform, unique_id), None) if deleted_entity is not None: # Restore id entity_registry_id = deleted_entity.id + created_at = deleted_entity.created_at entity_id = self.async_generate_entity_id( domain, @@ -865,6 +887,7 @@ class EntityRegistry(BaseRegistry): entry = RegistryEntry( capabilities=none_if_undefined(capabilities), config_entry_id=none_if_undefined(config_entry_id), + created_at=created_at, device_id=none_if_undefined(device_id), disabled_by=disabled_by, entity_category=none_if_undefined(entity_category), @@ -906,6 +929,7 @@ class EntityRegistry(BaseRegistry): orphaned_timestamp = None if config_entry_id else time.time() self.deleted_entities[key] = DeletedRegistryEntry( config_entry_id=config_entry_id, + created_at=entity.created_at, entity_id=entity_id, id=entity.id, 
orphaned_timestamp=orphaned_timestamp, @@ -1093,6 +1117,8 @@ class EntityRegistry(BaseRegistry): if not new_values: return old + new_values["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("entity_registry.async_update_entity") new = self.entities[entity_id] = attr.evolve(old, **new_values) @@ -1260,6 +1286,7 @@ class EntityRegistry(BaseRegistry): categories=entity["categories"], capabilities=entity["capabilities"], config_entry_id=entity["config_entry_id"], + created_at=datetime.fromisoformat(entity["created_at"]), device_class=entity["device_class"], device_id=entity["device_id"], disabled_by=RegistryEntryDisabler(entity["disabled_by"]) @@ -1276,6 +1303,7 @@ class EntityRegistry(BaseRegistry): id=entity["id"], has_entity_name=entity["has_entity_name"], labels=set(entity["labels"]), + modified_at=datetime.fromisoformat(entity["modified_at"]), name=entity["name"], options=entity["options"], original_device_class=entity["original_device_class"], @@ -1307,8 +1335,10 @@ class EntityRegistry(BaseRegistry): ) deleted_entities[key] = DeletedRegistryEntry( config_entry_id=entity["config_entry_id"], + created_at=datetime.fromisoformat(entity["created_at"]), entity_id=entity["entity_id"], id=entity["id"], + modified_at=datetime.fromisoformat(entity["modified_at"]), orphaned_timestamp=entity["orphaned_timestamp"], platform=entity["platform"], unique_id=entity["unique_id"], diff --git a/homeassistant/helpers/entityfilter.py b/homeassistant/helpers/entityfilter.py index 24b65cba82a..1eaa0fb1404 100644 --- a/homeassistant/helpers/entityfilter.py +++ b/homeassistant/helpers/entityfilter.py @@ -4,7 +4,8 @@ from __future__ import annotations from collections.abc import Callable import fnmatch -from functools import lru_cache +from functools import lru_cache, partial +import operator import re import voluptuous as vol @@ -195,7 +196,7 @@ def _generate_filter_from_sets_and_pattern_lists( # Case 1 - No filter # - All entities included if not have_include and not 
have_exclude: - return lambda entity_id: True + return bool # Case 2 - Only includes # - Entity listed in entities include: include @@ -280,4 +281,4 @@ def _generate_filter_from_sets_and_pattern_lists( # Case 6 - No Domain and/or glob includes or excludes # - Entity listed in entities include: include # - Otherwise: exclude - return lambda entity_id: entity_id in include_e + return partial(operator.contains, include_e) diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 4150d871b6b..38f461d8d7a 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -17,6 +17,7 @@ from typing import TYPE_CHECKING, Any, Concatenate, Generic, TypeVar from homeassistant.const import ( EVENT_CORE_CONFIG_UPDATE, EVENT_STATE_CHANGED, + EVENT_STATE_REPORTED, MATCH_ALL, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET, @@ -26,6 +27,8 @@ from homeassistant.core import ( Event, # Explicit reexport of 'EventStateChangedData' for backwards compatibility EventStateChangedData as EventStateChangedData, # noqa: PLC0414 + EventStateEventData, + EventStateReportedData, HassJob, HassJobType, HomeAssistant, @@ -57,6 +60,9 @@ from .typing import TemplateVarsType _TRACK_STATE_CHANGE_DATA: HassKey[_KeyedEventData[EventStateChangedData]] = HassKey( "track_state_change_data" ) +_TRACK_STATE_REPORT_DATA: HassKey[_KeyedEventData[EventStateReportedData]] = HassKey( + "track_state_report_data" +) _TRACK_STATE_ADDED_DOMAIN_DATA: HassKey[_KeyedEventData[EventStateChangedData]] = ( HassKey("track_state_added_domain_data") ) @@ -84,6 +90,7 @@ RANDOM_MICROSECOND_MIN = 50000 RANDOM_MICROSECOND_MAX = 500000 _TypedDictT = TypeVar("_TypedDictT", bound=Mapping[str, Any]) +_StateEventDataT = TypeVar("_StateEventDataT", bound=EventStateEventData) @dataclass(slots=True, frozen=True) @@ -321,11 +328,21 @@ def async_track_state_change_event( return _async_track_state_change_event(hass, entity_ids, action, job_type) +@callback +def _async_dispatch_entity_id_event_soon( + hass: 
HomeAssistant, + callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], + event: Event[_StateEventDataT], +) -> None: + """Dispatch to listeners soon to ensure one event loop runs before dispatch.""" + hass.loop.call_soon(_async_dispatch_entity_id_event, hass, callbacks, event) + + @callback def _async_dispatch_entity_id_event( hass: HomeAssistant, - callbacks: dict[str, list[HassJob[[Event[EventStateChangedData]], Any]]], - event: Event[EventStateChangedData], + callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], + event: Event[_StateEventDataT], ) -> None: """Dispatch to listeners.""" if not (callbacks_list := callbacks.get(event.data["entity_id"])): @@ -342,10 +359,10 @@ def _async_dispatch_entity_id_event( @callback -def _async_state_change_filter( +def _async_state_filter( hass: HomeAssistant, - callbacks: dict[str, list[HassJob[[Event[EventStateChangedData]], Any]]], - event_data: EventStateChangedData, + callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], + event_data: _StateEventDataT, ) -> bool: """Filter state changes by entity_id.""" return event_data["entity_id"] in callbacks @@ -354,8 +371,8 @@ def _async_state_change_filter( _KEYED_TRACK_STATE_CHANGE = _KeyedEventTracker( key=_TRACK_STATE_CHANGE_DATA, event_type=EVENT_STATE_CHANGED, - dispatcher_callable=_async_dispatch_entity_id_event, - filter_callable=_async_state_change_filter, + dispatcher_callable=_async_dispatch_entity_id_event_soon, + filter_callable=_async_state_filter, ) @@ -372,6 +389,26 @@ def _async_track_state_change_event( ) +_KEYED_TRACK_STATE_REPORT = _KeyedEventTracker( + key=_TRACK_STATE_REPORT_DATA, + event_type=EVENT_STATE_REPORTED, + dispatcher_callable=_async_dispatch_entity_id_event, + filter_callable=_async_state_filter, +) + + +def async_track_state_report_event( + hass: HomeAssistant, + entity_ids: str | Iterable[str], + action: Callable[[Event[EventStateReportedData]], Any], + job_type: HassJobType | None = None, +) -> 
CALLBACK_TYPE: + """Track EVENT_STATE_REPORTED by entity_id without lowercasing.""" + return _async_track_event( + _KEYED_TRACK_STATE_REPORT, hass, entity_ids, action, job_type + ) + + @callback def _remove_empty_listener() -> None: """Remove a listener that does nothing.""" @@ -939,8 +976,6 @@ class TrackTemplateResultInfo: self.hass = hass self._job = HassJob(action, f"track template result {track_templates}") - for track_template_ in track_templates: - track_template_.template.hass = hass self._track_templates = track_templates self._has_super_template = has_super_template diff --git a/homeassistant/helpers/floor_registry.py b/homeassistant/helpers/floor_registry.py index 9bf8a2a5d26..f14edef293a 100644 --- a/homeassistant/helpers/floor_registry.py +++ b/homeassistant/helpers/floor_registry.py @@ -5,10 +5,12 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass -from typing import Literal, TypedDict +from datetime import datetime +from typing import Any, Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback from homeassistant.util import slugify +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -28,6 +30,7 @@ EVENT_FLOOR_REGISTRY_UPDATED: EventType[EventFloorRegistryUpdatedData] = EventTy ) STORAGE_KEY = "core.floor_registry" STORAGE_VERSION_MAJOR = 1 +STORAGE_VERSION_MINOR = 2 class _FloorStoreData(TypedDict): @@ -38,6 +41,8 @@ class _FloorStoreData(TypedDict): icon: str | None level: int | None name: str + created_at: str + modified_at: str class FloorRegistryStoreData(TypedDict): @@ -66,6 +71,29 @@ class FloorEntry(NormalizedNameBaseRegistryEntry): level: int | None = None +class FloorRegistryStore(Store[FloorRegistryStoreData]): + """Store floor registry data.""" + + async def _async_migrate_func( + self, + old_major_version: int, + 
old_minor_version: int, + old_data: dict[str, list[dict[str, Any]]], + ) -> FloorRegistryStoreData: + """Migrate to the new version.""" + if old_major_version > STORAGE_VERSION_MAJOR: + raise ValueError("Can't migrate to future version") + + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for floor in old_data["floors"]: + floor["created_at"] = floor["modified_at"] = created_at + + return old_data # type: ignore[return-value] + + class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): """Class to hold a registry of floors.""" @@ -75,11 +103,12 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): def __init__(self, hass: HomeAssistant) -> None: """Initialize the floor registry.""" self.hass = hass - self._store = Store( + self._store = FloorRegistryStore( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, + minor_version=STORAGE_VERSION_MINOR, ) @callback @@ -175,7 +204,7 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): ) -> FloorEntry: """Update name of the floor.""" old = self.floors[floor_id] - changes = { + changes: dict[str, Any] = { attr_name: value for attr_name, value in ( ("aliases", aliases), @@ -191,8 +220,10 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): if not changes: return old + changes["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("floor_registry.async_update") - new = self.floors[floor_id] = dataclasses.replace(old, **changes) # type: ignore[arg-type] + new = self.floors[floor_id] = dataclasses.replace(old, **changes) self.async_schedule_save() self.hass.bus.async_fire_internal( @@ -220,6 +251,8 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): name=floor["name"], level=floor["level"], normalized_name=normalized_name, + created_at=datetime.fromisoformat(floor["created_at"]), + modified_at=datetime.fromisoformat(floor["modified_at"]), ) 
self.floors = floors @@ -236,6 +269,8 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): "icon": entry.icon, "level": entry.level, "name": entry.name, + "created_at": entry.created_at.isoformat(), + "modified_at": entry.modified_at.isoformat(), } for entry in self.floors.values() ] diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index b1ddf5eacc7..be9b57bf814 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -4,10 +4,10 @@ from __future__ import annotations from abc import abstractmethod import asyncio -from collections.abc import Collection, Coroutine, Iterable +from collections.abc import Callable, Collection, Coroutine, Iterable import dataclasses from dataclasses import dataclass, field -from enum import Enum, auto +from enum import Enum, StrEnum, auto from functools import cached_property from itertools import groupby import logging @@ -33,9 +33,13 @@ from . import ( entity_registry, floor_registry, ) +from .typing import VolSchemaType _LOGGER = logging.getLogger(__name__) type _SlotsType = dict[str, Any] +type _IntentSlotsType = dict[ + str | tuple[str, str], VolSchemaType | Callable[[Any], Any] +] INTENT_TURN_OFF = "HassTurnOff" INTENT_TURN_ON = "HassTurnOn" @@ -50,6 +54,8 @@ INTENT_DECREASE_TIMER = "HassDecreaseTimer" INTENT_PAUSE_TIMER = "HassPauseTimer" INTENT_UNPAUSE_TIMER = "HassUnpauseTimer" INTENT_TIMER_STATUS = "HassTimerStatus" +INTENT_GET_CURRENT_DATE = "HassGetCurrentDate" +INTENT_GET_CURRENT_TIME = "HassGetCurrentTime" SLOT_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA) @@ -352,7 +358,7 @@ class MatchTargetsCandidate: matched_name: str | None = None -def _find_areas( +def find_areas( name: str, areas: area_registry.AreaRegistry ) -> Iterable[area_registry.AreaEntry]: """Find all areas matching a name (including aliases).""" @@ -372,7 +378,7 @@ def _find_areas( break -def _find_floors( +def find_floors( name: str, floors: floor_registry.FloorRegistry ) -> 
Iterable[floor_registry.FloorEntry]: """Find all floors matching a name (including aliases).""" @@ -530,7 +536,7 @@ def async_match_targets( # noqa: C901 if not states: return MatchTargetsResult(False, MatchFailedReason.STATE) - # Exit early so we can to avoid registry lookups + # Exit early so we can avoid registry lookups if not ( constraints.name or constraints.features @@ -580,7 +586,7 @@ def async_match_targets( # noqa: C901 if constraints.floor_name: # Filter by areas associated with floor fr = floor_registry.async_get(hass) - targeted_floors = list(_find_floors(constraints.floor_name, fr)) + targeted_floors = list(find_floors(constraints.floor_name, fr)) if not targeted_floors: return MatchTargetsResult( False, @@ -609,7 +615,7 @@ def async_match_targets( # noqa: C901 possible_area_ids = {area.id for area in ar.async_list_areas()} if constraints.area_name: - targeted_areas = list(_find_areas(constraints.area_name, ar)) + targeted_areas = list(find_areas(constraints.area_name, ar)) if not targeted_areas: return MatchTargetsResult( False, @@ -807,13 +813,14 @@ class DynamicServiceIntentHandler(IntentHandler): self, intent_type: str, speech: str | None = None, - required_slots: dict[str | tuple[str, str], vol.Schema] | None = None, - optional_slots: dict[str | tuple[str, str], vol.Schema] | None = None, + required_slots: _IntentSlotsType | None = None, + optional_slots: _IntentSlotsType | None = None, required_domains: set[str] | None = None, required_features: int | None = None, required_states: set[str] | None = None, description: str | None = None, platforms: set[str] | None = None, + device_classes: set[type[StrEnum]] | None = None, ) -> None: """Create Service Intent Handler.""" self.intent_type = intent_type @@ -823,8 +830,9 @@ class DynamicServiceIntentHandler(IntentHandler): self.required_states = required_states self.description = description self.platforms = platforms + self.device_classes = device_classes - self.required_slots: dict[tuple[str, str], 
vol.Schema] = {} + self.required_slots: _IntentSlotsType = {} if required_slots: for key, value_schema in required_slots.items(): if isinstance(key, str): @@ -833,7 +841,7 @@ class DynamicServiceIntentHandler(IntentHandler): self.required_slots[key] = value_schema - self.optional_slots: dict[tuple[str, str], vol.Schema] = {} + self.optional_slots: _IntentSlotsType = {} if optional_slots: for key, value_schema in optional_slots.items(): if isinstance(key, str): @@ -845,13 +853,38 @@ class DynamicServiceIntentHandler(IntentHandler): @cached_property def slot_schema(self) -> dict: """Return a slot schema.""" + domain_validator = ( + vol.In(list(self.required_domains)) if self.required_domains else cv.string + ) slot_schema = { vol.Any("name", "area", "floor"): non_empty_string, - vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), - vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), - vol.Optional("preferred_area_id"): cv.string, - vol.Optional("preferred_floor_id"): cv.string, + vol.Optional("domain"): vol.All(cv.ensure_list, [domain_validator]), } + if self.device_classes: + # The typical way to match enums is with vol.Coerce, but we build a + # flat list to make the API simpler to describe programmatically + flattened_device_classes = vol.In( + [ + device_class.value + for device_class_enum in self.device_classes + for device_class in device_class_enum + ] + ) + slot_schema.update( + { + vol.Optional("device_class"): vol.All( + cv.ensure_list, + [flattened_device_classes], + ) + } + ) + + slot_schema.update( + { + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, + } + ) if self.required_slots: slot_schema.update( @@ -904,9 +937,6 @@ class DynamicServiceIntentHandler(IntentHandler): if "domain" in slots: domains = set(slots["domain"]["value"]) - if self.required_domains: - # Must be a subset of intent's required domain(s) - domains.intersection_update(self.required_domains) if "device_class" in 
slots: device_classes = set(slots["device_class"]["value"]) @@ -1107,13 +1137,14 @@ class ServiceIntentHandler(DynamicServiceIntentHandler): domain: str, service: str, speech: str | None = None, - required_slots: dict[str | tuple[str, str], vol.Schema] | None = None, - optional_slots: dict[str | tuple[str, str], vol.Schema] | None = None, + required_slots: _IntentSlotsType | None = None, + optional_slots: _IntentSlotsType | None = None, required_domains: set[str] | None = None, required_features: int | None = None, required_states: set[str] | None = None, description: str | None = None, platforms: set[str] | None = None, + device_classes: set[type[StrEnum]] | None = None, ) -> None: """Create service handler.""" super().__init__( @@ -1126,6 +1157,7 @@ class ServiceIntentHandler(DynamicServiceIntentHandler): required_states=required_states, description=description, platforms=platforms, + device_classes=device_classes, ) self.domain = domain self.service = service diff --git a/homeassistant/helpers/json.py b/homeassistant/helpers/json.py index 28b3d509a0c..1145d785ed3 100644 --- a/homeassistant/helpers/json.py +++ b/homeassistant/helpers/json.py @@ -13,13 +13,39 @@ import orjson from homeassistant.util.file import write_utf8_file, write_utf8_file_atomic from homeassistant.util.json import ( # noqa: F401 - JSON_DECODE_EXCEPTIONS, - JSON_ENCODE_EXCEPTIONS, + JSON_DECODE_EXCEPTIONS as _JSON_DECODE_EXCEPTIONS, + JSON_ENCODE_EXCEPTIONS as _JSON_ENCODE_EXCEPTIONS, SerializationError, format_unserializable_data, - json_loads, + json_loads as _json_loads, ) +from .deprecation import ( + DeprecatedConstant, + all_with_deprecated_constants, + check_if_deprecated_constant, + deprecated_function, + dir_with_deprecated_constants, +) + +_DEPRECATED_JSON_DECODE_EXCEPTIONS = DeprecatedConstant( + _JSON_DECODE_EXCEPTIONS, "homeassistant.util.json.JSON_DECODE_EXCEPTIONS", "2025.8" +) +_DEPRECATED_JSON_ENCODE_EXCEPTIONS = DeprecatedConstant( + _JSON_ENCODE_EXCEPTIONS, 
"homeassistant.util.json.JSON_ENCODE_EXCEPTIONS", "2025.8" +) +json_loads = deprecated_function( + "homeassistant.util.json.json_loads", breaks_in_ha_version="2025.8" +)(_json_loads) + +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) + + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/helpers/label_registry.py b/homeassistant/helpers/label_registry.py index 64e884e1428..1007b17bc5d 100644 --- a/homeassistant/helpers/label_registry.py +++ b/homeassistant/helpers/label_registry.py @@ -5,10 +5,12 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass -from typing import Literal, TypedDict +from datetime import datetime +from typing import Any, Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback from homeassistant.util import slugify +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -28,6 +30,7 @@ EVENT_LABEL_REGISTRY_UPDATED: EventType[EventLabelRegistryUpdatedData] = EventTy ) STORAGE_KEY = "core.label_registry" STORAGE_VERSION_MAJOR = 1 +STORAGE_VERSION_MINOR = 2 class _LabelStoreData(TypedDict): @@ -38,6 +41,8 @@ class _LabelStoreData(TypedDict): icon: str | None label_id: str name: str + created_at: str + modified_at: str class LabelRegistryStoreData(TypedDict): @@ -66,6 +71,29 @@ class LabelEntry(NormalizedNameBaseRegistryEntry): icon: str | None = None +class LabelRegistryStore(Store[LabelRegistryStoreData]): + """Store label registry data.""" + + async def _async_migrate_func( + self, + old_major_version: int, + old_minor_version: int, + old_data: dict[str, list[dict[str, Any]]], 
+ ) -> LabelRegistryStoreData: + """Migrate to the new version.""" + if old_major_version > STORAGE_VERSION_MAJOR: + raise ValueError("Can't migrate to future version") + + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for label in old_data["labels"]: + label["created_at"] = label["modified_at"] = created_at + + return old_data # type: ignore[return-value] + + class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): """Class to hold a registry of labels.""" @@ -75,11 +103,12 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): def __init__(self, hass: HomeAssistant) -> None: """Initialize the label registry.""" self.hass = hass - self._store = Store( + self._store = LabelRegistryStore( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, + minor_version=STORAGE_VERSION_MINOR, ) @callback @@ -175,7 +204,7 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): ) -> LabelEntry: """Update name of label.""" old = self.labels[label_id] - changes = { + changes: dict[str, Any] = { attr_name: value for attr_name, value in ( ("color", color), @@ -192,8 +221,10 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): if not changes: return old + changes["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("label_registry.async_update") - new = self.labels[label_id] = dataclasses.replace(old, **changes) # type: ignore[arg-type] + new = self.labels[label_id] = dataclasses.replace(old, **changes) self.async_schedule_save() self.hass.bus.async_fire_internal( @@ -221,6 +252,8 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): label_id=label["label_id"], name=label["name"], normalized_name=normalized_name, + created_at=datetime.fromisoformat(label["created_at"]), + modified_at=datetime.fromisoformat(label["modified_at"]), ) self.labels = labels @@ -237,6 +270,8 @@ class 
LabelRegistry(BaseRegistry[LabelRegistryStoreData]): "icon": entry.icon, "label_id": entry.label_id, "name": entry.name, + "created_at": entry.created_at.isoformat(), + "modified_at": entry.modified_at.isoformat(), } for entry in self.labels.values() ] diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 53ec092fda2..e37aa0c532d 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -3,13 +3,16 @@ from __future__ import annotations from abc import ABC, abstractmethod +from collections.abc import Callable from dataclasses import dataclass +from decimal import Decimal from enum import Enum from functools import cache, partial from typing import Any import slugify as unicode_slug import voluptuous as vol +from voluptuous_openapi import UNSUPPORTED, convert from homeassistant.components.climate.intent import INTENT_GET_TEMPERATURE from homeassistant.components.conversation.trace import ( @@ -19,22 +22,39 @@ from homeassistant.components.conversation.trace import ( from homeassistant.components.cover.intent import INTENT_CLOSE_COVER, INTENT_OPEN_COVER from homeassistant.components.homeassistant.exposed_entities import async_should_expose from homeassistant.components.intent import async_device_supports_timers +from homeassistant.components.script import ATTR_VARIABLES, DOMAIN as SCRIPT_DOMAIN from homeassistant.components.weather.intent import INTENT_GET_WEATHER -from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.const import ( + ATTR_DOMAIN, + ATTR_ENTITY_ID, + ATTR_SERVICE, + EVENT_HOMEASSISTANT_CLOSE, + EVENT_SERVICE_REMOVED, + SERVICE_TURN_ON, +) +from homeassistant.core import Context, Event, HomeAssistant, callback, split_entity_id from homeassistant.exceptions import HomeAssistantError from homeassistant.util import yaml +from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import JsonObjectType from . 
import ( area_registry as ar, + config_validation as cv, device_registry as dr, entity_registry as er, floor_registry as fr, intent, + selector, service, ) from .singleton import singleton +SCRIPT_PARAMETERS_CACHE: HassKey[dict[str, tuple[str | None, vol.Schema]]] = HassKey( + "llm_script_parameters_cache" +) + + LLM_API_ASSIST = "assist" BASE_PROMPT = ( @@ -50,11 +70,11 @@ Answer in plain text. Keep it simple and to the point. @callback def async_render_no_api_prompt(hass: HomeAssistant) -> str: - """Return the prompt to be used when no API is configured.""" - return ( - "Only if the user wants to control a device, tell them to edit the AI configuration " - "and allow access to Home Assistant." - ) + """Return the prompt to be used when no API is configured. + + No longer used since Home Assistant 2024.7. + """ + return "" @singleton("llm") @@ -142,11 +162,12 @@ class APIInstance: api_prompt: str llm_context: LLMContext tools: list[Tool] + custom_serializer: Callable[[Any], Any] | None = None async def async_call_tool(self, tool_input: ToolInput) -> JsonObjectType: """Call a LLM tool, validate args and return the response.""" async_conversation_trace_append( - ConversationTraceEventType.LLM_TOOL_CALL, + ConversationTraceEventType.TOOL_CALL, {"tool_name": tool_input.tool_name, "tool_args": tool_input.tool_args}, ) @@ -256,6 +277,8 @@ class AssistAPI(API): intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND, intent.INTENT_TOGGLE, + intent.INTENT_GET_CURRENT_DATE, + intent.INTENT_GET_CURRENT_TIME, } def __init__(self, hass: HomeAssistant) -> None: @@ -283,6 +306,7 @@ class AssistAPI(API): api_prompt=self._async_get_api_prompt(llm_context, exposed_entities), llm_context=llm_context, tools=self._async_get_tools(llm_context, exposed_entities), + custom_serializer=_selector_serializer, ) @callback @@ -300,8 +324,7 @@ class AssistAPI(API): ( "When controlling Home Assistant always call the intent tools. " "Use HassTurnOn to lock and HassTurnOff to unlock a lock. 
" - "When controlling a device, prefer passing just its name and its domain " - "(what comes before the dot in its entity id). " + "When controlling a device, prefer passing just name and domain. " "When controlling an area, prefer passing just area name and domain." ) ] @@ -333,13 +356,13 @@ class AssistAPI(API): if not llm_context.device_id or not async_device_supports_timers( self.hass, llm_context.device_id ): - prompt.append("This device does not support timers.") + prompt.append("This device is not able to start timers.") if exposed_entities: prompt.append( "An overview of the areas and the devices in this smart home:" ) - prompt.append(yaml.dump(exposed_entities)) + prompt.append(yaml.dump(list(exposed_entities.values()))) return "\n".join(prompt) @@ -371,7 +394,7 @@ class AssistAPI(API): exposed_domains: set[str] | None = None if exposed_entities is not None: exposed_domains = { - entity_id.split(".")[0] for entity_id in exposed_entities + split_entity_id(entity_id)[0] for entity_id in exposed_entities } intent_handlers = [ intent_handler @@ -380,11 +403,22 @@ class AssistAPI(API): or intent_handler.platforms & exposed_domains ] - return [ + tools: list[Tool] = [ IntentTool(self.cached_slugify(intent_handler.intent_type), intent_handler) for intent_handler in intent_handlers ] + if llm_context.assistant is not None: + for state in self.hass.states.async_all(SCRIPT_DOMAIN): + if not async_should_expose( + self.hass, llm_context.assistant, state.entity_id + ): + continue + + tools.append(ScriptTool(self.hass, state.entity_id)) + + return tools + def _get_exposed_entities( hass: HomeAssistant, assistant: str @@ -412,13 +446,15 @@ def _get_exposed_entities( entities = {} for state in hass.states.async_all(): + if state.domain == SCRIPT_DOMAIN: + continue + if not async_should_expose(hass, assistant, state.entity_id): continue entity_entry = entity_registry.async_get(state.entity_id) names = [state.name] area_names = [] - description: str | None = None if 
entity_entry is not None: names.extend(entity_entry.aliases) @@ -438,30 +474,19 @@ def _get_exposed_entities( area_names.append(area.name) area_names.extend(area.aliases) - if ( - state.domain == "script" - and entity_entry.unique_id - and ( - service_desc := service.async_get_cached_service_description( - hass, "script", entity_entry.unique_id - ) - ) - ): - description = service_desc.get("description") - info: dict[str, Any] = { "names": ", ".join(names), + "domain": state.domain, "state": state.state, } - if description: - info["description"] = description - if area_names: info["areas"] = ", ".join(area_names) if attributes := { - attr_name: str(attr_value) if isinstance(attr_value, Enum) else attr_value + attr_name: str(attr_value) + if isinstance(attr_value, (Enum, Decimal, int)) + else attr_value for attr_name, attr_value in state.attributes.items() if attr_name in interesting_attributes }: @@ -470,3 +495,248 @@ def _get_exposed_entities( entities[state.entity_id] = info return entities + + +def _selector_serializer(schema: Any) -> Any: # noqa: C901 + """Convert selectors into OpenAPI schema.""" + if not isinstance(schema, selector.Selector): + return UNSUPPORTED + + if isinstance(schema, selector.BackupLocationSelector): + return {"type": "string", "pattern": "^(?:\\/backup|\\w+)$"} + + if isinstance(schema, selector.BooleanSelector): + return {"type": "boolean"} + + if isinstance(schema, selector.ColorRGBSelector): + return { + "type": "array", + "items": {"type": "number"}, + "minItems": 3, + "maxItems": 3, + "format": "RGB", + } + + if isinstance(schema, selector.ConditionSelector): + return convert(cv.CONDITIONS_SCHEMA) + + if isinstance(schema, selector.ConstantSelector): + return convert(vol.Schema(schema.config["value"])) + + result: dict[str, Any] + if isinstance(schema, selector.ColorTempSelector): + result = {"type": "number"} + if "min" in schema.config: + result["minimum"] = schema.config["min"] + elif "min_mireds" in schema.config: + 
result["minimum"] = schema.config["min_mireds"] + if "max" in schema.config: + result["maximum"] = schema.config["max"] + elif "max_mireds" in schema.config: + result["maximum"] = schema.config["max_mireds"] + return result + + if isinstance(schema, selector.CountrySelector): + if schema.config.get("countries"): + return {"type": "string", "enum": schema.config["countries"]} + return {"type": "string", "format": "ISO 3166-1 alpha-2"} + + if isinstance(schema, selector.DateSelector): + return {"type": "string", "format": "date"} + + if isinstance(schema, selector.DateTimeSelector): + return {"type": "string", "format": "date-time"} + + if isinstance(schema, selector.DurationSelector): + return convert(cv.time_period_dict) + + if isinstance(schema, selector.EntitySelector): + if schema.config.get("multiple"): + return {"type": "array", "items": {"type": "string", "format": "entity_id"}} + + return {"type": "string", "format": "entity_id"} + + if isinstance(schema, selector.LanguageSelector): + if schema.config.get("languages"): + return {"type": "string", "enum": schema.config["languages"]} + return {"type": "string", "format": "RFC 5646"} + + if isinstance(schema, (selector.LocationSelector, selector.MediaSelector)): + return convert(schema.DATA_SCHEMA) + + if isinstance(schema, selector.NumberSelector): + result = {"type": "number"} + if "min" in schema.config: + result["minimum"] = schema.config["min"] + if "max" in schema.config: + result["maximum"] = schema.config["max"] + return result + + if isinstance(schema, selector.ObjectSelector): + return {"type": "object", "additionalProperties": True} + + if isinstance(schema, selector.SelectSelector): + options = [ + x["value"] if isinstance(x, dict) else x for x in schema.config["options"] + ] + if schema.config.get("multiple"): + return { + "type": "array", + "items": {"type": "string", "enum": options}, + "uniqueItems": True, + } + return {"type": "string", "enum": options} + + if isinstance(schema, 
selector.TargetSelector): + return convert(cv.TARGET_SERVICE_FIELDS) + + if isinstance(schema, selector.TemplateSelector): + return {"type": "string", "format": "jinja2"} + + if isinstance(schema, selector.TimeSelector): + return {"type": "string", "format": "time"} + + if isinstance(schema, selector.TriggerSelector): + return convert(cv.TRIGGER_SCHEMA) + + if schema.config.get("multiple"): + return {"type": "array", "items": {"type": "string"}} + + return {"type": "string"} + + +class ScriptTool(Tool): + """LLM Tool representing a Script.""" + + def __init__( + self, + hass: HomeAssistant, + script_entity_id: str, + ) -> None: + """Init the class.""" + entity_registry = er.async_get(hass) + + self.name = split_entity_id(script_entity_id)[1] + if self.name[0].isdigit(): + self.name = "_" + self.name + self._entity_id = script_entity_id + self.parameters = vol.Schema({}) + entity_entry = entity_registry.async_get(script_entity_id) + if entity_entry and entity_entry.unique_id: + parameters_cache = hass.data.get(SCRIPT_PARAMETERS_CACHE) + + if parameters_cache is None: + parameters_cache = hass.data[SCRIPT_PARAMETERS_CACHE] = {} + + @callback + def clear_cache(event: Event) -> None: + """Clear script parameter cache on script reload or delete.""" + if ( + event.data[ATTR_DOMAIN] == SCRIPT_DOMAIN + and event.data[ATTR_SERVICE] in parameters_cache + ): + parameters_cache.pop(event.data[ATTR_SERVICE]) + + cancel = hass.bus.async_listen(EVENT_SERVICE_REMOVED, clear_cache) + + @callback + def on_homeassistant_close(event: Event) -> None: + """Cleanup.""" + cancel() + + hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_CLOSE, on_homeassistant_close + ) + + if entity_entry.unique_id in parameters_cache: + self.description, self.parameters = parameters_cache[ + entity_entry.unique_id + ] + return + + if service_desc := service.async_get_cached_service_description( + hass, SCRIPT_DOMAIN, entity_entry.unique_id + ): + self.description = service_desc.get("description") + schema: 
dict[vol.Marker, Any] = {} + fields = service_desc.get("fields", {}) + + for field, config in fields.items(): + description = config.get("description") + if not description: + description = config.get("name") + key: vol.Marker + if config.get("required"): + key = vol.Required(field, description=description) + else: + key = vol.Optional(field, description=description) + if "selector" in config: + schema[key] = selector.selector(config["selector"]) + else: + schema[key] = cv.string + + self.parameters = vol.Schema(schema) + + aliases: list[str] = [] + if entity_entry.name: + aliases.append(entity_entry.name) + if entity_entry.aliases: + aliases.extend(entity_entry.aliases) + if aliases: + if self.description: + self.description = ( + self.description + ". Aliases: " + str(list(aliases)) + ) + else: + self.description = "Aliases: " + str(list(aliases)) + + parameters_cache[entity_entry.unique_id] = ( + self.description, + self.parameters, + ) + + async def async_call( + self, hass: HomeAssistant, tool_input: ToolInput, llm_context: LLMContext + ) -> JsonObjectType: + """Run the script.""" + + for field, validator in self.parameters.schema.items(): + if field not in tool_input.tool_args: + continue + if isinstance(validator, selector.AreaSelector): + area_reg = ar.async_get(hass) + if validator.config.get("multiple"): + areas: list[ar.AreaEntry] = [] + for area in tool_input.tool_args[field]: + areas.extend(intent.find_areas(area, area_reg)) + tool_input.tool_args[field] = list({area.id for area in areas}) + else: + area = tool_input.tool_args[field] + area = list(intent.find_areas(area, area_reg))[0].id + tool_input.tool_args[field] = area + + elif isinstance(validator, selector.FloorSelector): + floor_reg = fr.async_get(hass) + if validator.config.get("multiple"): + floors: list[fr.FloorEntry] = [] + for floor in tool_input.tool_args[field]: + floors.extend(intent.find_floors(floor, floor_reg)) + tool_input.tool_args[field] = list( + {floor.floor_id for floor in 
floors} + ) + else: + floor = tool_input.tool_args[field] + floor = list(intent.find_floors(floor, floor_reg))[0].floor_id + tool_input.tool_args[field] = floor + + await hass.services.async_call( + SCRIPT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: self._entity_id, + ATTR_VARIABLES: tool_input.tool_args, + }, + context=llm_context.context, + ) + + return {"success": True} diff --git a/homeassistant/helpers/normalized_name_base_registry.py b/homeassistant/helpers/normalized_name_base_registry.py index 1cffac9ffc5..7e7ca9ed884 100644 --- a/homeassistant/helpers/normalized_name_base_registry.py +++ b/homeassistant/helpers/normalized_name_base_registry.py @@ -1,8 +1,11 @@ """Provide a base class for registries that use a normalized name index.""" -from dataclasses import dataclass +from dataclasses import dataclass, field +from datetime import datetime from functools import lru_cache +from homeassistant.util import dt as dt_util + from .registry import BaseRegistryItems @@ -12,6 +15,8 @@ class NormalizedNameBaseRegistryEntry: name: str normalized_name: str + created_at: datetime = field(default_factory=dt_util.utcnow) + modified_at: datetime = field(default_factory=dt_util.utcnow) @lru_cache(maxsize=1024) diff --git a/homeassistant/helpers/recorder.py b/homeassistant/helpers/recorder.py index 6155fc9b320..59604944eeb 100644 --- a/homeassistant/helpers/recorder.py +++ b/homeassistant/helpers/recorder.py @@ -3,13 +3,25 @@ from __future__ import annotations import asyncio +from collections.abc import Callable, Generator +from contextlib import contextmanager from dataclasses import dataclass, field -from typing import Any +import functools +import logging +from typing import TYPE_CHECKING, Any from homeassistant.core import HomeAssistant, callback from homeassistant.util.hass_dict import HassKey +if TYPE_CHECKING: + from sqlalchemy.orm.session import Session + + from homeassistant.components.recorder import Recorder + +_LOGGER = logging.getLogger(__name__) + DOMAIN: 
HassKey[RecorderData] = HassKey("recorder") +DATA_INSTANCE: HassKey[Recorder] = HassKey("recorder_instance") @dataclass(slots=True) @@ -20,20 +32,32 @@ class RecorderData: db_connected: asyncio.Future[bool] = field(default_factory=asyncio.Future) +@callback def async_migration_in_progress(hass: HomeAssistant) -> bool: """Check to see if a recorder migration is in progress.""" - if "recorder" not in hass.config.components: - return False # pylint: disable-next=import-outside-toplevel from homeassistant.components import recorder return recorder.util.async_migration_in_progress(hass) +@callback +def async_migration_is_live(hass: HomeAssistant) -> bool: + """Check to see if a recorder migration is live.""" + # pylint: disable-next=import-outside-toplevel + from homeassistant.components import recorder + + return recorder.util.async_migration_is_live(hass) + + @callback def async_initialize_recorder(hass: HomeAssistant) -> None: """Initialize recorder data.""" + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.recorder.basic_websocket_api import async_setup + hass.data[DOMAIN] = RecorderData() + async_setup(hass) async def async_wait_recorder(hass: HomeAssistant) -> bool: @@ -44,3 +68,45 @@ async def async_wait_recorder(hass: HomeAssistant) -> bool: if DOMAIN not in hass.data: return False return await hass.data[DOMAIN].db_connected + + +@functools.lru_cache(maxsize=1) +def get_instance(hass: HomeAssistant) -> Recorder: + """Get the recorder instance.""" + return hass.data[DATA_INSTANCE] + + +@contextmanager +def session_scope( + *, + hass: HomeAssistant | None = None, + session: Session | None = None, + exception_filter: Callable[[Exception], bool] | None = None, + read_only: bool = False, +) -> Generator[Session]: + """Provide a transactional scope around a series of operations. + + read_only is used to indicate that the session is only used for reading + data and that no commit is required. 
It does not prevent the session + from writing and is not a security measure. + """ + if session is None and hass is not None: + session = get_instance(hass).get_session() + + if session is None: + raise RuntimeError("Session required") + + need_rollback = False + try: + yield session + if not read_only and session.get_transaction(): + need_rollback = True + session.commit() + except Exception as err: + _LOGGER.exception("Error executing query") + if need_rollback: + session.rollback() + if not exception_filter or not exception_filter(err): + raise + finally: + session.close() diff --git a/homeassistant/helpers/schema_config_entry_flow.py b/homeassistant/helpers/schema_config_entry_flow.py index 05e4a852ad9..7463c9945b2 100644 --- a/homeassistant/helpers/schema_config_entry_flow.py +++ b/homeassistant/helpers/schema_config_entry_flow.py @@ -175,7 +175,9 @@ class SchemaCommonFlowHandler: and key.default is not vol.UNDEFINED and key not in self._options ): - user_input[str(key.schema)] = key.default() + user_input[str(key.schema)] = cast( + Callable[[], Any], key.default + )() if user_input is not None and form_step.validate_user_input is not None: # Do extra validation of user input @@ -215,7 +217,7 @@ class SchemaCommonFlowHandler: ) ): # Key not present, delete keys old value (if present) too - values.pop(key, None) + values.pop(key.schema, None) async def _show_next_step_or_create_entry( self, form_step: SchemaFlowFormStep @@ -491,7 +493,7 @@ def wrapped_entity_config_entry_title( def entity_selector_without_own_entities( handler: SchemaOptionsFlowHandler, entity_selector_config: selector.EntitySelectorConfig, -) -> vol.Schema: +) -> selector.EntitySelector: """Return an entity selector which excludes own entities.""" entity_registry = er.async_get(handler.hass) entities = er.async_entries_for_config_entry( diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index 84dabb114cd..26a9b6e069e 100644 --- a/homeassistant/helpers/script.py +++ 
b/homeassistant/helpers/script.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Mapping, Sequence +from collections.abc import AsyncGenerator, Callable, Mapping, Sequence from contextlib import asynccontextmanager from contextvars import ContextVar from copy import copy @@ -13,10 +13,9 @@ from functools import cached_property, partial import itertools import logging from types import MappingProxyType -from typing import Any, Literal, TypedDict, cast +from typing import Any, Literal, TypedDict, cast, overload import async_interrupt -from typing_extensions import AsyncGenerator import voluptuous as vol from homeassistant import exceptions @@ -76,6 +75,7 @@ from homeassistant.core import ( HassJob, HomeAssistant, ServiceResponse, + State, SupportsResponse, callback, ) @@ -108,9 +108,7 @@ from .trace import ( trace_update_result, ) from .trigger import async_initialize_triggers, async_validate_trigger_config -from .typing import UNDEFINED, ConfigType, UndefinedType - -# mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs +from .typing import UNDEFINED, ConfigType, TemplateVarsType, UndefinedType SCRIPT_MODE_PARALLEL = "parallel" SCRIPT_MODE_QUEUED = "queued" @@ -178,7 +176,7 @@ def _set_result_unless_done(future: asyncio.Future[None]) -> None: future.set_result(None) -def action_trace_append(variables, path): +def action_trace_append(variables: dict[str, Any], path: str) -> TraceElement: """Append a TraceElement to trace[path].""" trace_element = TraceElement(variables, path) trace_append_element(trace_element, ACTION_TRACE_NODE_MAX_LEN) @@ -431,7 +429,7 @@ class _ScriptRun: if not self._stop.done(): self._script._changed() # noqa: SLF001 - async def _async_get_condition(self, config): + async def _async_get_condition(self, config: ConfigType) -> ConditionCheckerType: return await self._script._async_get_condition(config) # noqa: SLF001 def _log( @@ -439,7 +437,7 @@ class _ScriptRun: ) -> None: 
self._script._log(msg, *args, level=level, **kwargs) # noqa: SLF001 - def _step_log(self, default_message, timeout=None): + def _step_log(self, default_message: str, timeout: float | None = None) -> None: self._script.last_action = self._action.get(CONF_ALIAS, default_message) _timeout = ( "" if timeout is None else f" (timeout: {timedelta(seconds=timeout)})" @@ -581,7 +579,7 @@ class _ScriptRun: if not isinstance(exception, exceptions.HomeAssistantError): raise exception - def _log_exception(self, exception): + def _log_exception(self, exception: Exception) -> None: action_type = cv.determine_script_action(self._action) error = str(exception) @@ -630,7 +628,7 @@ class _ScriptRun: ) raise _AbortScript from ex - async def _async_delay_step(self): + async def _async_delay_step(self) -> None: """Handle delay.""" delay_delta = self._get_pos_time_period_template(CONF_DELAY) @@ -662,7 +660,7 @@ class _ScriptRun: return self._get_pos_time_period_template(CONF_TIMEOUT).total_seconds() return None - async def _async_wait_template_step(self): + async def _async_wait_template_step(self) -> None: """Handle a wait template.""" timeout = self._get_timeout_seconds_from_action() self._step_log("wait template", timeout) @@ -671,7 +669,6 @@ class _ScriptRun: trace_set_result(wait=self._variables["wait"]) wait_template = self._action[CONF_WAIT_TEMPLATE] - wait_template.hass = self._hass # check if condition already okay if condition.async_template(self._hass, wait_template, self._variables, False): @@ -691,7 +688,9 @@ class _ScriptRun: futures.append(done) @callback - def async_script_wait(entity_id, from_s, to_s): + def async_script_wait( + entity_id: str, from_s: State | None, to_s: State | None + ) -> None: """Handle script after template condition is true.""" self._async_set_remaining_time_var(timeout_handle) self._variables["wait"]["completed"] = True @@ -728,7 +727,7 @@ class _ScriptRun: except ScriptStoppedError as ex: raise asyncio.CancelledError from ex - async def 
_async_call_service_step(self): + async def _async_call_service_step(self) -> None: """Call the service specified in the action.""" self._step_log("call service") @@ -775,14 +774,14 @@ class _ScriptRun: if response_variable: self._variables[response_variable] = response_data - async def _async_device_step(self): + async def _async_device_step(self) -> None: """Perform the device automation specified in the action.""" self._step_log("device automation") await device_action.async_call_action_from_config( self._hass, self._action, self._variables, self._context ) - async def _async_scene_step(self): + async def _async_scene_step(self) -> None: """Activate the scene specified in the action.""" self._step_log("activate scene") trace_set_result(scene=self._action[CONF_SCENE]) @@ -794,7 +793,7 @@ class _ScriptRun: context=self._context, ) - async def _async_event_step(self): + async def _async_event_step(self) -> None: """Fire an event.""" self._step_log(self._action.get(CONF_ALIAS, self._action[CONF_EVENT])) event_data = {} @@ -816,7 +815,7 @@ class _ScriptRun: self._action[CONF_EVENT], event_data, context=self._context ) - async def _async_condition_step(self): + async def _async_condition_step(self) -> None: """Test if condition is matching.""" self._script.last_action = self._action.get( CONF_ALIAS, self._action[CONF_CONDITION] @@ -836,12 +835,19 @@ class _ScriptRun: if not check: raise _ConditionFail - def _test_conditions(self, conditions, name, condition_path=None): + def _test_conditions( + self, + conditions: list[ConditionCheckerType], + name: str, + condition_path: str | None = None, + ) -> bool | None: if condition_path is None: condition_path = name @trace_condition_function - def traced_test_conditions(hass, variables): + def traced_test_conditions( + hass: HomeAssistant, variables: TemplateVarsType + ) -> bool | None: try: with trace_path(condition_path): for idx, cond in enumerate(conditions): @@ -857,7 +863,7 @@ class _ScriptRun: return 
traced_test_conditions(self._hass, self._variables) @async_trace_path("repeat") - async def _async_repeat_step(self): # noqa: C901 + async def _async_repeat_step(self) -> None: # noqa: C901 """Repeat a sequence.""" description = self._action.get(CONF_ALIAS, "sequence") repeat = self._action[CONF_REPEAT] @@ -877,7 +883,7 @@ class _ScriptRun: script = self._script._get_repeat_script(self._step) # noqa: SLF001 warned_too_many_loops = False - async def async_run_sequence(iteration, extra_msg=""): + async def async_run_sequence(iteration: int, extra_msg: str = "") -> None: self._log("Repeating %s: Iteration %i%s", description, iteration, extra_msg) with trace_path("sequence"): await self._async_run_script(script) @@ -1053,7 +1059,7 @@ class _ScriptRun: """If sequence.""" if_data = await self._script._async_get_if_data(self._step) # noqa: SLF001 - test_conditions = False + test_conditions: bool | None = False try: with trace_path("if"): test_conditions = self._test_conditions( @@ -1073,6 +1079,26 @@ class _ScriptRun: with trace_path("else"): await self._async_run_script(if_data["if_else"]) + @overload + def _async_futures_with_timeout( + self, + timeout: float, + ) -> tuple[ + list[asyncio.Future[None]], + asyncio.TimerHandle, + asyncio.Future[None], + ]: ... + + @overload + def _async_futures_with_timeout( + self, + timeout: None, + ) -> tuple[ + list[asyncio.Future[None]], + None, + None, + ]: ... 
+ def _async_futures_with_timeout( self, timeout: float | None, @@ -1099,7 +1125,7 @@ class _ScriptRun: futures.append(timeout_future) return futures, timeout_handle, timeout_future - async def _async_wait_for_trigger_step(self): + async def _async_wait_for_trigger_step(self) -> None: """Wait for a trigger event.""" timeout = self._get_timeout_seconds_from_action() @@ -1120,12 +1146,14 @@ class _ScriptRun: done = self._hass.loop.create_future() futures.append(done) - async def async_done(variables, context=None): + async def async_done( + variables: dict[str, Any], context: Context | None = None + ) -> None: self._async_set_remaining_time_var(timeout_handle) self._variables["wait"]["trigger"] = variables["trigger"] _set_result_unless_done(done) - def log_cb(level, msg, **kwargs): + def log_cb(level: int, msg: str, **kwargs: Any) -> None: self._log(msg, level=level, **kwargs) remove_triggers = await async_initialize_triggers( @@ -1169,14 +1197,14 @@ class _ScriptRun: unsub() - async def _async_variables_step(self): + async def _async_variables_step(self) -> None: """Set a variable value.""" self._step_log("setting variables") self._variables = self._action[CONF_VARIABLES].async_render( self._hass, self._variables, render_as_defaults=False ) - async def _async_set_conversation_response_step(self): + async def _async_set_conversation_response_step(self) -> None: """Set conversation response.""" self._step_log("setting conversation response") resp: template.Template | None = self._action[CONF_SET_CONVERSATION_RESPONSE] @@ -1188,7 +1216,7 @@ class _ScriptRun: ) trace_set_result(conversation_response=self._conversation_response) - async def _async_stop_step(self): + async def _async_stop_step(self) -> None: """Stop script execution.""" stop = self._action[CONF_STOP] error = self._action.get(CONF_ERROR, False) @@ -1321,7 +1349,7 @@ async def _async_stop_scripts_at_shutdown(hass: HomeAssistant, event: Event) -> ) -type _VarsType = dict[str, Any] | MappingProxyType +type 
_VarsType = dict[str, Any] | MappingProxyType[str, Any] def _referenced_extract_ids(data: Any, key: str, found: set[str]) -> None: @@ -1359,7 +1387,7 @@ class ScriptRunResult: conversation_response: str | None | UndefinedType service_response: ServiceResponse - variables: dict + variables: dict[str, Any] class Script: @@ -1400,7 +1428,6 @@ class Script: self._hass = hass self.sequence = sequence - template.attach(hass, self.sequence) self.name = name self.unique_id = f"{domain}.{name}-{id(self)}" self.domain = domain @@ -1414,7 +1441,7 @@ class Script: self._set_logger(logger) self._log_exceptions = log_exceptions - self.last_action = None + self.last_action: str | None = None self.last_triggered: datetime | None = None self._runs: list[_ScriptRun] = [] @@ -1422,7 +1449,7 @@ class Script: self._max_exceeded = max_exceeded if script_mode == SCRIPT_MODE_QUEUED: self._queue_lck = asyncio.Lock() - self._config_cache: dict[set[tuple], Callable[..., bool]] = {} + self._config_cache: dict[frozenset[tuple[str, str]], ConditionCheckerType] = {} self._repeat_script: dict[int, Script] = {} self._choose_data: dict[int, _ChooseData] = {} self._if_data: dict[int, _IfData] = {} @@ -1430,8 +1457,6 @@ class Script: self._sequence_scripts: dict[int, Script] = {} self.variables = variables self._variables_dynamic = template.is_complex(variables) - if self._variables_dynamic: - template.attach(hass, variables) self._copy_variables_on_run = copy_variables @property @@ -1715,9 +1740,11 @@ class Script: variables["context"] = context elif self._copy_variables_on_run: - variables = cast(dict, copy(run_variables)) + # This is not the top level script, variables have been turned to a dict + variables = cast(dict[str, Any], copy(run_variables)) else: - variables = cast(dict, run_variables) + # This is not the top level script, variables have been turned to a dict + variables = cast(dict[str, Any], run_variables) # Prevent non-allowed recursive calls which will cause deadlocks when we try to 
# stop (restart) or wait for (queued) our own script run. @@ -1746,9 +1773,7 @@ class Script: cls = _ScriptRun else: cls = _QueuedScriptRun - run = cls( - self._hass, self, cast(dict, variables), context, self._log_exceptions - ) + run = cls(self._hass, self, variables, context, self._log_exceptions) has_existing_runs = bool(self._runs) self._runs.append(run) if self.script_mode == SCRIPT_MODE_RESTART and has_existing_runs: @@ -1773,7 +1798,9 @@ class Script: self._changed() raise - async def _async_stop(self, aws: list[asyncio.Task], update_state: bool) -> None: + async def _async_stop( + self, aws: list[asyncio.Task[None]], update_state: bool + ) -> None: await asyncio.wait(aws) if update_state: self._changed() @@ -1792,11 +1819,8 @@ class Script: return await asyncio.shield(create_eager_task(self._async_stop(aws, update_state))) - async def _async_get_condition(self, config): - if isinstance(config, template.Template): - config_cache_key = config.template - else: - config_cache_key = frozenset((k, str(v)) for k, v in config.items()) + async def _async_get_condition(self, config: ConfigType) -> ConditionCheckerType: + config_cache_key = frozenset((k, str(v)) for k, v in config.items()) if not (cond := self._config_cache.get(config_cache_key)): cond = await condition.async_from_config(self._hass, config) self._config_cache[config_cache_key] = cond diff --git a/homeassistant/helpers/script_variables.py b/homeassistant/helpers/script_variables.py index 043101b9b86..2b4507abd64 100644 --- a/homeassistant/helpers/script_variables.py +++ b/homeassistant/helpers/script_variables.py @@ -36,7 +36,6 @@ class ScriptVariables: """ if self._has_template is None: self._has_template = template.is_complex(self.variables) - template.attach(hass, self.variables) if not self._has_template: if render_as_defaults: diff --git a/homeassistant/helpers/selector.py b/homeassistant/helpers/selector.py index 1db4dd9f80b..025b8de8896 100644 --- a/homeassistant/helpers/selector.py +++ 
b/homeassistant/helpers/selector.py @@ -75,6 +75,13 @@ class Selector[_T: Mapping[str, Any]]: self.config = self.CONFIG_SCHEMA(config) + def __eq__(self, other: object) -> bool: + """Check equality.""" + if not isinstance(other, Selector): + return NotImplemented + + return self.selector_type == other.selector_type and self.config == other.config + def serialize(self) -> dict[str, dict[str, _T]]: """Serialize Selector for voluptuous_serialize.""" return {"selector": {self.selector_type: self.config}} @@ -278,7 +285,7 @@ class AssistPipelineSelector(Selector[AssistPipelineSelectorConfig]): CONFIG_SCHEMA = vol.Schema({}) - def __init__(self, config: AssistPipelineSelectorConfig) -> None: + def __init__(self, config: AssistPipelineSelectorConfig | None = None) -> None: """Instantiate a selector.""" super().__init__(config) @@ -430,10 +437,10 @@ class ColorTempSelector(Selector[ColorTempSelectorConfig]): range_min = self.config.get("min") range_max = self.config.get("max") - if not range_min: + if range_min is None: range_min = self.config.get("min_mireds") - if not range_max: + if range_max is None: range_max = self.config.get("max_mireds") value: int = vol.All( @@ -517,7 +524,7 @@ class ConstantSelector(Selector[ConstantSelectorConfig]): } ) - def __init__(self, config: ConstantSelectorConfig | None = None) -> None: + def __init__(self, config: ConstantSelectorConfig) -> None: """Instantiate a selector.""" super().__init__(config) @@ -560,7 +567,7 @@ class QrCodeSelector(Selector[QrCodeSelectorConfig]): } ) - def __init__(self, config: QrCodeSelectorConfig | None = None) -> None: + def __init__(self, config: QrCodeSelectorConfig) -> None: """Instantiate a selector.""" super().__init__(config) @@ -588,7 +595,7 @@ class ConversationAgentSelector(Selector[ConversationAgentSelectorConfig]): } ) - def __init__(self, config: ConversationAgentSelectorConfig) -> None: + def __init__(self, config: ConversationAgentSelectorConfig | None = None) -> None: """Instantiate a 
selector.""" super().__init__(config) @@ -718,6 +725,7 @@ class DurationSelectorConfig(TypedDict, total=False): """Class to represent a duration selector config.""" enable_day: bool + enable_millisecond: bool allow_negative: bool @@ -732,6 +740,8 @@ class DurationSelector(Selector[DurationSelectorConfig]): # Enable day field in frontend. A selection with `days` set is allowed # even if `enable_day` is not set vol.Optional("enable_day"): cv.boolean, + # Enable millisecond field in frontend. + vol.Optional("enable_millisecond"): cv.boolean, # Allow negative durations. Will default to False in HA Core 2025.6.0. vol.Optional("allow_negative"): cv.boolean, } @@ -820,7 +830,7 @@ class FloorSelectorConfig(TypedDict, total=False): @SELECTORS.register("floor") -class FloorSelector(Selector[AreaSelectorConfig]): +class FloorSelector(Selector[FloorSelectorConfig]): """Selector of a single or list of floors.""" selector_type = "floor" @@ -934,7 +944,7 @@ class LanguageSelector(Selector[LanguageSelectorConfig]): } ) - def __init__(self, config: LanguageSelectorConfig) -> None: + def __init__(self, config: LanguageSelectorConfig | None = None) -> None: """Instantiate a selector.""" super().__init__(config) @@ -1159,7 +1169,7 @@ class SelectSelector(Selector[SelectSelectorConfig]): } ) - def __init__(self, config: SelectSelectorConfig | None = None) -> None: + def __init__(self, config: SelectSelectorConfig) -> None: """Instantiate a selector.""" super().__init__(config) @@ -1175,7 +1185,7 @@ class SelectSelector(Selector[SelectSelectorConfig]): for option in cast(Sequence[SelectOptionDict], config_options) ] - parent_schema = vol.In(options) + parent_schema: vol.In | vol.Any = vol.In(options) if self.config["custom_value"]: parent_schema = vol.Any(parent_schema, str) @@ -1434,7 +1444,7 @@ class FileSelector(Selector[FileSelectorConfig]): } ) - def __init__(self, config: FileSelectorConfig | None = None) -> None: + def __init__(self, config: FileSelectorConfig) -> None: 
"""Instantiate a selector.""" super().__init__(config) diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index a9959902084..0551b5289c5 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -20,8 +20,8 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FLOOR_ID, ATTR_LABEL_ID, + CONF_ACTION, CONF_ENTITY_ID, - CONF_SERVICE, CONF_SERVICE_DATA, CONF_SERVICE_DATA_TEMPLATE, CONF_SERVICE_TEMPLATE, @@ -33,6 +33,7 @@ from homeassistant.core import ( Context, EntityServiceResponse, HassJob, + HassJobType, HomeAssistant, ServiceCall, ServiceResponse, @@ -63,7 +64,7 @@ from . import ( ) from .group import expand_entity_ids from .selector import TargetSelector -from .typing import ConfigType, TemplateVarsType +from .typing import ConfigType, TemplateVarsType, VolDictType, VolSchemaType if TYPE_CHECKING: from .entity import Entity @@ -179,10 +180,19 @@ _FIELD_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +_SECTION_SCHEMA = vol.Schema( + { + vol.Required("fields"): vol.Schema({str: _FIELD_SCHEMA}), + }, + extra=vol.ALLOW_EXTRA, +) + _SERVICE_SCHEMA = vol.Schema( { vol.Optional("target"): vol.Any(TargetSelector.CONFIG_SCHEMA, None), - vol.Optional("fields"): vol.Schema({str: _FIELD_SCHEMA}), + vol.Optional("fields"): vol.Schema( + {str: vol.Any(_SECTION_SCHEMA, _FIELD_SCHEMA)} + ), }, extra=vol.ALLOW_EXTRA, ) @@ -349,14 +359,13 @@ def async_prepare_call_from_config( f"Invalid config for calling service: {ex}" ) from ex - if CONF_SERVICE in config: - domain_service = config[CONF_SERVICE] + if CONF_ACTION in config: + domain_service = config[CONF_ACTION] else: domain_service = config[CONF_SERVICE_TEMPLATE] if isinstance(domain_service, template.Template): try: - domain_service.hass = hass domain_service = domain_service.async_render(variables) domain_service = cv.service(domain_service) except TemplateError as ex: @@ -375,10 +384,8 @@ def async_prepare_call_from_config( conf = config[CONF_TARGET] try: if 
isinstance(conf, template.Template): - conf.hass = hass target.update(conf.async_render(variables)) else: - template.attach(hass, conf) target.update(template.render_complex(conf, variables)) if CONF_ENTITY_ID in target: @@ -404,7 +411,6 @@ def async_prepare_call_from_config( if conf not in config: continue try: - template.attach(hass, config[conf]) render = template.render_complex(config[conf], variables) if not isinstance(render, dict): raise HomeAssistantError( @@ -1100,7 +1106,7 @@ def async_register_admin_service( domain: str, service: str, service_func: Callable[[ServiceCall], Awaitable[None] | None], - schema: vol.Schema = vol.Schema({}, extra=vol.PREVENT_EXTRA), + schema: VolSchemaType = vol.Schema({}, extra=vol.PREVENT_EXTRA), ) -> None: """Register a service that requires admin access.""" hass.services.async_register( @@ -1235,3 +1241,58 @@ class ReloadServiceHelper[_T]: self._service_running = False self._pending_reload_targets -= reload_targets self._service_condition.notify_all() + + +@callback +def async_register_entity_service( + hass: HomeAssistant, + domain: str, + name: str, + *, + entities: dict[str, Entity], + func: str | Callable[..., Any], + job_type: HassJobType | None, + required_features: Iterable[int] | None = None, + schema: VolDictType | VolSchemaType | None, + supports_response: SupportsResponse = SupportsResponse.NONE, +) -> None: + """Help registering an entity service. + + This is called by EntityComponent.async_register_entity_service and + EntityPlatform.async_register_entity_service and should not be called + directly by integrations. 
+ """ + if schema is None or isinstance(schema, dict): + schema = cv.make_entity_service_schema(schema) + # Do a sanity check to check this is a valid entity service schema, + # the check could be extended to require All/Any to have sub schema(s) + # with all entity service fields + elif ( + # Don't check All/Any + not isinstance(schema, (vol.All, vol.Any)) + # Don't check All/Any wrapped in schema + and not isinstance(schema.schema, (vol.All, vol.Any)) + and any(key not in schema.schema for key in cv.ENTITY_SERVICE_FIELDS) + ): + raise HomeAssistantError( + "The schema does not include all required keys: " + f"{", ".join(str(key) for key in cv.ENTITY_SERVICE_FIELDS)}" + ) + + service_func: str | HassJob[..., Any] + service_func = func if isinstance(func, str) else HassJob(func) + + hass.services.async_register( + domain, + name, + partial( + entity_service_call, + hass, + entities, + service_func, + required_features=required_features, + ), + schema, + supports_response, + job_type=job_type, + ) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 714a57336bd..7742418c5a7 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -6,11 +6,11 @@ from ast import literal_eval import asyncio import base64 import collections.abc -from collections.abc import Callable, Iterable +from collections.abc import Callable, Generator, Iterable from contextlib import AbstractContextManager from contextvars import ContextVar from datetime import date, datetime, time, timedelta -from functools import cache, lru_cache, partial, wraps +from functools import cache, cached_property, lru_cache, partial, wraps import json import logging import math @@ -34,7 +34,6 @@ from jinja2.sandbox import ImmutableSandboxedEnvironment from jinja2.utils import Namespace from lru import LRU import orjson -from typing_extensions import Generator import voluptuous as vol from homeassistant.const import ( @@ -1023,7 +1022,7 @@ class 
TemplateStateBase(State): return self.state_with_unit raise KeyError - @property + @cached_property def entity_id(self) -> str: # type: ignore[override] """Wrap State.entity_id. @@ -1381,6 +1380,24 @@ def device_attr(hass: HomeAssistant, device_or_entity_id: str, attr_name: str) - return getattr(device, attr_name) +def config_entry_attr( + hass: HomeAssistant, config_entry_id_: str, attr_name: str +) -> Any: + """Get config entry specific attribute.""" + if not isinstance(config_entry_id_, str): + raise TemplateError("Must provide a config entry ID") + + if attr_name not in ("domain", "title", "state", "source", "disabled_by"): + raise TemplateError("Invalid config entry attribute") + + config_entry = hass.config_entries.async_get_entry(config_entry_id_) + + if config_entry is None: + return None + + return getattr(config_entry, attr_name) + + def is_device_attr( hass: HomeAssistant, device_or_entity_id: str, attr_name: str, attr_value: Any ) -> bool: @@ -2310,7 +2327,7 @@ def regex_match(value, find="", ignorecase=False): """Match value using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return bool(_regex_cache(find, flags).match(value)) @@ -2321,7 +2338,7 @@ def regex_replace(value="", find="", replace="", ignorecase=False): """Replace using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return _regex_cache(find, flags).sub(replace, value) @@ -2329,7 +2346,7 @@ def regex_search(value, find="", ignorecase=False): """Search using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return bool(_regex_cache(find, flags).search(value)) @@ -2342,7 +2359,7 @@ def regex_findall(value, find="", ignorecase=False): """Find all matches using regex.""" if not isinstance(value, str): value = str(value) 
- flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return _regex_cache(find, flags).findall(value) @@ -2868,6 +2885,9 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["device_attr"] = hassfunction(device_attr) self.filters["device_attr"] = self.globals["device_attr"] + self.globals["config_entry_attr"] = hassfunction(config_entry_attr) + self.filters["config_entry_attr"] = self.globals["config_entry_attr"] + self.globals["is_device_attr"] = hassfunction(is_device_attr) self.tests["is_device_attr"] = hassfunction(is_device_attr, pass_eval_context) diff --git a/homeassistant/helpers/trace.py b/homeassistant/helpers/trace.py index 6f29ff23bec..431a7a7d1f8 100644 --- a/homeassistant/helpers/trace.py +++ b/homeassistant/helpers/trace.py @@ -3,14 +3,12 @@ from __future__ import annotations from collections import deque -from collections.abc import Callable, Coroutine +from collections.abc import Callable, Coroutine, Generator from contextlib import contextmanager from contextvars import ContextVar from functools import wraps from typing import Any -from typing_extensions import Generator - from homeassistant.core import ServiceResponse import homeassistant.util.dt as dt_util @@ -36,7 +34,7 @@ class TraceElement: """Container for trace data.""" self._child_key: str | None = None self._child_run_id: str | None = None - self._error: Exception | None = None + self._error: BaseException | None = None self.path: str = path self._result: dict[str, Any] | None = None self.reuse_by_child = False @@ -54,7 +52,7 @@ class TraceElement: self._child_key = child_key self._child_run_id = child_run_id - def set_error(self, ex: Exception) -> None: + def set_error(self, ex: BaseException | None) -> None: """Set error.""" self._error = ex diff --git a/homeassistant/helpers/trigger_template_entity.py b/homeassistant/helpers/trigger_template_entity.py index 7b1c4ab8078..7f8ad41d7bb 100644 --- 
a/homeassistant/helpers/trigger_template_entity.py +++ b/homeassistant/helpers/trigger_template_entity.py @@ -30,7 +30,7 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from . import config_validation as cv from .entity import Entity -from .template import attach as template_attach, render_complex +from .template import render_complex from .typing import ConfigType CONF_AVAILABILITY = "availability" @@ -157,11 +157,6 @@ class TriggerBaseEntity(Entity): """Return extra attributes.""" return self._rendered.get(CONF_ATTRIBUTES) - async def async_added_to_hass(self) -> None: - """Handle being added to Home Assistant.""" - await super().async_added_to_hass() - template_attach(self.hass, self._config) - def _set_unique_id(self, unique_id: str | None) -> None: """Set unique id.""" self._unique_id = unique_id diff --git a/homeassistant/helpers/typing.py b/homeassistant/helpers/typing.py index 3cdd9ec9250..65774a0b168 100644 --- a/homeassistant/helpers/typing.py +++ b/homeassistant/helpers/typing.py @@ -5,6 +5,8 @@ from enum import Enum from functools import partial from typing import Any, Never +import voluptuous as vol + from .deprecation import ( DeferredDeprecatedAlias, all_with_deprecated_constants, @@ -19,6 +21,8 @@ type ServiceDataType = dict[str, Any] type StateType = str | int | float | None type TemplateVarsType = Mapping[str, Any] | None type NoEventData = Mapping[str, Never] +type VolSchemaType = vol.Schema | vol.All | vol.Any +type VolDictType = dict[str | vol.Marker, Any] # Custom type for recorder Queries type QueryType = Any diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index 8451c69d2b3..4fe4953d752 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -4,8 +4,9 @@ from __future__ import annotations from abc import abstractmethod import asyncio -from collections.abc import Awaitable, Callable, Coroutine +from 
collections.abc import Awaitable, Callable, Coroutine, Generator from datetime import datetime, timedelta +from functools import cached_property import logging from random import randint from time import monotonic @@ -14,7 +15,7 @@ import urllib.error import aiohttp import requests -from typing_extensions import Generator, TypeVar +from typing_extensions import TypeVar from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP @@ -70,6 +71,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): name: str, update_interval: timedelta | None = None, update_method: Callable[[], Awaitable[_DataT]] | None = None, + setup_method: Callable[[], Awaitable[None]] | None = None, request_refresh_debouncer: Debouncer[Coroutine[Any, Any, None]] | None = None, always_update: bool = True, ) -> None: @@ -78,6 +80,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): self.logger = logger self.name = name self.update_method = update_method + self.setup_method = setup_method self._update_interval_seconds: float | None = None self.update_interval = update_interval self._shutdown_requested = False @@ -274,15 +277,54 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): fails. Additionally logging is handled by config entry setup to ensure that multiple retries do not cause log spam. 
""" - await self._async_refresh( - log_failures=False, raise_on_auth_failed=True, raise_on_entry_error=True - ) - if self.last_update_success: - return + if await self.__wrap_async_setup(): + await self._async_refresh( + log_failures=False, raise_on_auth_failed=True, raise_on_entry_error=True + ) + if self.last_update_success: + return ex = ConfigEntryNotReady() ex.__cause__ = self.last_exception raise ex + async def __wrap_async_setup(self) -> bool: + """Error handling for _async_setup.""" + try: + await self._async_setup() + except ( + TimeoutError, + requests.exceptions.Timeout, + aiohttp.ClientError, + requests.exceptions.RequestException, + urllib.error.URLError, + UpdateFailed, + ) as err: + self.last_exception = err + + except (ConfigEntryError, ConfigEntryAuthFailed) as err: + self.last_exception = err + self.last_update_success = False + raise + + except Exception as err: # pylint: disable=broad-except + self.last_exception = err + self.logger.exception("Unexpected error fetching %s data", self.name) + else: + return True + + self.last_update_success = False + return False + + async def _async_setup(self) -> None: + """Set up the coordinator. + + Can be overwritten by integrations to load data or resources + only once during the first refresh. 
+ """ + if self.setup_method is None: + return None + return await self.setup_method() + async def async_refresh(self) -> None: """Refresh data and log errors.""" await self._async_refresh(log_failures=True) @@ -392,7 +434,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): self.logger.debug( "Finished fetching %s data in %.3f seconds (success: %s)", self.name, - monotonic() - start, + monotonic() - start, # pylint: disable=possibly-used-before-assignment self.last_update_success, ) if not auth_failed and self._listeners and not self.hass.is_stopping: @@ -471,7 +513,7 @@ class BaseCoordinatorEntity[ self.coordinator = coordinator self.coordinator_context = context - @property + @cached_property def should_poll(self) -> bool: """No need to poll. Coordinator notifies entity of updates.""" return False diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 9afad610420..90b88ba2109 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -102,6 +102,23 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = { "mydolphin_plus": BlockedIntegration( AwesomeVersion("1.0.13"), "crashes Home Assistant" ), + # Added in 2024.7.2 because of + # https://github.com/gcobb321/icloud3/issues/349 + # Note: Current version 3.0.5.2, the fixed version is a guesstimate, + # as no solution is available at time of writing. 
+ "icloud3": BlockedIntegration( + AwesomeVersion("3.0.5.3"), "prevents recorder from working" + ), + # Added in 2024.7.2 because of + # https://github.com/custom-components/places/issues/289 + "places": BlockedIntegration( + AwesomeVersion("2.7.1"), "prevents recorder from working" + ), + # Added in 2024.7.2 because of + # https://github.com/enkama/hass-variables/issues/120 + "variable": BlockedIntegration( + AwesomeVersion("3.4.4"), "prevents recorder from working" + ), } DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey( @@ -928,7 +945,7 @@ class Integration: except IntegrationNotFound as err: _LOGGER.error( ( - "Unable to resolve dependencies for %s: we are unable to resolve" + "Unable to resolve dependencies for %s: unable to resolve" " (sub)dependency %s" ), self.domain, @@ -937,7 +954,7 @@ class Integration: except CircularDependency as err: _LOGGER.error( ( - "Unable to resolve dependencies for %s: it contains a circular" + "Unable to resolve dependencies for %s: it contains a circular" " dependency: %s -> %s" ), self.domain, diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index edb0f29919d..809331332e4 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -1,66 +1,66 @@ # Automatically generated by gen_requirements_all.py, do not edit -aiodhcpwatcher==1.0.0 +aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohttp-fast-url-dispatcher==0.3.0 -aiohttp-fast-zlib==0.1.0 -aiohttp==3.9.5 +aiohttp-fast-zlib==0.1.1 +aiohttp==3.10.3 aiohttp_cors==0.7.0 -aiozoneinfo==0.2.0 +aiozoneinfo==0.2.1 astral==2.2 -async-interrupt==1.1.1 -async-upnp-client==0.38.3 +async-interrupt==1.1.2 +async-upnp-client==0.40.0 atomicwrites-homeassistant==1.4.1 attrs==23.2.0 -awesomeversion==24.2.0 -bcrypt==4.1.2 +awesomeversion==24.6.0 +bcrypt==4.1.3 bleak-retry-connector==3.5.0 -bleak==0.22.1 -bluetooth-adapters==0.19.2 +bleak==0.22.2 +bluetooth-adapters==0.19.4 
bluetooth-auto-recovery==1.4.2 -bluetooth-data-tools==1.19.0 +bluetooth-data-tools==1.19.4 cached_ipaddress==0.3.0 certifi>=2021.5.30 ciso8601==2.3.1 -cryptography==42.0.8 -dbus-fast==2.21.3 +cryptography==43.0.0 +dbus-fast==2.22.1 fnv-hash-fast==0.5.0 ha-av==10.1.1 ha-ffmpeg==3.2.0 -habluetooth==3.1.1 +habluetooth==3.1.3 hass-nabucasa==0.81.1 -hassil==1.7.1 -home-assistant-bluetooth==1.12.1 -home-assistant-frontend==20240610.1 -home-assistant-intents==2024.6.21 +hassil==1.7.4 +home-assistant-bluetooth==1.12.2 +home-assistant-frontend==20240809.0 +home-assistant-intents==2024.8.7 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.9.15 +orjson==3.10.7 packaging>=23.1 paho-mqtt==1.6.1 -Pillow==10.3.0 +Pillow==10.4.0 pip>=21.3.1 psutil-home-assistant==0.0.1 -PyJWT==2.8.0 +PyJWT==2.9.0 +pymicro-vad==1.0.1 PyNaCl==1.5.0 -pyOpenSSL==24.1.0 +pyOpenSSL==24.2.1 pyserial==3.5 python-slugify==8.0.4 PyTurboJPEG==1.7.1 pyudev==0.24.1 -PyYAML==6.0.1 +PyYAML==6.0.2 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.9.0 +ulid-transform==0.13.1 urllib3>=1.26.5,<2 +voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 -voluptuous==0.13.1 -webrtc-noise-gain==1.2.3 +voluptuous==0.15.2 yarl==1.9.4 zeroconf==0.132.2 @@ -79,11 +79,6 @@ grpcio==1.59.0 grpcio-status==1.59.0 grpcio-reflection==1.59.0 -# libcst >=0.4.0 requires a newer Rust than we currently have available, -# thus our wheels builds fail. This pins it to the last working version, -# which at this point satisfies our needs. 
-libcst==0.3.23 - # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -98,11 +93,6 @@ enum34==1000000000.0.0 typing==1000000000.0.0 uuid==1000000000.0.0 -# regex causes segfault with version 2021.8.27 -# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error -# This is fixed in 2021.8.28 -regex==2021.8.28 - # httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these @@ -135,6 +125,9 @@ backoff>=2.0 # v2 has breaking changes (#99218). pydantic==1.10.17 +# Required for Python 3.12.4 compatibility (#119223). +mashumaro>=3.13.1 + # Breaks asyncio # https://github.com/pubnub/python/issues/130 pubnub!=6.4.0 @@ -149,7 +142,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.1 +protobuf==4.25.4 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder @@ -163,9 +156,6 @@ websockets>=11.0.1 # pysnmplib is no longer maintained and does not work with newer # python pysnmplib==1000000000.0.0 -# pysnmp is no longer maintained and does not work with newer -# python -pysnmp==1000000000.0.0 # The get-mac package has been replaced with getmac. Installing get-mac alongside getmac # breaks getmac due to them both sharing the same python package name inside 'getmac'. @@ -183,8 +173,8 @@ dacite>=1.7.0 # Musle wheels for pandas 2.2.0 cannot be build for any architecture. 
pandas==2.1.4 -# chacha20poly1305-reuseable==0.12.0 is incompatible with cryptography==42.0.x -chacha20poly1305-reuseable>=0.12.1 +# chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x +chacha20poly1305-reuseable>=0.13.0 # pycountry<23.12.11 imports setuptools at run time # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 @@ -203,4 +193,4 @@ tuf>=4.0.0 pyserial-asyncio==1000000000.0.0 # https://github.com/jd/tenacity/issues/471 -tenacity<8.4.0 +tenacity!=8.4.0 diff --git a/homeassistant/requirements.py b/homeassistant/requirements.py index c0e92610b6e..4de5fed5a73 100644 --- a/homeassistant/requirements.py +++ b/homeassistant/requirements.py @@ -4,16 +4,16 @@ from __future__ import annotations import asyncio from collections.abc import Iterable +import contextlib import logging import os -from typing import Any, cast +from typing import Any from packaging.requirements import Requirement from .core import HomeAssistant, callback from .exceptions import HomeAssistantError from .helpers import singleton -from .helpers.typing import UNDEFINED, UndefinedType from .loader import Integration, IntegrationNotFound, async_get_integration from .util import package as pkg_util @@ -119,11 +119,6 @@ def _install_requirements_if_missing( return installed, failures -def _set_result_unless_done(future: asyncio.Future[None]) -> None: - if not future.done(): - future.set_result(None) - - class RequirementsManager: """Manage requirements.""" @@ -132,7 +127,7 @@ class RequirementsManager: self.hass = hass self.pip_lock = asyncio.Lock() self.integrations_with_reqs: dict[ - str, Integration | asyncio.Future[None] | None | UndefinedType + str, Integration | asyncio.Future[Integration] ] = {} self.install_failure_history: set[str] = set() self.is_installed_cache: set[str] = set() @@ -151,37 +146,32 @@ class RequirementsManager: else: done.add(domain) - if self.hass.config.skip_pip: - return await 
async_get_integration(self.hass, domain) - cache = self.integrations_with_reqs - int_or_fut = cache.get(domain, UNDEFINED) - - if isinstance(int_or_fut, asyncio.Future): - await int_or_fut - - # When we have waited and it's UNDEFINED, it doesn't exist - # We don't cache that it doesn't exist, or else people can't fix it - # and then restart, because their config will never be valid. - if (int_or_fut := cache.get(domain, UNDEFINED)) is UNDEFINED: - raise IntegrationNotFound(domain) - - if int_or_fut is not UNDEFINED: - return cast(Integration, int_or_fut) + if int_or_fut := cache.get(domain): + if isinstance(int_or_fut, Integration): + return int_or_fut + return await int_or_fut future = cache[domain] = self.hass.loop.create_future() - try: integration = await async_get_integration(self.hass, domain) - await self._async_process_integration(integration, done) - except Exception: + if not self.hass.config.skip_pip: + await self._async_process_integration(integration, done) + except BaseException as ex: + # We do not cache failures as we want to retry, or + # else people can't fix it and then restart, because + # their config will never be valid. del cache[domain] + future.set_exception(ex) + with contextlib.suppress(BaseException): + # Clear the flag as its normal that nothing + # will wait for this future to be resolved + # if there are no concurrent requirements fetches. 
+ await future raise - finally: - _set_result_unless_done(future) cache[domain] = integration - _set_result_unless_done(future) + future.set_result(integration) return integration async def _async_process_integration( diff --git a/homeassistant/scripts/benchmark/__init__.py b/homeassistant/scripts/benchmark/__init__.py index 34bc536502f..b769d385a4f 100644 --- a/homeassistant/scripts/benchmark/__init__.py +++ b/homeassistant/scripts/benchmark/__init__.py @@ -4,10 +4,8 @@ from __future__ import annotations import argparse import asyncio -import collections from collections.abc import Callable from contextlib import suppress -import json import logging from timeit import default_timer as timer @@ -18,7 +16,7 @@ from homeassistant.helpers.event import ( async_track_state_change, async_track_state_change_event, ) -from homeassistant.helpers.json import JSON_DUMP, JSONEncoder +from homeassistant.helpers.json import JSON_DUMP # mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs # mypy: no-warn-return-any @@ -310,48 +308,3 @@ async def json_serialize_states(hass): start = timer() JSON_DUMP(states) return timer() - start - - -def _create_state_changed_event_from_old_new( - entity_id, event_time_fired, old_state, new_state -): - """Create a state changed event from a old and new state.""" - attributes = {} - if new_state is not None: - attributes = new_state.get("attributes") - attributes_json = json.dumps(attributes, cls=JSONEncoder) - if attributes_json == "null": - attributes_json = "{}" - row = collections.namedtuple( - "Row", - [ - "event_type" - "event_data" - "time_fired" - "context_id" - "context_user_id" - "state" - "entity_id" - "domain" - "attributes" - "state_id", - "old_state_id", - ], - ) - - row.event_type = EVENT_STATE_CHANGED - row.event_data = "{}" - row.attributes = attributes_json - row.time_fired = event_time_fired - row.state = new_state and new_state.get("state") - row.entity_id = entity_id - row.domain = entity_id and 
core.split_entity_id(entity_id)[0] - row.context_id = None - row.context_user_id = None - row.old_state_id = old_state and 1 - row.state_id = new_state and 1 - - # pylint: disable-next=import-outside-toplevel - from homeassistant.components import logbook - - return logbook.LazyEventPartialState(row, {}) diff --git a/homeassistant/scripts/macos/__init__.py b/homeassistant/scripts/macos/__init__.py index f629492ec39..0bf88da81dc 100644 --- a/homeassistant/scripts/macos/__init__.py +++ b/homeassistant/scripts/macos/__init__.py @@ -44,7 +44,7 @@ def uninstall_osx(): print("Home Assistant has been uninstalled.") -def run(args): +def run(args: list[str]) -> int: """Handle OSX commandline script.""" commands = "install", "uninstall", "restart" if not args or args[0] not in commands: @@ -63,3 +63,5 @@ def run(args): time.sleep(0.5) install_osx() return 0 + + raise ValueError(f"Invalid command {args[0]}") diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 9775a3fee45..102c48e1d07 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from collections import defaultdict -from collections.abc import Awaitable, Callable, Mapping +from collections.abc import Awaitable, Callable, Generator, Mapping import contextlib import contextvars from enum import StrEnum @@ -14,8 +14,6 @@ import time from types import ModuleType from typing import Any, Final, TypedDict -from typing_extensions import Generator - from . 
import config as conf_util, core, loader, requirements from .const import ( BASE_PLATFORMS, # noqa: F401 @@ -31,7 +29,7 @@ from .core import ( callback, ) from .exceptions import DependencyError, HomeAssistantError -from .helpers import singleton, translation +from .helpers import issue_registry as ir, singleton, translation from .helpers.issue_registry import IssueSeverity, async_create_issue from .helpers.typing import ConfigType from .util.async_ import create_eager_task @@ -283,6 +281,20 @@ async def _async_setup_component( integration = await loader.async_get_integration(hass, domain) except loader.IntegrationNotFound: _log_error_setup_error(hass, domain, None, "Integration not found.") + if not hass.config.safe_mode: + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"integration_not_found.{domain}", + is_fixable=True, + issue_domain=HOMEASSISTANT_DOMAIN, + severity=IssueSeverity.ERROR, + translation_key="integration_not_found", + translation_placeholders={ + "domain": domain, + }, + data={"domain": domain}, + ) return False log_error = partial(_log_error_setup_error, hass, domain, integration) diff --git a/homeassistant/util/__init__.py b/homeassistant/util/__init__.py index c9aa2817640..c2d825a1676 100644 --- a/homeassistant/util/__init__.py +++ b/homeassistant/util/__init__.py @@ -129,13 +129,11 @@ class Throttle: async def throttled_value() -> None: """Stand-in function for when real func is being throttled.""" - return None else: def throttled_value() -> None: # type: ignore[misc] """Stand-in function for when real func is being throttled.""" - return None if self.limit_no_throttle is not None: method = Throttle(self.limit_no_throttle)(method) diff --git a/homeassistant/util/async_.py b/homeassistant/util/async_.py index f2dc1291324..dcb788f0685 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -2,7 +2,15 @@ from __future__ import annotations -from asyncio import AbstractEventLoop, Future, Semaphore, Task, gather, 
get_running_loop +from asyncio import ( + AbstractEventLoop, + Future, + Semaphore, + Task, + TimerHandle, + gather, + get_running_loop, +) from collections.abc import Awaitable, Callable, Coroutine import concurrent.futures import logging @@ -124,3 +132,9 @@ def shutdown_run_callback_threadsafe(loop: AbstractEventLoop) -> None: python is going to exit. """ setattr(loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE, True) + + +def get_scheduled_timer_handles(loop: AbstractEventLoop) -> list[TimerHandle]: + """Return a list of scheduled TimerHandles.""" + handles: list[TimerHandle] = loop._scheduled # type: ignore[attr-defined] # noqa: SLF001 + return handles diff --git a/homeassistant/util/color.py b/homeassistant/util/color.py index ab5c4037f9b..0745bc96dfb 100644 --- a/homeassistant/util/color.py +++ b/homeassistant/util/color.py @@ -244,7 +244,7 @@ def color_RGB_to_xy_brightness( y = Y / (X + Y + Z) # Brightness - Y = 1 if Y > 1 else Y + Y = min(Y, 1) brightness = round(Y * 255) # Check if the given xy value is within the color-reach of the lamp. 
diff --git a/homeassistant/util/json.py b/homeassistant/util/json.py index 1479550b615..fa67f6b1dcc 100644 --- a/homeassistant/util/json.py +++ b/homeassistant/util/json.py @@ -2,8 +2,6 @@ from __future__ import annotations -from collections.abc import Callable -import json import logging from os import PathLike from typing import Any @@ -12,8 +10,6 @@ import orjson from homeassistant.exceptions import HomeAssistantError -from .file import WriteError # noqa: F401 - _SENTINEL = object() _LOGGER = logging.getLogger(__name__) @@ -129,63 +125,9 @@ def load_json_object( raise HomeAssistantError(f"Expected JSON to be parsed as a dict got {type(value)}") -def save_json( - filename: str, - data: list | dict, - private: bool = False, - *, - encoder: type[json.JSONEncoder] | None = None, - atomic_writes: bool = False, -) -> None: - """Save JSON data to a file.""" - # pylint: disable-next=import-outside-toplevel - from homeassistant.helpers.frame import report - - report( - ( - "uses save_json from homeassistant.util.json module." - " This is deprecated and will stop working in Home Assistant 2022.4, it" - " should be updated to use homeassistant.helpers.json module instead" - ), - error_if_core=False, - ) - - # pylint: disable-next=import-outside-toplevel - import homeassistant.helpers.json as json_helper - - json_helper.save_json( - filename, data, private, encoder=encoder, atomic_writes=atomic_writes - ) - - def format_unserializable_data(data: dict[str, Any]) -> str: """Format output of find_paths in a friendly way. Format is comma separated: =() """ return ", ".join(f"{path}={value}({type(value)}" for path, value in data.items()) - - -def find_paths_unserializable_data( - bad_data: Any, *, dump: Callable[[Any], str] = json.dumps -) -> dict[str, Any]: - """Find the paths to unserializable data. - - This method is slow! Only use for error handling. 
- """ - # pylint: disable-next=import-outside-toplevel - from homeassistant.helpers.frame import report - - report( - ( - "uses find_paths_unserializable_data from homeassistant.util.json module." - " This is deprecated and will stop working in Home Assistant 2022.4, it" - " should be updated to use homeassistant.helpers.json module instead" - ), - error_if_core=False, - ) - - # pylint: disable-next=import-outside-toplevel - import homeassistant.helpers.json as json_helper - - return json_helper.find_paths_unserializable_data(bad_data, dump=dump) diff --git a/homeassistant/util/language.py b/homeassistant/util/language.py index 8644f8014b6..8a82de9065f 100644 --- a/homeassistant/util/language.py +++ b/homeassistant/util/language.py @@ -137,9 +137,6 @@ class Dialect: region_idx = pref_regions.index(self.region) elif dialect.region is not None: region_idx = pref_regions.index(dialect.region) - else: - # Can't happen, but mypy is not smart enough - raise ValueError # More preferred regions are at the front. # Add 1 to boost above a weak match where no regions are set. 
diff --git a/homeassistant/util/location.py b/homeassistant/util/location.py index 24c49c5427c..c00cf88699e 100644 --- a/homeassistant/util/location.py +++ b/homeassistant/util/location.py @@ -163,7 +163,8 @@ async def _get_whoami(session: aiohttp.ClientSession) -> dict[str, Any] | None: """Query whoami.home-assistant.io for location data.""" try: resp = await session.get( - WHOAMI_URL_DEV if HA_VERSION.endswith("0.dev0") else WHOAMI_URL, timeout=30 + WHOAMI_URL_DEV if HA_VERSION.endswith("0.dev0") else WHOAMI_URL, + timeout=aiohttp.ClientTimeout(total=30), ) except (aiohttp.ClientError, TimeoutError): return None diff --git a/homeassistant/util/loop.py b/homeassistant/util/loop.py index 8a469569601..d7593013046 100644 --- a/homeassistant/util/loop.py +++ b/homeassistant/util/loop.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable import functools +from functools import cache import linecache import logging import threading @@ -26,6 +27,11 @@ def _get_line_from_cache(filename: str, lineno: int) -> str: return (linecache.getline(filename, lineno) or "?").strip() +# Set of previously reported blocking calls +# (integration, filename, lineno) +_PREVIOUSLY_REPORTED: set[tuple[str | None, str, int | Any]] = set() + + def raise_for_blocking_call( func: Callable[..., Any], check_allowed: Callable[[dict[str, Any]], bool] | None = None, @@ -42,28 +48,48 @@ def raise_for_blocking_call( offender_filename = offender_frame.f_code.co_filename offender_lineno = offender_frame.f_lineno offender_line = _get_line_from_cache(offender_filename, offender_lineno) + report_key: tuple[str | None, str, int | Any] try: integration_frame = get_integration_frame() except MissingIntegrationFrame: # Did not source from integration? Hard error. 
+ report_key = (None, offender_filename, offender_lineno) + was_reported = report_key in _PREVIOUSLY_REPORTED + _PREVIOUSLY_REPORTED.add(report_key) if not strict_core: - _LOGGER.warning( - "Detected blocking call to %s with args %s in %s, " - "line %s: %s inside the event loop; " - "This is causing stability issues. " - "Please create a bug report at " - "https://github.com/home-assistant/core/issues?q=is%%3Aopen+is%%3Aissue\n" - "%s\n" - "Traceback (most recent call last):\n%s", - func.__name__, - mapped_args.get("args"), - offender_filename, - offender_lineno, - offender_line, - _dev_help_message(func.__name__), - "".join(traceback.format_stack(f=offender_frame)), - ) + if was_reported: + _LOGGER.debug( + "Detected blocking call to %s with args %s in %s, " + "line %s: %s inside the event loop; " + "This is causing stability issues. " + "Please create a bug report at " + "https://github.com/home-assistant/core/issues?q=is%%3Aopen+is%%3Aissue\n" + "%s\n", + func.__name__, + mapped_args.get("args"), + offender_filename, + offender_lineno, + offender_line, + _dev_help_message(func.__name__), + ) + else: + _LOGGER.warning( + "Detected blocking call to %s with args %s in %s, " + "line %s: %s inside the event loop; " + "This is causing stability issues. 
" + "Please create a bug report at " + "https://github.com/home-assistant/core/issues?q=is%%3Aopen+is%%3Aissue\n" + "%s\n" + "Traceback (most recent call last):\n%s", + func.__name__, + mapped_args.get("args"), + offender_filename, + offender_lineno, + offender_line, + _dev_help_message(func.__name__), + "".join(traceback.format_stack(f=offender_frame)), + ) return if found_frame is None: @@ -77,39 +103,63 @@ def raise_for_blocking_call( f"{_dev_help_message(func.__name__)}" ) + report_key = (integration_frame.integration, offender_filename, offender_lineno) + was_reported = report_key in _PREVIOUSLY_REPORTED + _PREVIOUSLY_REPORTED.add(report_key) + report_issue = async_suggest_report_issue( async_get_hass_or_none(), integration_domain=integration_frame.integration, module=integration_frame.module, ) - _LOGGER.warning( - "Detected blocking call to %s with args %s " - "inside the event loop by %sintegration '%s' " - "at %s, line %s: %s (offender: %s, line %s: %s), please %s\n" - "%s\n" - "Traceback (most recent call last):\n%s", - func.__name__, - mapped_args.get("args"), - "custom " if integration_frame.custom_integration else "", - integration_frame.integration, - integration_frame.relative_filename, - integration_frame.line_number, - integration_frame.line, - offender_filename, - offender_lineno, - offender_line, - report_issue, - _dev_help_message(func.__name__), - "".join(traceback.format_stack(f=integration_frame.frame)), - ) + if was_reported: + _LOGGER.debug( + "Detected blocking call to %s with args %s " + "inside the event loop by %sintegration '%s' " + "at %s, line %s: %s (offender: %s, line %s: %s), please %s\n" + "%s\n", + func.__name__, + mapped_args.get("args"), + "custom " if integration_frame.custom_integration else "", + integration_frame.integration, + integration_frame.relative_filename, + integration_frame.line_number, + integration_frame.line, + offender_filename, + offender_lineno, + offender_line, + report_issue, + 
_dev_help_message(func.__name__), + ) + else: + _LOGGER.warning( + "Detected blocking call to %s with args %s " + "inside the event loop by %sintegration '%s' " + "at %s, line %s: %s (offender: %s, line %s: %s), please %s\n" + "%s\n" + "Traceback (most recent call last):\n%s", + func.__name__, + mapped_args.get("args"), + "custom " if integration_frame.custom_integration else "", + integration_frame.integration, + integration_frame.relative_filename, + integration_frame.line_number, + integration_frame.line, + offender_filename, + offender_lineno, + offender_line, + report_issue, + _dev_help_message(func.__name__), + "".join(traceback.format_stack(f=integration_frame.frame)), + ) if strict: raise RuntimeError( - "Caught blocking call to {func.__name__} with args " - f"{mapped_args.get('args')} inside the event loop by" + f"Caught blocking call to {func.__name__} with args " + f"{mapped_args.get('args')} inside the event loop by " f"{'custom ' if integration_frame.custom_integration else ''}" - "integration '{integration_frame.integration}' at " + f"integration '{integration_frame.integration}' at " f"{integration_frame.relative_filename}, line {integration_frame.line_number}:" f" {integration_frame.line}. 
(offender: {offender_filename}, line " f"{offender_lineno}: {offender_line}), please {report_issue}\n" @@ -117,6 +167,7 @@ def raise_for_blocking_call( ) +@cache def _dev_help_message(what: str) -> str: """Generate help message to guide developers.""" return ( diff --git a/homeassistant/util/timeout.py b/homeassistant/util/timeout.py index 72cabffeed6..821f502694b 100644 --- a/homeassistant/util/timeout.py +++ b/homeassistant/util/timeout.py @@ -61,18 +61,16 @@ class _GlobalFreezeContext: def _enter(self) -> None: """Run freeze.""" - if not self._manager.freezes_done: - return + if self._manager.freezes_done: + # Global reset + for task in self._manager.global_tasks: + task.pause() - # Global reset - for task in self._manager.global_tasks: - task.pause() - - # Zones reset - for zone in self._manager.zones.values(): - if not zone.freezes_done: - continue - zone.pause() + # Zones reset + for zone in self._manager.zones.values(): + if not zone.freezes_done: + continue + zone.pause() self._manager.global_freezes.append(self) diff --git a/homeassistant/util/ulid.py b/homeassistant/util/ulid.py index 65f1b8226c0..f4895f9d963 100644 --- a/homeassistant/util/ulid.py +++ b/homeassistant/util/ulid.py @@ -4,10 +4,12 @@ from __future__ import annotations from ulid_transform import ( bytes_to_ulid, + bytes_to_ulid_or_none, ulid_at_time, ulid_hex, ulid_now, ulid_to_bytes, + ulid_to_bytes_or_none, ) __all__ = [ @@ -17,6 +19,8 @@ __all__ = [ "ulid_to_bytes", "bytes_to_ulid", "ulid_now", + "ulid_to_bytes_or_none", + "bytes_to_ulid_or_none", ] diff --git a/homeassistant/util/yaml/objects.py b/homeassistant/util/yaml/objects.py index d35ba11d25e..7e4019331c6 100644 --- a/homeassistant/util/yaml/objects.py +++ b/homeassistant/util/yaml/objects.py @@ -29,7 +29,7 @@ class NodeStrClass(str): def __voluptuous_compile__(self, schema: vol.Schema) -> Any: """Needed because vol.Schema.compile does not handle str subclasses.""" - return _compile_scalar(self) + return _compile_scalar(self) # 
type: ignore[no-untyped-call] class NodeDictClass(dict): diff --git a/mypy.ini b/mypy.ini index 740eb4f2b5b..f0a941f20eb 100644 --- a/mypy.ini +++ b/mypy.ini @@ -85,6 +85,9 @@ disallow_any_generics = true [mypy-homeassistant.helpers.reload] disallow_any_generics = true +[mypy-homeassistant.helpers.script] +disallow_any_generics = true + [mypy-homeassistant.helpers.script_variables] disallow_any_generics = true @@ -702,26 +705,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.asterisk_cdr.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - -[mypy-homeassistant.components.asterisk_mbox.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.asuswrt.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -732,6 +715,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.autarco.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.auth.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -942,6 +935,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.bryant_evolution.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs 
= true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.bthome.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1413,6 +1416,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.elevenlabs.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.elgato.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1713,6 +1726,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.fyta.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.generic_hygrostat.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2293,6 +2316,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.iotty.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.ipp.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2533,6 +2566,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.linkplay.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + 
[mypy-homeassistant.components.litejet.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2633,6 +2676,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.madvr.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.mailbox.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3463,6 +3516,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.roborock.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.roku.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3573,6 +3636,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.script.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.search.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index 6dd19d96d01..2c58e7aae15 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -79,7 +79,7 @@ _INNER_MATCH_POSSIBILITIES = [i + 1 for i in range(5)] _TYPE_HINT_MATCHERS.update( { f"x_of_y_{i}": re.compile( - rf"^(\w+)\[{_INNER_MATCH}" + f", {_INNER_MATCH}" * (i - 1) + r"\]$" + 
rf"^([\w\.]+)\[{_INNER_MATCH}" + f", {_INNER_MATCH}" * (i - 1) + r"\]$" ) for i in _INNER_MATCH_POSSIBILITIES } @@ -100,8 +100,9 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { "aiohttp_client": "ClientSessionGenerator", "aiohttp_server": "Callable[[], TestServer]", "area_registry": "AreaRegistry", - "async_setup_recorder_instance": "RecorderInstanceGenerator", + "async_test_recorder": "RecorderInstanceGenerator", "caplog": "pytest.LogCaptureFixture", + "capsys": "pytest.CaptureFixture[str]", "current_request_with_host": "None", "device_registry": "DeviceRegistry", "enable_bluetooth": "None", @@ -125,7 +126,6 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { "hass_owner_user": "MockUser", "hass_read_only_access_token": "str", "hass_read_only_user": "MockUser", - "hass_recorder": "Callable[..., HomeAssistant]", "hass_storage": "dict[str, Any]", "hass_supervisor_access_token": "str", "hass_supervisor_user": "MockUser", @@ -146,6 +146,7 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { "mock_tts_get_cache_files": "MagicMock", "mock_tts_init_cache_dir": "MagicMock", "mock_zeroconf": "MagicMock", + "monkeypatch": "pytest.MonkeyPatch", "mqtt_client_mock": "MqttMockPahoClient", "mqtt_mock": "MqttMockHAClient", "mqtt_mock_entry": "MqttMockHAClientGenerator", diff --git a/pylint/plugins/hass_imports.py b/pylint/plugins/hass_imports.py index b4d30be483d..57b71560b53 100644 --- a/pylint/plugins/hass_imports.py +++ b/pylint/plugins/hass_imports.py @@ -360,6 +360,12 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { constant=re.compile(r"^RESULT_TYPE_(\w*)$"), ), ], + "homeassistant.helpers.config_validation": [ + ObsoleteImportMatch( + reason="should be imported from homeassistant/components/", + constant=re.compile(r"^PLATFORM_SCHEMA(_BASE)?$"), + ), + ], "homeassistant.helpers.device_registry": [ ObsoleteImportMatch( reason="replaced by DeviceEntryDisabler enum", @@ -386,12 +392,6 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { 
constant=re.compile(r"^IMPERIAL_SYSTEM$"), ), ], - "homeassistant.util.json": [ - ObsoleteImportMatch( - reason="moved to homeassistant.helpers.json", - constant=re.compile(r"^save_json|find_paths_unserializable_data$"), - ), - ], } diff --git a/pyproject.toml b/pyproject.toml index 9f83edd7f3e..5f6324bbac5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.7.0.dev0" +version = "2024.9.0.dev0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" @@ -24,17 +24,16 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.9.5", + "aiohttp==3.10.3", "aiohttp_cors==0.7.0", - "aiohttp-fast-url-dispatcher==0.3.0", - "aiohttp-fast-zlib==0.1.0", - "aiozoneinfo==0.2.0", + "aiohttp-fast-zlib==0.1.1", + "aiozoneinfo==0.2.1", "astral==2.2", - "async-interrupt==1.1.1", + "async-interrupt==1.1.2", "attrs==23.2.0", "atomicwrites-homeassistant==1.4.1", - "awesomeversion==24.2.0", - "bcrypt==4.1.2", + "awesomeversion==24.6.0", + "bcrypt==4.1.3", "certifi>=2021.5.30", "ciso8601==2.3.1", "fnv-hash-fast==0.5.0", @@ -44,31 +43,32 @@ dependencies = [ # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.27.0", - "home-assistant-bluetooth==1.12.1", + "home-assistant-bluetooth==1.12.2", "ifaddr==0.2.0", "Jinja2==3.1.4", "lru-dict==1.3.0", - "PyJWT==2.8.0", + "PyJWT==2.9.0", # PyJWT has loose dependency. We want the latest one. 
- "cryptography==42.0.8", - "Pillow==10.3.0", - "pyOpenSSL==24.1.0", - "orjson==3.9.15", + "cryptography==43.0.0", + "Pillow==10.4.0", + "pyOpenSSL==24.2.1", + "orjson==3.10.7", "packaging>=23.1", "pip>=21.3.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", - "PyYAML==6.0.1", + "PyYAML==6.0.2", "requests==2.32.3", "SQLAlchemy==2.0.31", "typing-extensions>=4.12.2,<5.0", - "ulid-transform==0.9.0", + "ulid-transform==0.13.1", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "voluptuous==0.13.1", + "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", + "voluptuous-openapi==0.0.5", "yarl==1.9.4", ] @@ -184,6 +184,7 @@ disable = [ "bidirectional-unicode", # PLE2502 "continue-in-finally", # PLE0116 "duplicate-bases", # PLE0241 + "misplaced-bare-raise", # PLE0704 "format-needs-mapping", # F502 "function-redefined", # F811 # Needed because ruff does not understand type of __all__ generated by a function @@ -311,6 +312,7 @@ disable = [ "no-else-return", # RET505 "broad-except", # BLE001 "protected-access", # SLF001 + "broad-exception-raised", # TRY002 # "no-self-use", # PLR6301 # Optional plugin, not enabled # Handled by mypy @@ -456,16 +458,14 @@ filterwarnings = [ # Ignore custom pytest marks "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", "ignore:Unknown pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", - # https://github.com/rokam/sunweg/blob/3.0.1/sunweg/plant.py#L96 - v3.0.1 - 2024-05-29 + # https://github.com/rokam/sunweg/blob/3.0.2/sunweg/plant.py#L96 - v3.0.2 - 2024-07-10 "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", # -- design choice 3rd party # 
https://github.com/gwww/elkm1/blob/2.2.7/elkm1_lib/util.py#L8-L19 "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", - # https://github.com/michaeldavie/env_canada/blob/v0.6.2/env_canada/ec_cache.py - "ignore:Inheritance class CacheClientSession from ClientSession is discouraged:DeprecationWarning:env_canada.ec_cache", # https://github.com/allenporter/ical/pull/215 - # https://github.com/allenporter/ical/blob/8.0.0/ical/util.py#L20-L22 + # https://github.com/allenporter/ical/blob/8.1.1/ical/util.py#L21-L23 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", @@ -477,11 +477,11 @@ filterwarnings = [ "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", # -- tracked upstream / open PRs + # https://github.com/ronf/asyncssh/issues/674 - v2.15.0 + "ignore:ARC4 has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and will be removed from this module in 48.0.0:UserWarning:asyncssh.crypto.cipher", + "ignore:TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from this module in 48.0.0:UserWarning:asyncssh.crypto.cipher", # https://github.com/certbot/certbot/issues/9828 - v2.10.0 "ignore:X509Extension support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - # https://github.com/influxdata/influxdb-client-python/issues/603 - v1.42.0 - # https://github.com/influxdata/influxdb-client-python/pull/652 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", # https://github.com/beetbox/mediafile/issues/67 - v0.12.0 "ignore:'imghdr' is deprecated and slated for removal in Python 3.13:DeprecationWarning:mediafile", # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 @@ -499,8 +499,9 @@ filterwarnings = [ "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api", # https://github.com/httplib2/httplib2/pull/226 - >=0.21.0 "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2", - # https://github.com/jaraco/jaraco.abode/commit/9e3e789efc96cddcaa15f920686bbeb79a7469e0 - update jaraco.abode to >=5.1.0 - "ignore:`jaraco.functools.call_aside` is deprecated, use `jaraco.functools.invoke` instead:DeprecationWarning:jaraco.abode.helpers.timeline", + # https://github.com/influxdata/influxdb-client-python/issues/603 >1.45.0 + # https://github.com/influxdata/influxdb-client-python/pull/652 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", # https://github.com/majuss/lupupy/pull/15 - >0.3.2 "ignore:\"is not\" with 'str' literal. 
Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm", # https://github.com/nextcord/nextcord/pull/1095 - >2.6.1 @@ -521,10 +522,6 @@ filterwarnings = [ "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", # https://github.com/mvantellingen/python-zeep/pull/1364 - >4.2.1 "ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning:zeep.utils", - # https://github.com/timmo001/system-bridge-connector/pull/27 - >=4.1.0 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:systembridgeconnector.version", - # https://github.com/jschlyter/ttls/commit/d64f1251397b8238cf6a35bea64784de25e3386c - >=1.8.1 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:ttls", # -- fixed for Python 3.13 # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 @@ -543,7 +540,7 @@ filterwarnings = [ # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 # https://github.com/martonperei/emulated_roku "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", - # https://github.com/thecynic/pylutron - v0.2.13 + # https://github.com/thecynic/pylutron - v0.2.15 "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", @@ -558,6 +555,9 @@ filterwarnings = [ # https://pypi.org/project/aprslib/ - v0.7.2 - 2022-07-10 "ignore:invalid escape sequence:SyntaxWarning:.*aprslib.parsing.common", "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aprslib.parsing.common", + # https://pypi.org/project/panasonic-viera/ - v0.4.2 - 2024-04-24 + # https://github.com/florianholzapfel/panasonic-viera/blob/0.4.2/panasonic_viera/__init__.py#L789 + "ignore:invalid escape sequence:SyntaxWarning:.*panasonic_viera", # 
https://pypi.org/project/pyblackbird/ - v0.6 - 2023-03-15 # https://github.com/koolsb/pyblackbird/pull/9 -> closed "ignore:invalid escape sequence:SyntaxWarning:.*pyblackbird", @@ -582,9 +582,14 @@ filterwarnings = [ "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", - # https://pypi.org/project/velbus-aio/ - v2024.4.1 - 2024-04-07 - # https://github.com/Cereal2nd/velbus-aio/blob/2024.4.1/velbusaio/handler.py#L12 + # https://pypi.org/project/velbus-aio/ - v2024.7.5 - 2024-07-05 + # https://github.com/Cereal2nd/velbus-aio/blob/2024.7.5/velbusaio/handler.py#L22 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", + # - pyOpenSSL v24.2.1 + # https://pypi.org/project/acme/ - v2.11.0 - 2024-06-06 + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + # https://pypi.org/project/josepy/ - v1.14.0 - 2023-11-01 + "ignore:CSR support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:josepy.util", # -- Python 3.13 # HomeAssistant @@ -594,9 +599,6 @@ filterwarnings = [ # https://github.com/nextcord/nextcord/issues/1174 # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5 "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player", - # https://pypi.org/project/pylutron/ - v0.2.12 - 2024-02-12 - # https://github.com/thecynic/pylutron/issues/89 - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pylutron", # https://pypi.org/project/SpeechRecognition/ - v3.10.4 - 2024-05-05 # https://github.com/Uberi/speech_recognition/blob/3.10.4/speech_recognition/__init__.py#L7 "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", @@ -656,10 +658,6 @@ filterwarnings = [ "ignore:\"is\" with 'int' literal. Did you mean \"==\"?:SyntaxWarning:.*pyiss", # https://pypi.org/project/PyMetEireann/ - v2021.8.0 - 2021-08-16 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteireann", - # https://pypi.org/project/pyowm/ - v3.3.0 - 2022-02-14 - # https://github.com/csparpa/pyowm/issues/435 - # https://github.com/csparpa/pyowm/blob/3.3.0/pyowm/commons/cityidregistry.py#L7 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pyowm.commons.cityidregistry", # https://pypi.org/project/PyPasser/ - v0.0.5 - 2021-10-21 "ignore:invalid escape sequence:SyntaxWarning:.*pypasser.utils", # https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19 @@ -669,16 +667,37 @@ filterwarnings = [ "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:rx.internal.constants", # https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10 "ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp", - # 
https://pypi.org/project/webrtcvad/ - v2.0.10 - 2017-01-08 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:webrtcvad", +] + +[tool.coverage.run] +source = ["homeassistant"] + +[tool.coverage.report] +exclude_lines = [ + # Have to re-enable the standard pragma + "pragma: no cover", + # Don't complain about missing debug-only code: + "def __repr__", + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", + # TYPE_CHECKING and @overload blocks are never executed during pytest run + "if TYPE_CHECKING:", + "@overload", ] [tool.ruff] -required-version = ">=0.4.8" +required-version = ">=0.5.3" [tool.ruff.lint] select = [ "A001", # Variable {name} is shadowing a Python builtin + "ASYNC210", # Async functions should not call blocking HTTP methods + "ASYNC220", # Async functions should not create subprocesses with blocking methods + "ASYNC221", # Async functions should not run processes with blocking methods + "ASYNC222", # Async functions should not wait on processes with blocking methods + "ASYNC230", # Async functions should not open files with blocking methods like open + "ASYNC251", # Async functions should not call time.sleep "B002", # Python does not support the unary prefix increment "B005", # Using .strip() with multi-character strings is misleading "B007", # Loop control variable {name} not used within loop body @@ -700,6 +719,7 @@ select = [ "E", # pycodestyle "F", # pyflakes/autoflake "FLY", # flynt + "FURB", # refurb "G", # flake8-logging-format "I", # isort "INP", # flake8-no-pep420 @@ -721,6 +741,7 @@ select = [ "RUF006", # Store a reference to the return value of asyncio.create_task "RUF010", # Use explicit conversion flag "RUF013", # PEP 484 prohibits implicit Optional + "RUF017", # Avoid quadratic list summation "RUF018", # Avoid assignment expressions in assert statements "RUF019", # Unnecessary key check before dictionary access # "RUF100", # Unused `noqa` directive; 
temporarily every now and then to clean them up @@ -797,19 +818,54 @@ ignore = [ "ISC001", # Disabled because ruff does not understand type of __all__ generated by a function - "PLE0605", - - # temporarily disabled - "PT019", - "PYI024", # Use typing.NamedTuple instead of collections.namedtuple - "RET503", - "RET501", - "TRY002", - "TRY301" + "PLE0605" ] [tool.ruff.lint.flake8-import-conventions.extend-aliases] voluptuous = "vol" +"homeassistant.components.air_quality.PLATFORM_SCHEMA" = "AIR_QUALITY_PLATFORM_SCHEMA" +"homeassistant.components.alarm_control_panel.PLATFORM_SCHEMA" = "ALARM_CONTROL_PANEL_PLATFORM_SCHEMA" +"homeassistant.components.binary_sensor.PLATFORM_SCHEMA" = "BINARY_SENSOR_PLATFORM_SCHEMA" +"homeassistant.components.button.PLATFORM_SCHEMA" = "BUTTON_PLATFORM_SCHEMA" +"homeassistant.components.calendar.PLATFORM_SCHEMA" = "CALENDAR_PLATFORM_SCHEMA" +"homeassistant.components.camera.PLATFORM_SCHEMA" = "CAMERA_PLATFORM_SCHEMA" +"homeassistant.components.climate.PLATFORM_SCHEMA" = "CLIMATE_PLATFORM_SCHEMA" +"homeassistant.components.conversation.PLATFORM_SCHEMA" = "CONVERSATION_PLATFORM_SCHEMA" +"homeassistant.components.cover.PLATFORM_SCHEMA" = "COVER_PLATFORM_SCHEMA" +"homeassistant.components.date.PLATFORM_SCHEMA" = "DATE_PLATFORM_SCHEMA" +"homeassistant.components.datetime.PLATFORM_SCHEMA" = "DATETIME_PLATFORM_SCHEMA" +"homeassistant.components.device_tracker.PLATFORM_SCHEMA" = "DEVICE_TRACKER_PLATFORM_SCHEMA" +"homeassistant.components.event.PLATFORM_SCHEMA" = "EVENT_PLATFORM_SCHEMA" +"homeassistant.components.fan.PLATFORM_SCHEMA" = "FAN_PLATFORM_SCHEMA" +"homeassistant.components.geo_location.PLATFORM_SCHEMA" = "GEO_LOCATION_PLATFORM_SCHEMA" +"homeassistant.components.humidifier.PLATFORM_SCHEMA" = "HUMIDIFIER_PLATFORM_SCHEMA" +"homeassistant.components.image.PLATFORM_SCHEMA" = "IMAGE_PLATFORM_SCHEMA" +"homeassistant.components.image_processing.PLATFORM_SCHEMA" = "IMAGE_PROCESSING_PLATFORM_SCHEMA" 
+"homeassistant.components.lawn_mower.PLATFORM_SCHEMA" = "LAWN_MOWER_PLATFORM_SCHEMA" +"homeassistant.components.light.PLATFORM_SCHEMA" = "LIGHT_PLATFORM_SCHEMA" +"homeassistant.components.lock.PLATFORM_SCHEMA" = "LOCK_PLATFORM_SCHEMA" +"homeassistant.components.mailbox.PLATFORM_SCHEMA" = "MAILBOX_PLATFORM_SCHEMA" +"homeassistant.components.media_player.PLATFORM_SCHEMA" = "MEDIA_PLAYER_PLATFORM_SCHEMA" +"homeassistant.components.notify.PLATFORM_SCHEMA" = "NOTIFY_PLATFORM_SCHEMA" +"homeassistant.components.number.PLATFORM_SCHEMA" = "NUMBER_PLATFORM_SCHEMA" +"homeassistant.components.remote.PLATFORM_SCHEMA" = "REMOTE_PLATFORM_SCHEMA" +"homeassistant.components.scene.PLATFORM_SCHEMA" = "SCENE_PLATFORM_SCHEMA" +"homeassistant.components.select.PLATFORM_SCHEMA" = "SELECT_PLATFORM_SCHEMA" +"homeassistant.components.sensor.PLATFORM_SCHEMA" = "SENSOR_PLATFORM_SCHEMA" +"homeassistant.components.siren.PLATFORM_SCHEMA" = "SIREN_PLATFORM_SCHEMA" +"homeassistant.components.stt.PLATFORM_SCHEMA" = "STT_PLATFORM_SCHEMA" +"homeassistant.components.switch.PLATFORM_SCHEMA" = "SWITCH_PLATFORM_SCHEMA" +"homeassistant.components.text.PLATFORM_SCHEMA" = "TEXT_PLATFORM_SCHEMA" +"homeassistant.components.time.PLATFORM_SCHEMA" = "TIME_PLATFORM_SCHEMA" +"homeassistant.components.todo.PLATFORM_SCHEMA" = "TODO_PLATFORM_SCHEMA" +"homeassistant.components.tts.PLATFORM_SCHEMA" = "TTS_PLATFORM_SCHEMA" +"homeassistant.components.vacuum.PLATFORM_SCHEMA" = "VACUUM_PLATFORM_SCHEMA" +"homeassistant.components.valve.PLATFORM_SCHEMA" = "VALVE_PLATFORM_SCHEMA" +"homeassistant.components.update.PLATFORM_SCHEMA" = "UPDATE_PLATFORM_SCHEMA" +"homeassistant.components.wake_word.PLATFORM_SCHEMA" = "WAKE_WORD_PLATFORM_SCHEMA" +"homeassistant.components.water_heater.PLATFORM_SCHEMA" = "WATER_HEATER_PLATFORM_SCHEMA" +"homeassistant.components.weather.PLATFORM_SCHEMA" = "WEATHER_PLATFORM_SCHEMA" +"homeassistant.core.DOMAIN" = "HOMEASSISTANT_DOMAIN" "homeassistant.helpers.area_registry" = "ar" 
"homeassistant.helpers.category_registry" = "cr" "homeassistant.helpers.config_validation" = "cv" diff --git a/requirements.txt b/requirements.txt index 4c5e349d8b6..556f9013cee 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,41 +4,41 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.9.5 +aiohttp==3.10.3 aiohttp_cors==0.7.0 -aiohttp-fast-url-dispatcher==0.3.0 -aiohttp-fast-zlib==0.1.0 -aiozoneinfo==0.2.0 +aiohttp-fast-zlib==0.1.1 +aiozoneinfo==0.2.1 astral==2.2 -async-interrupt==1.1.1 +async-interrupt==1.1.2 attrs==23.2.0 atomicwrites-homeassistant==1.4.1 -awesomeversion==24.2.0 -bcrypt==4.1.2 +awesomeversion==24.6.0 +bcrypt==4.1.3 certifi>=2021.5.30 ciso8601==2.3.1 fnv-hash-fast==0.5.0 hass-nabucasa==0.81.1 httpx==0.27.0 -home-assistant-bluetooth==1.12.1 +home-assistant-bluetooth==1.12.2 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -PyJWT==2.8.0 -cryptography==42.0.8 -Pillow==10.3.0 -pyOpenSSL==24.1.0 -orjson==3.9.15 +PyJWT==2.9.0 +cryptography==43.0.0 +Pillow==10.4.0 +pyOpenSSL==24.2.1 +orjson==3.10.7 packaging>=23.1 pip>=21.3.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 -PyYAML==6.0.1 +PyYAML==6.0.2 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.9.0 +ulid-transform==0.13.1 urllib3>=1.26.5,<2 -voluptuous==0.13.1 +voluptuous==0.15.2 voluptuous-serialize==2.6.0 +voluptuous-openapi==0.0.5 yarl==1.9.4 diff --git a/requirements_all.txt b/requirements_all.txt index 0d98c4f773c..ad750c5ec4b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -4,7 +4,7 @@ -r requirements.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.2 +AEMET-OpenData==0.5.4 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 @@ -12,17 +12,14 @@ AIOSomecomfort==0.0.25 # homeassistant.components.adax Adax-local==0.1.5 -# homeassistant.components.blinksticklight -BlinkStick==1.2.0 - # homeassistant.components.doorbird -DoorBirdPy==2.1.0 +DoorBirdPy==3.0.2 # homeassistant.components.homekit HAP-python==4.9.1 # 
homeassistant.components.tasmota -HATasmota==0.8.0 +HATasmota==0.9.2 # homeassistant.components.mastodon Mastodon.py==1.8.1 @@ -36,10 +33,10 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.3.0 +Pillow==10.4.0 # homeassistant.components.plex -PlexAPI==4.15.13 +PlexAPI==4.15.14 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 @@ -60,10 +57,7 @@ PyFlume==0.6.5 PyFronius==0.7.3 # homeassistant.components.pyload -PyLoadAPI==1.1.0 - -# homeassistant.components.mvglive -PyMVGLive==1.1.4 +PyLoadAPI==1.3.2 # homeassistant.components.met_eireann PyMetEireann==2021.8.0 @@ -90,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.0 +PySwitchbot==0.48.1 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -149,7 +143,7 @@ adb-shell[async]==0.4.4 adext==0.4.3 # homeassistant.components.adguard -adguardhome==0.6.3 +adguardhome==0.7.0 # homeassistant.components.advantage_air advantage-air==0.4.4 @@ -182,10 +176,10 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.5.3 +aioairzone-cloud==0.6.2 # homeassistant.components.airzone -aioairzone==0.7.7 +aioairzone==0.8.2 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -195,16 +189,16 @@ aioambient==2024.01.0 aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.1.7 +aioaquacell==0.2.0 # homeassistant.components.aseko_pool_live -aioaseko==0.1.1 +aioaseko==0.2.0 # homeassistant.components.asuswrt aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.6.1 +aioautomower==2024.8.0 # homeassistant.components.azure_devops aioazuredevops==2.1.1 @@ -213,13 +207,13 @@ aioazuredevops==2.1.1 aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.0 +aiobotocore==2.13.1 # homeassistant.components.comelit aiocomelit==0.9.0 # 
homeassistant.components.dhcp -aiodhcpwatcher==1.0.0 +aiodhcpwatcher==1.0.2 # homeassistant.components.dhcp aiodiscover==2.1.0 @@ -243,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.6.0 +aioesphomeapi==25.1.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -261,10 +255,10 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.homekit_controller -aiohomekit==3.1.5 +aiohomekit==3.2.2 # homeassistant.components.hue -aiohue==4.7.1 +aiohue==4.7.2 # homeassistant.components.imap aioimaplib==1.1.0 @@ -279,10 +273,10 @@ aiokef==0.2.16 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.4.15 +aiolifx-themes==0.5.0 # homeassistant.components.lifx -aiolifx==1.0.2 +aiolifx==1.0.6 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -294,7 +288,7 @@ aiolookin==1.0.0 aiolyric==1.1.0 # homeassistant.components.mealie -aiomealie==0.4.0 +aiomealie==0.8.1 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -309,7 +303,7 @@ aionanoleaf==0.2.1 aionotion==2024.03.0 # homeassistant.components.nut -aionut==4.3.2 +aionut==4.3.3 # homeassistant.components.oncue aiooncue==0.3.7 @@ -318,7 +312,7 @@ aiooncue==0.3.7 aioopenexchangerates==0.4.0 # homeassistant.components.nmap_tracker -aiooui==0.1.5 +aiooui==0.1.6 # homeassistant.components.pegel_online aiopegelonline==0.0.10 @@ -341,10 +335,10 @@ aiopvpc==4.2.2 aiopyarr==23.4.0 # homeassistant.components.qnap_qsw -aioqsw==0.3.5 +aioqsw==0.4.1 # homeassistant.components.rainforest_raven -aioraven==0.6.0 +aioraven==0.7.0 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 @@ -355,6 +349,9 @@ aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed aioruckus==0.34 +# homeassistant.components.russound_rio +aiorussound==2.3.2 + # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -362,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==10.0.1 
+aioshelly==11.2.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -374,10 +371,10 @@ aioslimproto==3.0.0 aiosolaredge==0.2.0 # homeassistant.components.steamist -aiosteamist==0.3.2 +aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==3.4.3 +aioswitcher==4.0.2 # homeassistant.components.syncthing aiosyncthing==0.5.1 @@ -386,10 +383,10 @@ aiosyncthing==0.5.1 aiotankerkoenig==0.4.1 # homeassistant.components.tractive -aiotractive==0.5.6 +aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==79 +aiounifi==80 # homeassistant.components.vlc_telnet aiovlc==0.3.2 @@ -404,16 +401,16 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webostv -aiowebostv==0.4.0 +aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.0.1 +aiowithings==3.0.2 # homeassistant.components.yandex_transport -aioymaps==1.2.2 +aioymaps==1.2.5 # homeassistant.components.airgradient -airgradient==0.6.0 +airgradient==0.8.0 # homeassistant.components.airly airly==1.1.0 @@ -428,13 +425,13 @@ airthings-cloud==0.2.0 airtouch4pyapi==1.0.5 # homeassistant.components.airtouch5 -airtouch5py==0.2.8 +airtouch5py==0.2.10 # homeassistant.components.alpha_vantage alpha-vantage==2.3.1 # homeassistant.components.amberelectric -amberelectric==1.1.0 +amberelectric==1.1.1 # homeassistant.components.amcrest amcrest==1.9.8 @@ -449,11 +446,14 @@ androidtvremote2==0.1.1 anel-pwrctrl-homeassistant==0.0.1.dev2 # homeassistant.components.anova -anova-wifi==0.12.0 +anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 +# homeassistant.components.anthropic +anthropic==0.31.2 + # homeassistant.components.weatherkit apple_weatherkit==1.1.2 @@ -464,7 +464,7 @@ apprise==1.8.0 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==1.3.1 +apsystems-ez1==1.3.3 # homeassistant.components.aqualogic aqualogic==2.6 @@ -481,19 +481,16 @@ arris-tg2492lg==2.2.0 # homeassistant.components.ampio asmog==0.0.6 -# homeassistant.components.asterisk_mbox 
-asterisk_mbox==0.5.0 - # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.38.3 +async-upnp-client==0.40.0 # homeassistant.components.arve -asyncarve==0.0.9 +asyncarve==0.1.1 # homeassistant.components.keyboard_remote asyncinotify==4.0.2 @@ -513,6 +510,9 @@ auroranoaa==0.0.3 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 +# homeassistant.components.autarco +autarco==2.0.0 + # homeassistant.components.avea # avea==1.5.1 @@ -520,16 +520,16 @@ aurorapy==0.2.7 # avion==0.10 # homeassistant.components.axis -axis==61 +axis==62 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 # homeassistant.components.azure_data_explorer -azure-kusto-data[aio]==3.1.0 +azure-kusto-data[aio]==4.5.1 # homeassistant.components.azure_data_explorer -azure-kusto-ingest==3.1.0 +azure-kusto-ingest==4.5.1 # homeassistant.components.azure_service_bus azure-servicebus==7.10.0 @@ -555,11 +555,8 @@ beautifulsoup4==4.12.3 # homeassistant.components.beewi_smartclim # beewi-smartclim==0.0.10 -# homeassistant.components.zha -bellows==0.39.1 - # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.15.3 +bimmer-connected[china]==0.16.1 # homeassistant.components.bizkaibus bizkaibus==0.1.1 @@ -572,7 +569,7 @@ bleak-esphome==1.0.0 bleak-retry-connector==3.5.0 # homeassistant.components.bluetooth -bleak==0.22.1 +bleak==0.22.2 # homeassistant.components.blebox blebox-uniapi==2.4.2 @@ -594,7 +591,7 @@ bluemaestro-ble==0.2.3 # bluepy==1.3.0 # homeassistant.components.bluetooth -bluetooth-adapters==0.19.2 +bluetooth-adapters==0.19.4 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 @@ -603,7 +600,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device 
-bluetooth-data-tools==1.19.0 +bluetooth-data-tools==1.19.4 # homeassistant.components.bond bond-async==0.2.1 @@ -613,16 +610,19 @@ boschshcpy==0.2.91 # homeassistant.components.amazon_polly # homeassistant.components.route53 -boto3==1.34.51 +boto3==1.34.131 + +# homeassistant.components.aws +botocore==1.34.131 # homeassistant.components.bring -bring-api==0.7.1 +bring-api==0.8.1 # homeassistant.components.broadlink broadlink==0.19.0 # homeassistant.components.brother -brother==4.2.0 +brother==4.3.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -660,6 +660,9 @@ clearpasspy==1.0.2 # homeassistant.components.sinch clx-sdk-xms==1.0.0 +# homeassistant.components.coinbase +coinbase-advanced-py==1.2.2 + # homeassistant.components.coinbase coinbase==2.1.0 @@ -670,7 +673,7 @@ colorlog==6.8.2 colorthief==0.2.1 # homeassistant.components.concord232 -concord232==0.15 +concord232==0.15.1 # homeassistant.components.upc_connect connect-box==0.3.1 @@ -697,7 +700,7 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.21.3 +dbus-fast==2.22.1 # homeassistant.components.debugpy debugpy==1.8.1 @@ -709,7 +712,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.0.0 +deebot-client==8.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -734,6 +737,9 @@ devolo-home-control-api==0.18.3 # homeassistant.components.devolo_home_network devolo-plc-api==1.4.1 +# homeassistant.components.chacon_dio +dio-chacon-wifi-api==1.2.0 + # homeassistant.components.directv directv==0.4.0 @@ -741,10 +747,7 @@ directv==0.4.0 discogs-client==2.3.0 # homeassistant.components.steamist -discovery30303==0.2.1 - -# homeassistant.components.dovado -dovado==0.4.1 +discovery30303==0.3.2 # homeassistant.components.dremel_3d_printer dremel3dpy==2.1.1 @@ -753,7 +756,7 @@ dremel3dpy==2.1.1 dropmqttapi==1.0.3 # homeassistant.components.dsmr -dsmr-parser==1.3.1 +dsmr-parser==1.4.2 # 
homeassistant.components.dwd_weather_warnings dwdwfsapi==1.0.7 @@ -771,7 +774,7 @@ dynalite-panel==0.0.4 eagle100==0.1.1 # homeassistant.components.easyenergy -easyenergy==2.1.1 +easyenergy==2.1.2 # homeassistant.components.ebusd ebusdpy==0.0.17 @@ -782,6 +785,9 @@ ecoaliface==0.4.0 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 +# homeassistant.components.elevenlabs +elevenlabs==1.6.1 + # homeassistant.components.elgato elgato==5.1.2 @@ -807,7 +813,7 @@ emulated-roku==0.3.0 energyflip-client==0.2.2 # homeassistant.components.energyzero -energyzero==2.1.0 +energyzero==2.1.1 # homeassistant.components.enocean enocean==0.50 @@ -816,7 +822,7 @@ enocean==0.50 enturclient==0.2.4 # homeassistant.components.environment_canada -env-canada==0.6.3 +env-canada==0.7.2 # homeassistant.components.season ephem==4.1.5 @@ -831,7 +837,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.1.8 +eq3btsmart==1.1.9 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -846,7 +852,10 @@ eufylife-ble-client==0.1.8 # evdev==1.6.1 # homeassistant.components.evohome -evohome-async==0.4.19 +evohome-async==0.4.20 + +# homeassistant.components.bryant_evolution +evolutionhttp==0.0.18 # homeassistant.components.faa_delays faadelays==2023.9.1 @@ -912,7 +921,7 @@ freesms==0.2.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.4.1 +fyta_cli==0.6.0 # homeassistant.components.google_translate gTTS==2.2.4 @@ -924,10 +933,7 @@ gardena-bluetooth==1.4.2 gassist-text==0.0.11 # homeassistant.components.google -gcal-sync==6.0.4 - -# homeassistant.components.aladdin_connect -genie-partner-sdk==1.0.2 +gcal-sync==6.1.4 # homeassistant.components.geniushub geniushub-client==0.7.1 @@ -945,7 +951,7 @@ georss-generic-client==0.8 georss-ign-sismologia-client==0.8 # homeassistant.components.qld_bushfire -georss-qld-bushfire-alert-client==0.7 +georss-qld-bushfire-alert-client==0.8 # homeassistant.components.dlna_dmr # 
homeassistant.components.kef @@ -977,13 +983,13 @@ google-api-python-client==2.71.0 google-cloud-pubsub==2.13.11 # homeassistant.components.google_cloud -google-cloud-texttospeech==2.12.3 +google-cloud-texttospeech==2.16.3 # homeassistant.components.google_generative_ai_conversation google-generativeai==0.6.0 # homeassistant.components.nest -google-nest-sdm==4.0.5 +google-nest-sdm==4.0.6 # homeassistant.components.google_travel_time googlemaps==2.5.1 @@ -995,10 +1001,10 @@ goslide-api==0.5.1 gotailwind==0.2.3 # homeassistant.components.govee_ble -govee-ble==0.31.2 +govee-ble==0.40.0 # homeassistant.components.govee_light_local -govee-local-api==1.5.0 +govee-local-api==1.5.1 # homeassistant.components.remote_rpi_gpio gpiozero==1.6.2 @@ -1007,7 +1013,7 @@ gpiozero==1.6.2 gps3==0.33.3 # homeassistant.components.gree -greeclimate==1.4.1 +greeclimate==2.1.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 @@ -1016,7 +1022,7 @@ greeneye_monitor==3.0.3 greenwavereality==0.5.1 # homeassistant.components.pure_energie -gridnet==5.0.0 +gridnet==5.0.1 # homeassistant.components.growatt_server growattServer==1.5.0 @@ -1050,7 +1056,7 @@ ha-philipsjs==3.2.2 habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.1.1 +habluetooth==3.1.3 # homeassistant.components.cloud hass-nabucasa==0.81.1 @@ -1059,7 +1065,7 @@ hass-nabucasa==0.81.1 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==1.7.1 +hassil==1.7.4 # homeassistant.components.jewish_calendar hdate==0.10.9 @@ -1068,10 +1074,10 @@ hdate==0.10.9 heatmiserV3==1.1.18 # homeassistant.components.here_travel_time -here-routing==0.2.0 +here-routing==1.0.1 # homeassistant.components.here_travel_time -here-transit==1.2.0 +here-transit==1.2.1 # homeassistant.components.hikvisioncam hikvision==0.4 @@ -1090,19 +1096,19 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.51 +holidays==0.53 # homeassistant.components.frontend 
-home-assistant-frontend==20240610.1 +home-assistant-frontend==20240809.0 # homeassistant.components.conversation -home-assistant-intents==2024.6.21 +home-assistant-intents==2024.8.7 # homeassistant.components.home_connect -homeconnect==0.7.2 +homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.1 +homematicip==1.1.2 # homeassistant.components.horizon horimote==0.4.1 @@ -1134,13 +1140,13 @@ ibmiotf==0.3.4 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.0.1 +ical==8.1.1 # homeassistant.components.ping icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.5.3 +idasen-ha==2.6.2 # homeassistant.components.network ifaddr==0.2.0 @@ -1155,7 +1161,7 @@ ihcsdk==2.8.5 imgw_pib==1.0.5 # homeassistant.components.incomfort -incomfort-client==0.6.2 +incomfort-client==0.6.3-1 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -1164,7 +1170,7 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.5.6 +inkbird-ble==0.5.8 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 @@ -1172,6 +1178,9 @@ insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire intellifire4py==2.2.2 +# homeassistant.components.iotty +iottycloud==0.1.3 + # homeassistant.components.iperf3 iperf3==0.1.11 @@ -1181,20 +1190,21 @@ isal==1.6.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 -# homeassistant.components.abode -jaraco.abode==3.3.0 +# homeassistant.components.israel_rail +israel-rail-api==0.1.2 # homeassistant.components.abode -jaraco.functools==3.9.0 +jaraco.abode==5.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 +# homeassistant.components.command_line # homeassistant.components.rest jsonpath==0.82.2 # homeassistant.components.justnimbus -justnimbus==0.7.3 +justnimbus==0.7.4 # homeassistant.components.kaiterra kaiterra-async-client==1.0.0 @@ -1209,10 +1219,10 @@ 
kegtron-ble==0.4.0 kiwiki-client==0.1.1 # homeassistant.components.knocki -knocki==0.1.5 +knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.1.20.105944 +knx-frontend==2024.8.9.225351 # homeassistant.components.konnected konnected==1.2.0 @@ -1221,13 +1231,13 @@ konnected==1.2.0 krakenex==2.1.0 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.1 +lacrosse-view==1.0.2 # homeassistant.components.eufy lakeside==0.13 # homeassistant.components.laundrify -laundrify-aio==1.1.2 +laundrify-aio==1.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1236,7 +1246,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.1 +led-ble==1.0.2 # homeassistant.components.foscam libpyfoscam==1.2.2 @@ -1271,9 +1281,6 @@ lmcloud==1.1.13 # homeassistant.components.google_maps locationsharinglib==5.0.1 -# homeassistant.components.logi_circle -logi-circle==0.2.3 - # homeassistant.components.london_underground london-tube-status==0.5 @@ -1293,7 +1300,7 @@ lw12==0.9.2 lxml==5.1.0 # homeassistant.components.matrix -matrix-nio==0.24.0 +matrix-nio==0.25.0 # homeassistant.components.maxcube maxcube-api==0.4.3 @@ -1317,13 +1324,13 @@ melnor-bluetooth==0.0.25 messagebird==1.2.0 # homeassistant.components.meteoalarm -meteoalertapi==0.3.0 +meteoalertapi==0.3.1 # homeassistant.components.meteo_france meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.3.0 +mficlient==0.5.0 # homeassistant.components.xiaomi_miio micloud==0.5 @@ -1344,16 +1351,16 @@ minio==7.1.12 moat-ble==0.1.1 # homeassistant.components.moehlenhoff_alpha2 -moehlenhoff-alpha2==1.3.0 +moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.0 +monzopy==1.3.2 # homeassistant.components.mopeka -mopeka-iot-ble==0.7.0 +mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.23 +motionblinds==0.6.24 # homeassistant.components.motionblinds_ble motionblindsble==0.1.0 @@ -1362,7 +1369,7 @@ motionblindsble==0.1.0 
motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.4.1.8.5 +mozart-api==3.4.1.8.6 # homeassistant.components.mullvad mullvad-api==1.0.0 @@ -1395,7 +1402,7 @@ netdata==1.1.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.2.0 +nettigo-air-monitor==3.3.0 # homeassistant.components.neurio_energy neurio==0.3.1 @@ -1404,16 +1411,19 @@ neurio==0.3.1 nexia==2.0.8 # homeassistant.components.nextcloud -nextcloudmonitor==1.5.0 +nextcloudmonitor==1.5.1 # homeassistant.components.discord nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.0.0 +nextdns==3.1.0 # homeassistant.components.nibe_heatpump -nibe==2.8.0 +nibe==2.11.0 + +# homeassistant.components.nice_go +nice-go==0.1.6 # homeassistant.components.niko_home_control niko-home-control==0.2.1 @@ -1459,13 +1469,13 @@ oauth2client==4.1.3 objgraph==3.5.0 # homeassistant.components.garages_amsterdam -odp-amsterdam==6.0.1 +odp-amsterdam==6.0.2 # homeassistant.components.oem oemthermostat==1.1.1 # homeassistant.components.ollama -ollama-hass==0.1.7 +ollama==0.3.1 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1473,9 +1483,6 @@ omnilogic==0.4.5 # homeassistant.components.ondilo_ico ondilo==0.5.0 -# homeassistant.components.onkyo -onkyo-eiscp==1.2.7 - # homeassistant.components.onvif onvif-zeep-async==3.1.12 @@ -1486,7 +1493,7 @@ open-garage==0.2.0 open-meteo==0.3.1 # homeassistant.components.openai_conversation -openai==1.3.8 +openai==1.35.7 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1501,7 +1508,7 @@ openhomedevice==2.2.0 opensensemap-api==0.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.4 +openwebifpy==4.2.7 # homeassistant.components.luci openwrt-luci-rpc==1.1.17 @@ -1510,7 +1517,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.4.7 +opower==0.6.0 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1528,7 +1535,7 @@ ourgroceries==1.5.4 ovoenergy==2.0.0 # 
homeassistant.components.p1_monitor -p1monitor==3.0.0 +p1monitor==3.0.1 # homeassistant.components.mqtt paho-mqtt==1.6.1 @@ -1537,7 +1544,7 @@ paho-mqtt==1.6.1 panacotta==0.2 # homeassistant.components.panasonic_viera -panasonic-viera==0.3.6 +panasonic-viera==0.4.2 # homeassistant.components.dunehd pdunehd==1.3.2 @@ -1566,7 +1573,7 @@ pigpio==1.78 pilight==0.1.1 # homeassistant.components.dominos -pizzapi==0.0.3 +pizzapi==0.0.6 # homeassistant.components.plex plexauth==0.0.6 @@ -1575,7 +1582,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.37.4.1 +plugwise==0.38.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1607,7 +1614,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==5.9.8 +psutil==6.0.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1642,11 +1649,14 @@ py-dormakaba-dkey==1.0.5 # homeassistant.components.improv_ble py-improv-ble-client==1.0.3 +# homeassistant.components.madvr +py-madvr2==1.6.29 + # homeassistant.components.melissa py-melissa-climate==2.1.4 # homeassistant.components.nextbus -py-nextbusnext==1.0.2 +py-nextbusnext==2.0.4 # homeassistant.components.nightscout py-nightscout==1.2.2 @@ -1658,14 +1668,11 @@ py-schluter==0.1.7 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.4.4 +py-synologydsm-api==2.5.2 # homeassistant.components.zabbix py-zabbix==1.1.7 -# homeassistant.components.seventeentrack -py17track==2021.12.2 - # homeassistant.components.atome pyAtome==0.1.1 @@ -1679,7 +1686,7 @@ pyControl4==1.1.0 pyDuotecno==2024.5.1 # homeassistant.components.electrasmart -pyElectra==1.2.3 +pyElectra==1.2.4 # homeassistant.components.emby pyEmby==1.9 @@ -1719,7 +1726,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.0 +pyaprilaire==0.7.4 # homeassistant.components.asuswrt pyasuswrt==0.1.21 @@ -1731,13 +1738,13 @@ pyatag==0.3.5.3 pyatmo==8.0.3 # 
homeassistant.components.apple_tv -pyatv==0.14.3 +pyatv==0.15.0 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 # homeassistant.components.balboa -pybalboa==1.0.1 +pybalboa==1.0.2 # homeassistant.components.bbox pybbox==0.0.5-alpha @@ -1745,6 +1752,9 @@ pybbox==0.0.5-alpha # homeassistant.components.blackbird pyblackbird==0.6 +# homeassistant.components.bluesound +pyblu==0.4.0 + # homeassistant.components.neato pybotvac==0.0.25 @@ -1773,7 +1783,7 @@ pycmus==0.1.1 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.1.5 +pycoolmasternet-async==0.2.2 # homeassistant.components.microsoft pycsspeechtts==1.0.8 @@ -1782,7 +1792,7 @@ pycsspeechtts==1.0.8 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.11.1 +pydaikin==2.13.4 # homeassistant.components.danfoss_air pydanfossair==0.1.0 @@ -1803,7 +1813,7 @@ pydiscovergy==3.0.1 pydoods==1.0.2 # homeassistant.components.hydrawise -pydrawise==2024.6.4 +pydrawise==2024.8.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1829,11 +1839,14 @@ pyefergy==22.5.0 # homeassistant.components.energenie_power_sockets pyegps==0.2.5 +# homeassistant.components.onkyo +pyeiscp==0.0.7 + # homeassistant.components.emoncms pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.20.3 +pyenphase==1.22.0 # homeassistant.components.envisalink pyenvisalink==4.7 @@ -1860,7 +1873,7 @@ pyfido==2.1.2 pyfireservicerota==0.0.43 # homeassistant.components.flic -pyflic==2.0.3 +pyflic==2.0.4 # homeassistant.components.futurenow pyfnip==0.2 @@ -1872,7 +1885,7 @@ pyforked-daapd==0.1.14 pyfreedompro==1.1.0 # homeassistant.components.fritzbox -pyfritzhome==0.6.11 +pyfritzhome==0.6.12 # homeassistant.components.ifttt pyfttt==0.3 @@ -1899,7 +1912,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==0.0.6 +pyhomeworks==1.1.1 # homeassistant.components.ialarm pyialarm==2.2.0 @@ -1908,7 +1921,7 @@ pyialarm==2.2.0 pyicloud==1.0.0 
# homeassistant.components.insteon -pyinsteon==1.6.1 +pyinsteon==1.6.3 # homeassistant.components.intesishome pyintesishome==1.8.0 @@ -1935,7 +1948,7 @@ pyisy==3.1.14 pyitachip2ir==0.0.7 # homeassistant.components.jvc_projector -pyjvcprojector==1.0.11 +pyjvcprojector==1.0.12 # homeassistant.components.kaleidescape pykaleidescape==1.0.1 @@ -1983,10 +1996,10 @@ pylitejet==0.6.2 pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.20.0 +pylutron-caseta==0.21.1 # homeassistant.components.lutron -pylutron==0.2.13 +pylutron==0.2.15 # homeassistant.components.mailgun pymailgunner==1.4 @@ -2003,6 +2016,9 @@ pymelcloud==2.5.9 # homeassistant.components.meteoclimatic pymeteoclimatic==0.1.0 +# homeassistant.components.assist_pipeline +pymicro-vad==1.0.1 + # homeassistant.components.xiaomi_tv pymitv==1.4.3 @@ -2010,7 +2026,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.8 +pymodbus==3.6.9 # homeassistant.components.monoprice pymonoprice==0.4 @@ -2021,6 +2037,9 @@ pymsteams==0.1.12 # homeassistant.components.mysensors pymysensors==0.24.0 +# homeassistant.components.iron_os +pynecil==0.2.0 + # homeassistant.components.netgear pynetgear==0.10.10 @@ -2037,7 +2056,7 @@ pynuki==1.6.3 pynws[retry]==1.8.2 # homeassistant.components.nx584 -pynx584==0.5 +pynx584==0.8.2 # homeassistant.components.nzbget pynzbgetapi==0.2.0 @@ -2055,7 +2074,7 @@ pyombi==0.1.10 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.0.9 +pyopenweathermap==0.1.1 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -2075,7 +2094,7 @@ pyotgw==2.2.0 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.11 +pyoverkiz==1.13.14 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -2084,7 +2103,7 @@ pyownet==0.10.0.post1 pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.17 +pypck==0.7.21 # homeassistant.components.pjlink pypjlink2==1.2.1 @@ -2144,25 +2163,26 @@ pysabnzbd==1.1.1 pysaj==0.0.16 # 
homeassistant.components.schlage -pyschlage==2024.6.0 +pyschlage==2024.8.0 # homeassistant.components.sensibo pysensibo==1.0.36 # homeassistant.components.serial -# homeassistant.components.zha -pyserial-asyncio-fast==0.11 +pyserial-asyncio-fast==0.13 # homeassistant.components.acer_projector # homeassistant.components.crownstone # homeassistant.components.usb -# homeassistant.components.zha # homeassistant.components.zwave_js pyserial==3.5 # homeassistant.components.sesame pysesame2==1.0.1 +# homeassistant.components.seventeentrack +pyseventeentrack==1.0.0 + # homeassistant.components.sia pysiaalarm==3.1.1 @@ -2188,7 +2208,7 @@ pysmartthings==0.7.8 pysml==0.0.12 # homeassistant.components.snmp -pysnmp-lextudio==6.0.11 +pysnmp==6.2.5 # homeassistant.components.snooz pysnooz==0.8.6 @@ -2209,13 +2229,13 @@ pystiebeleltron==0.0.1.dev2 pysuez==0.2.0 # homeassistant.components.switchbee -pyswitchbee==1.8.0 +pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 # homeassistant.components.tedee -pytedee-async==0.2.17 +pytedee-async==0.2.20 # homeassistant.components.tfiac pytfiac==0.4 @@ -2233,7 +2253,7 @@ python-awair==0.2.4 python-blockchain-api==0.0.2 # homeassistant.components.bsblan -python-bsblan==0.5.18 +python-bsblan==0.6.2 # homeassistant.components.clementine python-clementine-remote==1.0.1 @@ -2251,7 +2271,7 @@ python-etherscan-api==0.0.3 python-family-hub-local==0.0.2 # homeassistant.components.fully_kiosk -python-fullykiosk==0.0.13 +python-fullykiosk==0.0.14 # homeassistant.components.sms # python-gammu==3.2.4 @@ -2263,10 +2283,10 @@ python-gc100==1.0.3a0 python-gitlab==1.6.0 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.6.0 +python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.0.0 +python-homewizard-energy==v6.2.0 # homeassistant.components.hp_ilo python-hpilo==4.4.3 @@ -2281,13 +2301,16 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # 
homeassistant.components.tplink -python-kasa[speedups]==0.6.2.1 +python-kasa[speedups]==0.7.1 + +# homeassistant.components.linkplay +python-linkplay==0.0.6 # homeassistant.components.lirc # python-lirc==1.2.3 # homeassistant.components.matter -python-matter-server==6.1.0 +python-matter-server==6.3.0 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -2302,7 +2325,7 @@ python-mystrom==2.2.0 python-opendata-transport==0.4.0 # homeassistant.components.opensky -python-opensky==1.0.0 +python-opensky==1.0.1 # homeassistant.components.otbr # homeassistant.components.thread @@ -2318,7 +2341,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.3.0 +python-roborock==2.5.0 # homeassistant.components.smarttub python-smarttub==0.0.36 @@ -2330,7 +2353,7 @@ python-songpal==0.16.2 python-tado==0.17.6 # homeassistant.components.technove -python-technove==1.2.2 +python-technove==1.3.1 # homeassistant.components.telegram_bot python-telegram-bot[socks]==21.0.1 @@ -2361,10 +2384,10 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==0.3.10 +pytrafikverket==1.0.0 # homeassistant.components.v2c -pytrydan==0.7.0 +pytrydan==0.8.0 # homeassistant.components.usb pyudev==0.24.1 @@ -2382,7 +2405,7 @@ pyvera==0.3.13 pyversasense==0.0.6 # homeassistant.components.vesync -pyvesync==2.1.10 +pyvesync==2.1.12 # homeassistant.components.vizio pyvizio==0.1.61 @@ -2394,7 +2417,7 @@ pyvlx==0.2.21 pyvolumio==0.1.5 # homeassistant.components.waze_travel_time -pywaze==1.0.1 +pywaze==1.0.2 # homeassistant.components.weatherflow pyweatherflowudp==1.4.5 @@ -2454,19 +2477,19 @@ rapt-ble==0.1.2 raspyrfm-client==1.2.8 # homeassistant.components.refoss -refoss-ha==1.2.1 +refoss-ha==1.2.4 # homeassistant.components.rainmachine regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.3 
+renault-api==0.2.5 # homeassistant.components.renson renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.3 +reolink-aio==0.9.7 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2475,7 +2498,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.11 +ring-doorbell[listen]==0.8.12 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2507,9 +2530,6 @@ rpi-bad-power==0.1.0 # homeassistant.components.rtsp_to_webrtc rtsp-to-webrtc==0.5.1 -# homeassistant.components.russound_rio -russound-rio==1.0.0 - # homeassistant.components.russound_rnet russound==0.1.9 @@ -2545,7 +2565,7 @@ sendgrid==6.8.2 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.12.2 +sense-energy==0.12.4 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2571,6 +2591,9 @@ sharp_aquos_rc==0.3.2 # homeassistant.components.shodan shodan==1.28.0 +# homeassistant.components.simplefin +simplefin4py==0.0.18 + # homeassistant.components.sighthound simplehound==0.3 @@ -2605,7 +2628,7 @@ soco==0.30.4 solaredge-local==0.2.3 # homeassistant.components.solarlog -solarlog_cli==0.1.5 +solarlog_cli==0.1.6 # homeassistant.components.solax solax==3.1.1 @@ -2668,7 +2691,7 @@ stringcase==1.2.0 subarulink==0.7.11 # homeassistant.components.sunweg -sunweg==3.0.1 +sunweg==3.0.2 # homeassistant.components.surepetcare surepy==0.9.0 @@ -2677,19 +2700,19 @@ surepy==0.9.0 swisshydrodata==0.1.0 # homeassistant.components.switchbot_cloud -switchbot-api==2.1.0 +switchbot-api==2.2.1 # homeassistant.components.synology_srm synology-srm==0.2.0 # homeassistant.components.system_bridge -systembridgeconnector==4.0.3 +systembridgeconnector==4.1.5 # homeassistant.components.system_bridge -systembridgemodels==4.0.4 +systembridgemodels==4.2.4 # homeassistant.components.tailscale -tailscale==0.6.0 +tailscale==0.6.1 # homeassistant.components.tank_utility tank-utility==1.5.0 @@ -2715,8 +2738,10 @@ temperusb==1.6.1 
# homeassistant.components.tensorflow # tensorflow==2.5.0 +# homeassistant.components.tesla_fleet # homeassistant.components.teslemetry -tesla-fleet-api==0.6.1 +# homeassistant.components.tessie +tesla-fleet-api==0.7.3 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2725,13 +2750,13 @@ tesla-powerwall==0.5.2 tesla-wall-connector==1.0.2 # homeassistant.components.tessie -tessie-api==0.0.9 +tessie-api==0.1.1 # homeassistant.components.tensorflow # tf-models-official==2.5.0 # homeassistant.components.thermobeacon -thermobeacon-ble==0.6.2 +thermobeacon-ble==0.7.0 # homeassistant.components.thermopro thermopro-ble==0.10.0 @@ -2773,10 +2798,10 @@ tplink-omada-client==1.3.12 transmission-rpc==7.0.3 # homeassistant.components.twinkly -ttls==1.5.1 +ttls==1.8.3 # homeassistant.components.thethingsnetwork -ttn_client==1.0.0 +ttn_client==1.1.0 # homeassistant.components.tuya tuya-device-sharing-sdk==0.1.9 @@ -2788,19 +2813,19 @@ twentemilieu==2.0.1 twilio==6.32.0 # homeassistant.components.twitch -twitchAPI==4.0.0 +twitchAPI==4.2.1 # homeassistant.components.ukraine_alarm uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==1.20.0 +uiprotect==6.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 # homeassistant.components.unifiprotect -unifi-discovery==1.1.8 +unifi-discovery==1.2.0 # homeassistant.components.unifi_direct unifi_ap==0.0.1 @@ -2809,10 +2834,10 @@ unifi_ap==0.0.1 unifiled==0.11 # homeassistant.components.zha -universal-silabs-flasher==0.0.20 +universal-silabs-flasher==0.0.22 # homeassistant.components.upb -upb-lib==0.5.6 +upb-lib==0.5.8 # homeassistant.components.upcloud upcloud-api==2.5.1 @@ -2823,19 +2848,19 @@ upcloud-api==2.5.1 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.11.0 +uvcclient==0.12.1 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 # homeassistant.components.vallox -vallox-websocket-api==5.1.1 +vallox-websocket-api==5.3.0 # homeassistant.components.rdw 
-vehicle==2.2.1 +vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.5.1 +velbus-aio==2024.7.6 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2849,10 +2874,6 @@ voip-utils==0.1.0 # homeassistant.components.volkszaehler volkszaehler==0.4.0 -# homeassistant.components.google_generative_ai_conversation -# homeassistant.components.openai_conversation -voluptuous-openapi==0.0.4 - # homeassistant.components.volvooncall volvooncall==0.10.3 @@ -2873,7 +2894,7 @@ vultr==0.1.2 wakeonlan==2.1.0 # homeassistant.components.wallbox -wallbox==0.6.0 +wallbox==0.7.0 # homeassistant.components.folder_watcher watchdog==2.3.1 @@ -2887,9 +2908,6 @@ weatherflow4py==0.2.21 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 -# homeassistant.components.assist_pipeline -webrtc-noise-gain==1.2.3 - # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2903,10 +2921,10 @@ wiffi==1.1.2 wirelesstagpy==0.8.1 # homeassistant.components.wled -wled==0.18.0 +wled==0.20.2 # homeassistant.components.wolflink -wolf-comm==0.0.8 +wolf-comm==0.0.9 # homeassistant.components.wyoming wyoming==1.5.4 @@ -2915,15 +2933,14 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.0 +xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==2.12.2 +xknx==3.1.0 # homeassistant.components.knx xknxproject==3.7.1 -# homeassistant.components.bluesound # homeassistant.components.fritz # homeassistant.components.rest # homeassistant.components.startca @@ -2939,10 +2956,10 @@ yalesmartalarmclient==0.3.9 # homeassistant.components.august # homeassistant.components.yalexs_ble -yalexs-ble==2.4.2 +yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.0 +yalexs==8.0.2 # homeassistant.components.yeelight yeelight==0.7.14 @@ -2951,16 +2968,16 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.4 +yolink-api==0.4.7 # homeassistant.components.youless -youless-api==2.1.0 +youless-api==2.1.2 
# homeassistant.components.youtube youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.05.27 +yt-dlp==2024.08.06 # homeassistant.components.zamg zamg==0.3.6 @@ -2975,7 +2992,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.116 +zha==0.0.31 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 @@ -2983,21 +3000,6 @@ zhong-hong-hvac==1.0.12 # homeassistant.components.ziggo_mediabox_xl ziggo-mediabox-xl==1.1.0 -# homeassistant.components.zha -zigpy-deconz==0.23.1 - -# homeassistant.components.zha -zigpy-xbee==0.20.1 - -# homeassistant.components.zha -zigpy-zigate==0.12.0 - -# homeassistant.components.zha -zigpy-znp==0.12.1 - -# homeassistant.components.zha -zigpy==0.64.1 - # homeassistant.components.zoneminder zm-py==0.5.4 diff --git a/requirements_test.txt b/requirements_test.txt index fce669c4929..19a60b6aa28 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,17 +7,18 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt -astroid==3.2.2 -coverage==7.5.3 -freezegun==1.5.0 +astroid==3.2.4 +coverage==7.6.0 +freezegun==1.5.1 mock-open==1.4.0 -mypy-dev==1.11.0a8 +mypy-dev==1.12.0a2 pre-commit==3.7.1 pydantic==1.10.17 -pylint==3.2.2 +pylint==3.2.6 pylint-per-file-ignores==1.3.2 -pipdeptree==2.19.0 -pytest-asyncio==0.23.6 +pipdeptree==2.23.1 +pip-licenses==4.5.1 +pytest-asyncio==0.23.8 pytest-aiohttp==1.0.5 pytest-cov==5.0.0 pytest-freezer==0.4.8 @@ -25,15 +26,15 @@ pytest-github-actions-annotate-failures==0.2.0 pytest-socket==0.7.0 pytest-sugar==1.0.0 pytest-timeout==2.3.1 -pytest-unordered==0.6.0 +pytest-unordered==0.6.1 pytest-picked==0.5.0 pytest-xdist==3.6.1 -pytest==8.2.0 +pytest==8.3.1 requests-mock==1.12.1 respx==0.21.1 syrupy==4.6.1 tqdm==4.66.4 -types-aiofiles==23.2.0.20240403 +types-aiofiles==23.2.0.20240623 types-atomicwrites==1.4.5.1 types-croniter==2.0.0.20240423 types-beautifulsoup4==4.12.0.20240511 @@ -41,13 +42,13 @@ 
types-caldav==1.3.0.20240331 types-chardet==0.1.5 types-decorator==5.1.8.20240310 types-paho-mqtt==1.6.0.20240321 -types-pillow==10.2.0.20240511 +types-pillow==10.2.0.20240520 types-protobuf==4.24.0.20240106 -types-psutil==5.9.5.20240511 +types-psutil==6.0.0.20240621 types-python-dateutil==2.9.0.20240316 types-python-slugify==8.0.2.20240310 types-pytz==2024.1.0.20240417 types-PyYAML==6.0.12.20240311 types-requests==2.31.0.3 types-xmltodict==0.13.0.3 -uv==0.2.13 +uv==0.2.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ad51c91f9b6..808f93b6ad5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -4,7 +4,7 @@ -r requirements_test.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.2 +AEMET-OpenData==0.5.4 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 @@ -13,13 +13,16 @@ AIOSomecomfort==0.0.25 Adax-local==0.1.5 # homeassistant.components.doorbird -DoorBirdPy==2.1.0 +DoorBirdPy==3.0.2 # homeassistant.components.homekit HAP-python==4.9.1 # homeassistant.components.tasmota -HATasmota==0.8.0 +HATasmota==0.9.2 + +# homeassistant.components.mastodon +Mastodon.py==1.8.1 # homeassistant.components.doods # homeassistant.components.generic @@ -30,10 +33,10 @@ HATasmota==0.8.0 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.3.0 +Pillow==10.4.0 # homeassistant.components.plex -PlexAPI==4.15.13 +PlexAPI==4.15.14 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 @@ -51,7 +54,7 @@ PyFlume==0.6.5 PyFronius==0.7.3 # homeassistant.components.pyload -PyLoadAPI==1.1.0 +PyLoadAPI==1.3.2 # homeassistant.components.met_eireann PyMetEireann==2021.8.0 @@ -78,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.0 +PySwitchbot==0.48.1 # homeassistant.components.syncthru PySyncThru==0.7.10 @@ -128,7 +131,7 @@ adb-shell[async]==0.4.4 adext==0.4.3 # homeassistant.components.adguard 
-adguardhome==0.6.3 +adguardhome==0.7.0 # homeassistant.components.advantage_air advantage-air==0.4.4 @@ -161,10 +164,10 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.5.3 +aioairzone-cloud==0.6.2 # homeassistant.components.airzone -aioairzone==0.7.7 +aioairzone==0.8.2 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -174,16 +177,16 @@ aioambient==2024.01.0 aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.1.7 +aioaquacell==0.2.0 # homeassistant.components.aseko_pool_live -aioaseko==0.1.1 +aioaseko==0.2.0 # homeassistant.components.asuswrt aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.6.1 +aioautomower==2024.8.0 # homeassistant.components.azure_devops aioazuredevops==2.1.1 @@ -192,13 +195,13 @@ aioazuredevops==2.1.1 aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.0 +aiobotocore==2.13.1 # homeassistant.components.comelit aiocomelit==0.9.0 # homeassistant.components.dhcp -aiodhcpwatcher==1.0.0 +aiodhcpwatcher==1.0.2 # homeassistant.components.dhcp aiodiscover==2.1.0 @@ -222,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.6.0 +aioesphomeapi==25.1.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -237,10 +240,10 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.homekit_controller -aiohomekit==3.1.5 +aiohomekit==3.2.2 # homeassistant.components.hue -aiohue==4.7.1 +aiohue==4.7.2 # homeassistant.components.imap aioimaplib==1.1.0 @@ -252,10 +255,10 @@ aiokafka==0.10.0 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.4.15 +aiolifx-themes==0.5.0 # homeassistant.components.lifx -aiolifx==1.0.2 +aiolifx==1.0.6 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -267,7 +270,7 @@ aiolookin==1.0.0 aiolyric==1.1.0 # homeassistant.components.mealie -aiomealie==0.4.0 +aiomealie==0.8.1 # 
homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -282,7 +285,7 @@ aionanoleaf==0.2.1 aionotion==2024.03.0 # homeassistant.components.nut -aionut==4.3.2 +aionut==4.3.3 # homeassistant.components.oncue aiooncue==0.3.7 @@ -291,7 +294,7 @@ aiooncue==0.3.7 aioopenexchangerates==0.4.0 # homeassistant.components.nmap_tracker -aiooui==0.1.5 +aiooui==0.1.6 # homeassistant.components.pegel_online aiopegelonline==0.0.10 @@ -314,10 +317,10 @@ aiopvpc==4.2.2 aiopyarr==23.4.0 # homeassistant.components.qnap_qsw -aioqsw==0.3.5 +aioqsw==0.4.1 # homeassistant.components.rainforest_raven -aioraven==0.6.0 +aioraven==0.7.0 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 @@ -328,6 +331,9 @@ aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed aioruckus==0.34 +# homeassistant.components.russound_rio +aiorussound==2.3.2 + # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -335,7 +341,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==10.0.1 +aioshelly==11.2.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -347,10 +353,10 @@ aioslimproto==3.0.0 aiosolaredge==0.2.0 # homeassistant.components.steamist -aiosteamist==0.3.2 +aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==3.4.3 +aioswitcher==4.0.2 # homeassistant.components.syncthing aiosyncthing==0.5.1 @@ -359,10 +365,10 @@ aiosyncthing==0.5.1 aiotankerkoenig==0.4.1 # homeassistant.components.tractive -aiotractive==0.5.6 +aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==79 +aiounifi==80 # homeassistant.components.vlc_telnet aiovlc==0.3.2 @@ -377,16 +383,16 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webostv -aiowebostv==0.4.0 +aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.0.1 +aiowithings==3.0.2 # homeassistant.components.yandex_transport -aioymaps==1.2.2 +aioymaps==1.2.5 # homeassistant.components.airgradient -airgradient==0.6.0 +airgradient==0.8.0 # 
homeassistant.components.airly airly==1.1.0 @@ -401,10 +407,10 @@ airthings-cloud==0.2.0 airtouch4pyapi==1.0.5 # homeassistant.components.airtouch5 -airtouch5py==0.2.8 +airtouch5py==0.2.10 # homeassistant.components.amberelectric -amberelectric==1.1.0 +amberelectric==1.1.1 # homeassistant.components.androidtv androidtv[async]==0.0.73 @@ -413,11 +419,14 @@ androidtv[async]==0.0.73 androidtvremote2==0.1.1 # homeassistant.components.anova -anova-wifi==0.12.0 +anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 +# homeassistant.components.anthropic +anthropic==0.31.2 + # homeassistant.components.weatherkit apple_weatherkit==1.1.2 @@ -428,7 +437,7 @@ apprise==1.8.0 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==1.3.1 +apsystems-ez1==1.3.3 # homeassistant.components.aranet aranet4==2.3.4 @@ -436,19 +445,16 @@ aranet4==2.3.4 # homeassistant.components.arcam_fmj arcam-fmj==1.5.2 -# homeassistant.components.asterisk_mbox -asterisk_mbox==0.5.0 - # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.38.3 +async-upnp-client==0.40.0 # homeassistant.components.arve -asyncarve==0.0.9 +asyncarve==0.1.1 # homeassistant.components.sleepiq asyncsleepiq==1.5.2 @@ -459,17 +465,20 @@ auroranoaa==0.0.3 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 +# homeassistant.components.autarco +autarco==2.0.0 + # homeassistant.components.axis -axis==61 +axis==62 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 # homeassistant.components.azure_data_explorer -azure-kusto-data[aio]==3.1.0 +azure-kusto-data[aio]==4.5.1 # homeassistant.components.azure_data_explorer -azure-kusto-ingest==3.1.0 +azure-kusto-ingest==4.5.1 # homeassistant.components.holiday babel==2.15.0 @@ -480,11 +489,8 @@ base36==0.1.1 # homeassistant.components.scrape beautifulsoup4==4.12.3 
-# homeassistant.components.zha -bellows==0.39.1 - # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.15.3 +bimmer-connected[china]==0.16.1 # homeassistant.components.eq3btsmart # homeassistant.components.esphome @@ -494,7 +500,7 @@ bleak-esphome==1.0.0 bleak-retry-connector==3.5.0 # homeassistant.components.bluetooth -bleak==0.22.1 +bleak==0.22.2 # homeassistant.components.blebox blebox-uniapi==2.4.2 @@ -509,7 +515,7 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.bluetooth -bluetooth-adapters==0.19.2 +bluetooth-adapters==0.19.4 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 @@ -518,7 +524,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.19.0 +bluetooth-data-tools==1.19.4 # homeassistant.components.bond bond-async==0.2.1 @@ -526,14 +532,17 @@ bond-async==0.2.1 # homeassistant.components.bosch_shc boschshcpy==0.2.91 +# homeassistant.components.aws +botocore==1.34.131 + # homeassistant.components.bring -bring-api==0.7.1 +bring-api==0.8.1 # homeassistant.components.broadlink broadlink==0.19.0 # homeassistant.components.brother -brother==4.2.0 +brother==4.3.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -553,6 +562,9 @@ cached_ipaddress==0.3.0 # homeassistant.components.caldav caldav==1.3.9 +# homeassistant.components.coinbase +coinbase-advanced-py==1.2.2 + # homeassistant.components.coinbase coinbase==2.1.0 @@ -584,13 +596,13 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.21.3 +dbus-fast==2.22.1 # homeassistant.components.debugpy debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.0.0 +deebot-client==8.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -615,11 +627,14 @@ devolo-home-control-api==0.18.3 # homeassistant.components.devolo_home_network 
devolo-plc-api==1.4.1 +# homeassistant.components.chacon_dio +dio-chacon-wifi-api==1.2.0 + # homeassistant.components.directv directv==0.4.0 # homeassistant.components.steamist -discovery30303==0.2.1 +discovery30303==0.3.2 # homeassistant.components.dremel_3d_printer dremel3dpy==2.1.1 @@ -628,7 +643,7 @@ dremel3dpy==2.1.1 dropmqttapi==1.0.3 # homeassistant.components.dsmr -dsmr-parser==1.3.1 +dsmr-parser==1.4.2 # homeassistant.components.dwd_weather_warnings dwdwfsapi==1.0.7 @@ -643,11 +658,14 @@ dynalite-panel==0.0.4 eagle100==0.1.1 # homeassistant.components.easyenergy -easyenergy==2.1.1 +easyenergy==2.1.2 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 +# homeassistant.components.elevenlabs +elevenlabs==1.6.1 + # homeassistant.components.elgato elgato==5.1.2 @@ -667,13 +685,13 @@ emulated-roku==0.3.0 energyflip-client==0.2.2 # homeassistant.components.energyzero -energyzero==2.1.0 +energyzero==2.1.1 # homeassistant.components.enocean enocean==0.50 # homeassistant.components.environment_canada -env-canada==0.6.3 +env-canada==0.7.2 # homeassistant.components.season ephem==4.1.5 @@ -688,7 +706,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.1.8 +eq3btsmart==1.1.9 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -699,6 +717,12 @@ eternalegypt==0.0.16 # homeassistant.components.eufylife_ble eufylife-ble-client==0.1.8 +# homeassistant.components.evohome +evohome-async==0.4.20 + +# homeassistant.components.bryant_evolution +evolutionhttp==0.0.18 + # homeassistant.components.faa_delays faadelays==2023.9.1 @@ -750,7 +774,7 @@ freebox-api==1.1.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.4.1 +fyta_cli==0.6.0 # homeassistant.components.google_translate gTTS==2.2.4 @@ -762,10 +786,10 @@ gardena-bluetooth==1.4.2 gassist-text==0.0.11 # homeassistant.components.google -gcal-sync==6.0.4 +gcal-sync==6.1.4 -# homeassistant.components.aladdin_connect 
-genie-partner-sdk==1.0.2 +# homeassistant.components.geniushub +geniushub-client==0.7.1 # homeassistant.components.geocaching geocachingapi==0.2.1 @@ -780,7 +804,7 @@ georss-generic-client==0.8 georss-ign-sismologia-client==0.8 # homeassistant.components.qld_bushfire -georss-qld-bushfire-alert-client==0.7 +georss-qld-bushfire-alert-client==0.8 # homeassistant.components.dlna_dmr # homeassistant.components.kef @@ -812,7 +836,7 @@ google-cloud-pubsub==2.13.11 google-generativeai==0.6.0 # homeassistant.components.nest -google-nest-sdm==4.0.5 +google-nest-sdm==4.0.6 # homeassistant.components.google_travel_time googlemaps==2.5.1 @@ -821,22 +845,22 @@ googlemaps==2.5.1 gotailwind==0.2.3 # homeassistant.components.govee_ble -govee-ble==0.31.2 +govee-ble==0.40.0 # homeassistant.components.govee_light_local -govee-local-api==1.5.0 +govee-local-api==1.5.1 # homeassistant.components.gpsd gps3==0.33.3 # homeassistant.components.gree -greeclimate==1.4.1 +greeclimate==2.1.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 # homeassistant.components.pure_energie -gridnet==5.0.0 +gridnet==5.0.1 # homeassistant.components.growatt_server growattServer==1.5.0 @@ -867,22 +891,22 @@ ha-philipsjs==3.2.2 habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.1.1 +habluetooth==3.1.3 # homeassistant.components.cloud hass-nabucasa==0.81.1 # homeassistant.components.conversation -hassil==1.7.1 +hassil==1.7.4 # homeassistant.components.jewish_calendar hdate==0.10.9 # homeassistant.components.here_travel_time -here-routing==0.2.0 +here-routing==1.0.1 # homeassistant.components.here_travel_time -here-transit==1.2.0 +here-transit==1.2.1 # homeassistant.components.hko hko==0.3.2 @@ -895,19 +919,19 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.51 +holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240610.1 +home-assistant-frontend==20240809.0 # homeassistant.components.conversation 
-home-assistant-intents==2024.6.21 +home-assistant-intents==2024.8.7 # homeassistant.components.home_connect -homeconnect==0.7.2 +homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.1 +homematicip==1.1.2 # homeassistant.components.remember_the_milk httplib2==0.20.4 @@ -930,13 +954,13 @@ ibeacon-ble==1.2.0 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.0.1 +ical==8.1.1 # homeassistant.components.ping icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.5.3 +idasen-ha==2.6.2 # homeassistant.components.network ifaddr==0.2.0 @@ -945,7 +969,7 @@ ifaddr==0.2.0 imgw_pib==1.0.5 # homeassistant.components.incomfort -incomfort-client==0.6.2 +incomfort-client==0.6.3-1 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -954,7 +978,7 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.5.6 +inkbird-ble==0.5.8 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 @@ -962,35 +986,39 @@ insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire intellifire4py==2.2.2 +# homeassistant.components.iotty +iottycloud==0.1.3 + # homeassistant.components.isal isal==1.6.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 -# homeassistant.components.abode -jaraco.abode==3.3.0 +# homeassistant.components.israel_rail +israel-rail-api==0.1.2 # homeassistant.components.abode -jaraco.functools==3.9.0 +jaraco.abode==5.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 +# homeassistant.components.command_line # homeassistant.components.rest jsonpath==0.82.2 # homeassistant.components.justnimbus -justnimbus==0.7.3 +justnimbus==0.7.4 # homeassistant.components.kegtron kegtron-ble==0.4.0 # homeassistant.components.knocki -knocki==0.1.5 +knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.1.20.105944 +knx-frontend==2024.8.9.225351 # 
homeassistant.components.konnected konnected==1.2.0 @@ -999,10 +1027,10 @@ konnected==1.2.0 krakenex==2.1.0 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.1 +lacrosse-view==1.0.2 # homeassistant.components.laundrify -laundrify-aio==1.1.2 +laundrify-aio==1.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1011,7 +1039,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.1 +led-ble==1.0.2 # homeassistant.components.foscam libpyfoscam==1.2.2 @@ -1028,9 +1056,6 @@ linear-garage-door==0.2.9 # homeassistant.components.lamarzocco lmcloud==1.1.13 -# homeassistant.components.logi_circle -logi-circle==0.2.3 - # homeassistant.components.london_underground london-tube-status==0.5 @@ -1047,7 +1072,7 @@ lupupy==0.3.2 lxml==5.1.0 # homeassistant.components.matrix -matrix-nio==0.24.0 +matrix-nio==0.25.0 # homeassistant.components.maxcube maxcube-api==0.4.3 @@ -1071,7 +1096,7 @@ melnor-bluetooth==0.0.25 meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.3.0 +mficlient==0.5.0 # homeassistant.components.xiaomi_miio micloud==0.5 @@ -1092,16 +1117,16 @@ minio==7.1.12 moat-ble==0.1.1 # homeassistant.components.moehlenhoff_alpha2 -moehlenhoff-alpha2==1.3.0 +moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.0 +monzopy==1.3.2 # homeassistant.components.mopeka -mopeka-iot-ble==0.7.0 +mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.23 +motionblinds==0.6.24 # homeassistant.components.motionblinds_ble motionblindsble==0.1.0 @@ -1110,7 +1135,7 @@ motionblindsble==0.1.0 motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.4.1.8.5 +mozart-api==3.4.1.8.6 # homeassistant.components.mullvad mullvad-api==1.0.0 @@ -1137,22 +1162,25 @@ nessclient==1.0.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.2.0 +nettigo-air-monitor==3.3.0 # homeassistant.components.nexia nexia==2.0.8 # homeassistant.components.nextcloud 
-nextcloudmonitor==1.5.0 +nextcloudmonitor==1.5.1 # homeassistant.components.discord nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.0.0 +nextdns==3.1.0 # homeassistant.components.nibe_heatpump -nibe==2.8.0 +nibe==2.11.0 + +# homeassistant.components.nice_go +nice-go==0.1.6 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 @@ -1183,10 +1211,10 @@ oauth2client==4.1.3 objgraph==3.5.0 # homeassistant.components.garages_amsterdam -odp-amsterdam==6.0.1 +odp-amsterdam==6.0.2 # homeassistant.components.ollama -ollama-hass==0.1.7 +ollama==0.3.1 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1204,7 +1232,7 @@ open-garage==0.2.0 open-meteo==0.3.1 # homeassistant.components.openai_conversation -openai==1.3.8 +openai==1.35.7 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1213,10 +1241,10 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.4 +openwebifpy==4.2.7 # homeassistant.components.opower -opower==0.4.7 +opower==0.6.0 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1228,13 +1256,13 @@ ourgroceries==1.5.4 ovoenergy==2.0.0 # homeassistant.components.p1_monitor -p1monitor==3.0.0 +p1monitor==3.0.1 # homeassistant.components.mqtt paho-mqtt==1.6.1 # homeassistant.components.panasonic_viera -panasonic-viera==0.3.6 +panasonic-viera==0.4.2 # homeassistant.components.dunehd pdunehd==1.3.2 @@ -1258,7 +1286,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.37.4.1 +plugwise==0.38.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1281,7 +1309,7 @@ prometheus-client==0.17.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==5.9.8 +psutil==6.0.0 # homeassistant.components.androidtv pure-python-adb[async]==0.3.0.dev0 @@ -1313,11 +1341,14 @@ py-dormakaba-dkey==1.0.5 # homeassistant.components.improv_ble py-improv-ble-client==1.0.3 +# homeassistant.components.madvr +py-madvr2==1.6.29 + # 
homeassistant.components.melissa py-melissa-climate==2.1.4 # homeassistant.components.nextbus -py-nextbusnext==1.0.2 +py-nextbusnext==2.0.4 # homeassistant.components.nightscout py-nightscout==1.2.2 @@ -1326,10 +1357,7 @@ py-nightscout==1.2.2 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.4.4 - -# homeassistant.components.seventeentrack -py17track==2021.12.2 +py-synologydsm-api==2.5.2 # homeassistant.components.hdmi_cec pyCEC==0.5.2 @@ -1341,7 +1369,7 @@ pyControl4==1.1.0 pyDuotecno==2024.5.1 # homeassistant.components.electrasmart -pyElectra==1.2.3 +pyElectra==1.2.4 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 @@ -1366,7 +1394,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.0 +pyaprilaire==0.7.4 # homeassistant.components.asuswrt pyasuswrt==0.1.21 @@ -1378,17 +1406,20 @@ pyatag==0.3.5.3 pyatmo==8.0.3 # homeassistant.components.apple_tv -pyatv==0.14.3 +pyatv==0.15.0 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 # homeassistant.components.balboa -pybalboa==1.0.1 +pybalboa==1.0.2 # homeassistant.components.blackbird pyblackbird==0.6 +# homeassistant.components.bluesound +pyblu==0.4.0 + # homeassistant.components.neato pybotvac==0.0.25 @@ -1402,13 +1433,13 @@ pycfdns==3.0.0 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.1.5 +pycoolmasternet-async==0.2.2 # homeassistant.components.microsoft pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.11.1 +pydaikin==2.13.4 # homeassistant.components.deconz pydeconz==116 @@ -1420,7 +1451,7 @@ pydexcom==0.2.3 pydiscovergy==3.0.1 # homeassistant.components.hydrawise -pydrawise==2024.6.4 +pydrawise==2024.8.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1440,8 +1471,11 @@ pyefergy==22.5.0 # homeassistant.components.energenie_power_sockets pyegps==0.2.5 +# homeassistant.components.emoncms +pyemoncms==0.0.7 + # homeassistant.components.enphase_envoy 
-pyenphase==1.20.3 +pyenphase==1.22.0 # homeassistant.components.everlights pyeverlights==0.1.0 @@ -1462,7 +1496,7 @@ pyfido==2.1.2 pyfireservicerota==0.0.43 # homeassistant.components.flic -pyflic==2.0.3 +pyflic==2.0.4 # homeassistant.components.forked_daapd pyforked-daapd==0.1.14 @@ -1471,7 +1505,7 @@ pyforked-daapd==0.1.14 pyfreedompro==1.1.0 # homeassistant.components.fritzbox -pyfritzhome==0.6.11 +pyfritzhome==0.6.12 # homeassistant.components.ifttt pyfttt==0.3 @@ -1492,7 +1526,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==0.0.6 +pyhomeworks==1.1.1 # homeassistant.components.ialarm pyialarm==2.2.0 @@ -1501,7 +1535,7 @@ pyialarm==2.2.0 pyicloud==1.0.0 # homeassistant.components.insteon -pyinsteon==1.6.1 +pyinsteon==1.6.3 # homeassistant.components.ipma pyipma==3.0.7 @@ -1519,7 +1553,7 @@ pyiss==1.0.1 pyisy==3.1.14 # homeassistant.components.jvc_projector -pyjvcprojector==1.0.11 +pyjvcprojector==1.0.12 # homeassistant.components.kaleidescape pykaleidescape==1.0.1 @@ -1561,10 +1595,10 @@ pylitejet==0.6.2 pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.20.0 +pylutron-caseta==0.21.1 # homeassistant.components.lutron -pylutron==0.2.13 +pylutron==0.2.15 # homeassistant.components.mailgun pymailgunner==1.4 @@ -1578,11 +1612,14 @@ pymelcloud==2.5.9 # homeassistant.components.meteoclimatic pymeteoclimatic==0.1.0 +# homeassistant.components.assist_pipeline +pymicro-vad==1.0.1 + # homeassistant.components.mochad pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.8 +pymodbus==3.6.9 # homeassistant.components.monoprice pymonoprice==0.4 @@ -1590,6 +1627,9 @@ pymonoprice==0.4 # homeassistant.components.mysensors pymysensors==0.24.0 +# homeassistant.components.iron_os +pynecil==0.2.0 + # homeassistant.components.netgear pynetgear==0.10.10 @@ -1603,7 +1643,7 @@ pynuki==1.6.3 pynws[retry]==1.8.2 # homeassistant.components.nx584 -pynx584==0.5 +pynx584==0.8.2 # 
homeassistant.components.nzbget pynzbgetapi==0.2.0 @@ -1618,7 +1658,7 @@ pyoctoprintapi==0.1.12 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.0.9 +pyopenweathermap==0.1.1 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -1635,13 +1675,13 @@ pyotgw==2.2.0 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.11 +pyoverkiz==1.13.14 # homeassistant.components.onewire pyownet==0.10.0.post1 # homeassistant.components.lcn -pypck==0.7.17 +pypck==0.7.21 # homeassistant.components.pjlink pypjlink2==1.2.1 @@ -1686,22 +1726,20 @@ pyrympro==0.0.8 pysabnzbd==1.1.1 # homeassistant.components.schlage -pyschlage==2024.6.0 +pyschlage==2024.8.0 # homeassistant.components.sensibo pysensibo==1.0.36 -# homeassistant.components.serial -# homeassistant.components.zha -pyserial-asyncio-fast==0.11 - # homeassistant.components.acer_projector # homeassistant.components.crownstone # homeassistant.components.usb -# homeassistant.components.zha # homeassistant.components.zwave_js pyserial==3.5 +# homeassistant.components.seventeentrack +pyseventeentrack==1.0.0 + # homeassistant.components.sia pysiaalarm==3.1.1 @@ -1724,7 +1762,7 @@ pysmartthings==0.7.8 pysml==0.0.12 # homeassistant.components.snmp -pysnmp-lextudio==6.0.11 +pysnmp==6.2.5 # homeassistant.components.snooz pysnooz==0.8.6 @@ -1742,13 +1780,13 @@ pysqueezebox==0.7.1 pysuez==0.2.0 # homeassistant.components.switchbee -pyswitchbee==1.8.0 +pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 # homeassistant.components.tedee -pytedee-async==0.2.17 +pytedee-async==0.2.20 # homeassistant.components.motionmount python-MotionMount==2.0.0 @@ -1757,22 +1795,22 @@ python-MotionMount==2.0.0 python-awair==0.2.4 # homeassistant.components.bsblan -python-bsblan==0.5.18 +python-bsblan==0.6.2 # homeassistant.components.ecobee python-ecobee-api==0.2.18 # homeassistant.components.fully_kiosk -python-fullykiosk==0.0.13 +python-fullykiosk==0.0.14 # homeassistant.components.sms # 
python-gammu==3.2.4 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.6.0 +python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.0.0 +python-homewizard-energy==v6.2.0 # homeassistant.components.izone python-izone==1.2.9 @@ -1781,10 +1819,13 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.6.2.1 +python-kasa[speedups]==0.7.1 + +# homeassistant.components.linkplay +python-linkplay==0.0.6 # homeassistant.components.matter -python-matter-server==6.1.0 +python-matter-server==6.3.0 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -1799,7 +1840,7 @@ python-mystrom==2.2.0 python-opendata-transport==0.4.0 # homeassistant.components.opensky -python-opensky==1.0.0 +python-opensky==1.0.1 # homeassistant.components.otbr # homeassistant.components.thread @@ -1812,7 +1853,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.3.0 +python-roborock==2.5.0 # homeassistant.components.smarttub python-smarttub==0.0.36 @@ -1824,7 +1865,7 @@ python-songpal==0.16.2 python-tado==0.17.6 # homeassistant.components.technove -python-technove==1.2.2 +python-technove==1.3.1 # homeassistant.components.telegram_bot python-telegram-bot[socks]==21.0.1 @@ -1846,10 +1887,10 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==0.3.10 +pytrafikverket==1.0.0 # homeassistant.components.v2c -pytrydan==0.7.0 +pytrydan==0.8.0 # homeassistant.components.usb pyudev==0.24.1 @@ -1861,7 +1902,7 @@ pyuptimerobot==22.2.0 pyvera==0.3.13 # homeassistant.components.vesync -pyvesync==2.1.10 +pyvesync==2.1.12 # homeassistant.components.vizio pyvizio==0.1.61 @@ -1873,7 +1914,7 @@ pyvlx==0.2.21 pyvolumio==0.1.5 # homeassistant.components.waze_travel_time -pywaze==1.0.1 
+pywaze==1.0.2 # homeassistant.components.weatherflow pyweatherflowudp==1.4.5 @@ -1918,25 +1959,25 @@ radiotherm==2.1.0 rapt-ble==0.1.2 # homeassistant.components.refoss -refoss-ha==1.2.1 +refoss-ha==1.2.4 # homeassistant.components.rainmachine regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.3 +renault-api==0.2.5 # homeassistant.components.renson renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.3 +reolink-aio==0.9.7 # homeassistant.components.rflink rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.11 +ring-doorbell[listen]==0.8.12 # homeassistant.components.roku rokuecp==0.19.3 @@ -1982,7 +2023,7 @@ securetar==2024.2.1 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.12.2 +sense-energy==0.12.4 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2002,6 +2043,9 @@ sfrbox-api==0.0.8 # homeassistant.components.sharkiq sharkiq==1.0.2 +# homeassistant.components.simplefin +simplefin4py==0.0.18 + # homeassistant.components.sighthound simplehound==0.3 @@ -2027,7 +2071,7 @@ snapcast==2.3.6 soco==0.30.4 # homeassistant.components.solarlog -solarlog_cli==0.1.5 +solarlog_cli==0.1.6 # homeassistant.components.solax solax==3.1.1 @@ -2087,22 +2131,22 @@ stringcase==1.2.0 subarulink==0.7.11 # homeassistant.components.sunweg -sunweg==3.0.1 +sunweg==3.0.2 # homeassistant.components.surepetcare surepy==0.9.0 # homeassistant.components.switchbot_cloud -switchbot-api==2.1.0 +switchbot-api==2.2.1 # homeassistant.components.system_bridge -systembridgeconnector==4.0.3 +systembridgeconnector==4.1.5 # homeassistant.components.system_bridge -systembridgemodels==4.0.4 +systembridgemodels==4.2.4 # homeassistant.components.tailscale -tailscale==0.6.0 +tailscale==0.6.1 # homeassistant.components.tellduslive tellduslive==0.10.11 @@ -2113,8 +2157,10 @@ temescal==0.5 # homeassistant.components.temper temperusb==1.6.1 +# homeassistant.components.tesla_fleet # 
homeassistant.components.teslemetry -tesla-fleet-api==0.6.1 +# homeassistant.components.tessie +tesla-fleet-api==0.7.3 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2123,10 +2169,10 @@ tesla-powerwall==0.5.2 tesla-wall-connector==1.0.2 # homeassistant.components.tessie -tessie-api==0.0.9 +tessie-api==0.1.1 # homeassistant.components.thermobeacon -thermobeacon-ble==0.6.2 +thermobeacon-ble==0.7.0 # homeassistant.components.thermopro thermopro-ble==0.10.0 @@ -2153,10 +2199,10 @@ tplink-omada-client==1.3.12 transmission-rpc==7.0.3 # homeassistant.components.twinkly -ttls==1.5.1 +ttls==1.8.3 # homeassistant.components.thethingsnetwork -ttn_client==1.0.0 +ttn_client==1.1.0 # homeassistant.components.tuya tuya-device-sharing-sdk==0.1.9 @@ -2168,25 +2214,25 @@ twentemilieu==2.0.1 twilio==6.32.0 # homeassistant.components.twitch -twitchAPI==4.0.0 +twitchAPI==4.2.1 # homeassistant.components.ukraine_alarm uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==1.20.0 +uiprotect==6.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 # homeassistant.components.unifiprotect -unifi-discovery==1.1.8 +unifi-discovery==1.2.0 # homeassistant.components.zha -universal-silabs-flasher==0.0.20 +universal-silabs-flasher==0.0.22 # homeassistant.components.upb -upb-lib==0.5.6 +upb-lib==0.5.8 # homeassistant.components.upcloud upcloud-api==2.5.1 @@ -2197,19 +2243,19 @@ upcloud-api==2.5.1 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.11.0 +uvcclient==0.12.1 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 # homeassistant.components.vallox -vallox-websocket-api==5.1.1 +vallox-websocket-api==5.3.0 # homeassistant.components.rdw -vehicle==2.2.1 +vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.5.1 +velbus-aio==2024.7.6 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2220,10 +2266,6 @@ vilfo-api-client==0.5.0 # homeassistant.components.voip voip-utils==0.1.0 -# 
homeassistant.components.google_generative_ai_conversation -# homeassistant.components.openai_conversation -voluptuous-openapi==0.0.4 - # homeassistant.components.volvooncall volvooncall==0.10.3 @@ -2241,7 +2283,7 @@ vultr==0.1.2 wakeonlan==2.1.0 # homeassistant.components.wallbox -wallbox==0.6.0 +wallbox==0.7.0 # homeassistant.components.folder_watcher watchdog==2.3.1 @@ -2252,9 +2294,6 @@ weatherflow4py==0.2.21 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 -# homeassistant.components.assist_pipeline -webrtc-noise-gain==1.2.3 - # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2265,10 +2304,10 @@ whois==0.9.27 wiffi==1.1.2 # homeassistant.components.wled -wled==0.18.0 +wled==0.20.2 # homeassistant.components.wolflink -wolf-comm==0.0.8 +wolf-comm==0.0.9 # homeassistant.components.wyoming wyoming==1.5.4 @@ -2277,15 +2316,14 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.0 +xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==2.12.2 +xknx==3.1.0 # homeassistant.components.knx xknxproject==3.7.1 -# homeassistant.components.bluesound # homeassistant.components.fritz # homeassistant.components.rest # homeassistant.components.startca @@ -2298,25 +2336,25 @@ yalesmartalarmclient==0.3.9 # homeassistant.components.august # homeassistant.components.yalexs_ble -yalexs-ble==2.4.2 +yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.0 +yalexs==8.0.2 # homeassistant.components.yeelight yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.4 +yolink-api==0.4.7 # homeassistant.components.youless -youless-api==2.1.0 +youless-api==2.1.2 # homeassistant.components.youtube youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.05.27 +yt-dlp==2024.08.06 # homeassistant.components.zamg zamg==0.3.6 @@ -2328,22 +2366,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.116 - -# homeassistant.components.zha -zigpy-deconz==0.23.1 
- -# homeassistant.components.zha -zigpy-xbee==0.20.1 - -# homeassistant.components.zha -zigpy-zigate==0.12.0 - -# homeassistant.components.zha -zigpy-znp==0.12.1 - -# homeassistant.components.zha -zigpy==0.64.1 +zha==0.0.31 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index a7e5c20d86c..091f872d511 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.4.9 +ruff==0.6.0 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index eff61d5c4e6..09ee509b4ee 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -101,11 +101,6 @@ grpcio==1.59.0 grpcio-status==1.59.0 grpcio-reflection==1.59.0 -# libcst >=0.4.0 requires a newer Rust than we currently have available, -# thus our wheels builds fail. This pins it to the last working version, -# which at this point satisfies our needs. -libcst==0.3.23 - # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -120,11 +115,6 @@ enum34==1000000000.0.0 typing==1000000000.0.0 uuid==1000000000.0.0 -# regex causes segfault with version 2021.8.27 -# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error -# This is fixed in 2021.8.28 -regex==2021.8.28 - # httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these @@ -157,6 +147,9 @@ backoff>=2.0 # v2 has breaking changes (#99218). pydantic==1.10.17 +# Required for Python 3.12.4 compatibility (#119223). 
+mashumaro>=3.13.1 + # Breaks asyncio # https://github.com/pubnub/python/issues/130 pubnub!=6.4.0 @@ -171,7 +164,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.1 +protobuf==4.25.4 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder @@ -185,9 +178,6 @@ websockets>=11.0.1 # pysnmplib is no longer maintained and does not work with newer # python pysnmplib==1000000000.0.0 -# pysnmp is no longer maintained and does not work with newer -# python -pysnmp==1000000000.0.0 # The get-mac package has been replaced with getmac. Installing get-mac alongside getmac # breaks getmac due to them both sharing the same python package name inside 'getmac'. @@ -205,8 +195,8 @@ dacite>=1.7.0 # Musle wheels for pandas 2.2.0 cannot be build for any architecture. pandas==2.1.4 -# chacha20poly1305-reuseable==0.12.0 is incompatible with cryptography==42.0.x -chacha20poly1305-reuseable>=0.12.1 +# chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x +chacha20poly1305-reuseable>=0.13.0 # pycountry<23.12.11 imports setuptools at run time # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 @@ -225,7 +215,7 @@ tuf>=4.0.0 pyserial-asyncio==1000000000.0.0 # https://github.com/jd/tenacity/issues/471 -tenacity<8.4.0 +tenacity!=8.4.0 """ GENERATED_MESSAGE = ( diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index bcb19a14c37..ea3c56200a2 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -14,7 +14,6 @@ from . 
import ( codeowners, config_flow, config_schema, - coverage, dependencies, dhcp, docker, @@ -53,7 +52,6 @@ INTEGRATION_PLUGINS = [ config_flow, # This needs to run last, after translations are processed ] HASS_PLUGINS = [ - coverage, docker, mypy_config, metadata, diff --git a/script/hassfest/config_schema.py b/script/hassfest/config_schema.py index 141b087472b..06ef2065127 100644 --- a/script/hassfest/config_schema.py +++ b/script/hassfest/config_schema.py @@ -4,7 +4,7 @@ from __future__ import annotations import ast -from homeassistant.core import DOMAIN as HA_DOMAIN +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN from .model import Config, Integration @@ -12,7 +12,7 @@ CONFIG_SCHEMA_IGNORE = { # Configuration under the homeassistant key is a special case, it's handled by # conf_util.async_process_ha_core_config already during bootstrapping, not by # a schema in the homeassistant integration. - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, } @@ -21,7 +21,7 @@ def _has_assignment(module: ast.Module, name: str) -> bool: for item in module.body: if type(item) not in (ast.Assign, ast.AnnAssign, ast.AugAssign): continue - if type(item) == ast.Assign: + if type(item) is ast.Assign: for target in item.targets: if getattr(target, "id", None) == name: return True @@ -35,7 +35,7 @@ def _has_function( module: ast.Module, _type: ast.AsyncFunctionDef | ast.FunctionDef, name: str ) -> bool: """Test if the module defines a function.""" - return any(type(item) == _type and item.name == name for item in module.body) + return any(type(item) is _type and item.name == name for item in module.body) def _has_import(module: ast.Module, name: str) -> bool: diff --git a/script/hassfest/coverage.py b/script/hassfest/coverage.py deleted file mode 100644 index 388f2a1c761..00000000000 --- a/script/hassfest/coverage.py +++ /dev/null @@ -1,181 +0,0 @@ -"""Validate coverage files.""" - -from __future__ import annotations - -from pathlib import Path - -from .model import Config, Integration 
- -DONT_IGNORE = ( - "config_flow.py", - "device_action.py", - "device_condition.py", - "device_trigger.py", - "diagnostics.py", - "group.py", - "intent.py", - "logbook.py", - "media_source.py", - "recorder.py", - "scene.py", -) -FORCE_COVERAGE = ("gold", "platinum") - -CORE_PREFIX = """# Sorted by hassfest. -# -# To sort, run python3 -m script.hassfest -p coverage - -[run] -source = homeassistant -omit = -""" -COMPONENTS_PREFIX = ( - " # omit pieces of code that rely on external devices being present\n" -) -SUFFIX = """[report] -# Regexes for lines to exclude from consideration -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about missing debug-only code: - def __repr__ - - # Don't complain if tests don't hit defensive assertion code: - raise AssertionError - raise NotImplementedError - - # TYPE_CHECKING and @overload blocks are never executed during pytest run - if TYPE_CHECKING: - @overload -""" - - -def validate(integrations: dict[str, Integration], config: Config) -> None: - """Validate coverage.""" - coverage_path = config.root / ".coveragerc" - - not_found: list[str] = [] - unsorted: list[str] = [] - checking = False - - previous_line = "" - with coverage_path.open("rt") as fp: - for line in fp: - line = line.strip() - - if line == COMPONENTS_PREFIX.strip(): - previous_line = "" - continue - - if not line or line.startswith("#"): - continue - - if not checking: - if line == "omit =": - checking = True - continue - - # Finished - if line == "[report]": - break - - path = Path(line) - - # Discard wildcard - path_exists = path - while "*" in path_exists.name: - path_exists = path_exists.parent - - if not path_exists.exists(): - not_found.append(line) - continue - - if line < previous_line: - unsorted.append(line) - previous_line = line - - if not line.startswith("homeassistant/components/"): - continue - - # Ignore sub-directories - if len(path.parts) > 4: - continue - - integration_path = path.parent - - 
integration = integrations[integration_path.name] - - if integration.quality_scale in FORCE_COVERAGE: - integration.add_error( - "coverage", - f"has quality scale {integration.quality_scale} and " - "should not be present in .coveragerc file", - ) - continue - - if (last_part := path.parts[-1]) in {"*", "const.py"} and Path( - f"tests/components/{integration.domain}/__init__.py" - ).exists(): - integration.add_error( - "coverage", - f"has tests and should not use {last_part} in .coveragerc file", - ) - continue - - for check in DONT_IGNORE: - if path.parts[-1] not in {"*", check}: - continue - - if (integration_path / check).exists(): - integration.add_error( - "coverage", - f"{check} must not be ignored by the .coveragerc file", - ) - - if unsorted: - config.add_error( - "coverage", - "Paths are unsorted in .coveragerc file. " - "Run python3 -m script.hassfest\n - " - f"{'\n - '.join(unsorted)}", - fixable=True, - ) - - if not_found: - raise RuntimeError( - f".coveragerc references files that don't exist: {', '.join(not_found)}." 
- ) - - -def generate(integrations: dict[str, Integration], config: Config) -> None: - """Sort coverage.""" - coverage_path = config.root / ".coveragerc" - core = [] - components = [] - section = "header" - - with coverage_path.open("rt") as fp: - for line in fp: - if line == "[report]\n": - break - - if section != "core" and line == "omit =\n": - section = "core" - elif section != "components" and line == COMPONENTS_PREFIX: - section = "components" - elif section == "core" and line != "\n": - core.append(line) - elif section == "components" and line != "\n": - components.append(line) - - assert core, "core should be a non-empty list" - assert components, "components should be a non-empty list" - content = ( - f"{CORE_PREFIX}{"".join(sorted(core))}\n" - f"{COMPONENTS_PREFIX}{"".join(sorted(components))}\n" - f"\n{SUFFIX}" - ) - - with coverage_path.open("w") as fp: - fp.write(content) diff --git a/script/hassfest/icons.py b/script/hassfest/icons.py index e7451dfd498..087d395afeb 100644 --- a/script/hassfest/icons.py +++ b/script/hassfest/icons.py @@ -47,6 +47,19 @@ def ensure_not_same_as_default(value: dict) -> dict: return value +DATA_ENTRY_ICONS_SCHEMA = vol.Schema( + { + "step": { + str: { + "section": { + str: icon_value_validator, + } + } + } + } +) + + def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: """Create an icon schema.""" @@ -73,6 +86,11 @@ def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: schema = vol.Schema( { + vol.Optional("config"): DATA_ENTRY_ICONS_SCHEMA, + vol.Optional("issues"): vol.Schema( + {str: {"fix_flow": DATA_ENTRY_ICONS_SCHEMA}} + ), + vol.Optional("options"): DATA_ENTRY_ICONS_SCHEMA, vol.Optional("services"): state_validator, } ) diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 8ff0750250f..1c01ee7cf58 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -120,10 +120,6 @@ NO_DIAGNOSTICS = [ "gdacs", "geonetnz_quakes", 
"hyperion", - # Modbus is excluded because it doesn't have to have a config flow - # according to ADR-0010, since it's a protocol integration. This - # means that it can't implement diagnostics. - "modbus", "nightscout", "pvpc_hourly_pricing", "risco", diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index 56734257f78..d2aff81aa05 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -36,7 +36,7 @@ GENERAL_SETTINGS: Final[dict[str, str]] = { "plugins": "pydantic.mypy", "show_error_codes": "true", "follow_imports": "normal", - "enable_incomplete_feature": ",".join( # noqa: FLY002 + "enable_incomplete_feature": ", ".join( # noqa: FLY002 [ "NewGenericSyntax", ] diff --git a/script/hassfest/services.py b/script/hassfest/services.py index ea4503d5410..92fca14d373 100644 --- a/script/hassfest/services.py +++ b/script/hassfest/services.py @@ -26,6 +26,23 @@ def exists(value: Any) -> Any: return value +def unique_field_validator(fields: Any) -> Any: + """Validate the inputs don't have duplicate keys under different sections.""" + all_fields = set() + for key, value in fields.items(): + if value and "fields" in value: + for key in value["fields"]: + if key in all_fields: + raise vol.Invalid(f"Duplicate use of field {key} in service.") + all_fields.add(key) + else: + if key in all_fields: + raise vol.Invalid(f"Duplicate use of field {key} in service.") + all_fields.add(key) + + return fields + + CORE_INTEGRATION_FIELD_SCHEMA = vol.Schema( { vol.Optional("example"): exists, @@ -44,6 +61,13 @@ CORE_INTEGRATION_FIELD_SCHEMA = vol.Schema( } ) +CORE_INTEGRATION_SECTION_SCHEMA = vol.Schema( + { + vol.Optional("collapsed"): bool, + vol.Required("fields"): vol.Schema({str: CORE_INTEGRATION_FIELD_SCHEMA}), + } +) + CUSTOM_INTEGRATION_FIELD_SCHEMA = CORE_INTEGRATION_FIELD_SCHEMA.extend( { vol.Optional("description"): str, @@ -57,7 +81,17 @@ CORE_INTEGRATION_SERVICE_SCHEMA = vol.Any( vol.Optional("target"): vol.Any( 
selector.TargetSelector.CONFIG_SCHEMA, None ), - vol.Optional("fields"): vol.Schema({str: CORE_INTEGRATION_FIELD_SCHEMA}), + vol.Optional("fields"): vol.All( + vol.Schema( + { + str: vol.Any( + CORE_INTEGRATION_FIELD_SCHEMA, + CORE_INTEGRATION_SECTION_SCHEMA, + ) + } + ), + unique_field_validator, + ), } ), None, @@ -107,7 +141,7 @@ def grep_dir(path: pathlib.Path, glob_pattern: str, search_pattern: str) -> bool return False -def validate_services(config: Config, integration: Integration) -> None: +def validate_services(config: Config, integration: Integration) -> None: # noqa: C901 """Validate services.""" try: data = load_yaml_dict(str(integration.path / "services.yaml")) @@ -200,6 +234,9 @@ def validate_services(config: Config, integration: Integration) -> None: # The same check is done for the description in each of the fields of the # service schema. for field_name, field_schema in service_schema.get("fields", {}).items(): + if "fields" in field_schema: + # This is a section + continue if "name" not in field_schema: try: strings["services"][service_name]["fields"][field_name]["name"] @@ -233,6 +270,20 @@ def validate_services(config: Config, integration: Integration) -> None: f"Service {service_name} has a field {field_name} with a selector with a translation key {translation_key} that is not in the translations file", ) + # The same check is done for the description in each of the sections of the + # service schema. 
+ for section_name, section_schema in service_schema.get("fields", {}).items(): + if "fields" not in section_schema: + # This is not a section + continue + try: + strings["services"][service_name]["sections"][section_name]["name"] + except KeyError: + integration.add_error( + "services", + f"Service {service_name} has a section {section_name} with no name {error_msg_suffix}", + ) + def validate(integrations: dict[str, Integration], config: Config) -> None: """Handle dependencies for integrations.""" diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index 04ea85ca5d5..c5efd05948f 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -41,6 +41,7 @@ ALLOW_NAME_TRANSLATION = { "local_todo", "nmap_tracker", "rpi_power", + "swiss_public_transport", "waze_travel_time", "zodiac", } @@ -166,6 +167,13 @@ def gen_data_entry_schema( vol.Optional("data_description"): {str: translation_value_validator}, vol.Optional("menu_options"): {str: translation_value_validator}, vol.Optional("submit"): translation_value_validator, + vol.Optional("section"): { + str: { + vol.Optional("data"): {str: translation_value_validator}, + vol.Optional("description"): translation_value_validator, + vol.Optional("name"): translation_value_validator, + }, + }, } }, vol.Optional("error"): {str: translation_value_validator}, @@ -250,6 +258,14 @@ def gen_issues_schema(config: Config, integration: Integration) -> dict[str, Any } +_EXCEPTIONS_SCHEMA = { + vol.Optional("exceptions"): cv.schema_with_slug_keys( + {vol.Optional("message"): translation_value_validator}, + slug_validator=cv.slug, + ), +} + + def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: """Generate a strings schema.""" return vol.Schema( @@ -355,10 +371,7 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: ), slug_validator=cv.slug, ), - vol.Optional("exceptions"): cv.schema_with_slug_keys( - {vol.Optional("message"): 
translation_value_validator}, - slug_validator=cv.slug, - ), + **_EXCEPTIONS_SCHEMA, vol.Optional("services"): cv.schema_with_slug_keys( { vol.Required("name"): translation_value_validator, @@ -371,6 +384,13 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: }, slug_validator=translation_key_validator, ), + vol.Optional("sections"): cv.schema_with_slug_keys( + { + vol.Required("name"): str, + vol.Optional("description"): translation_value_validator, + }, + slug_validator=translation_key_validator, + ), }, slug_validator=translation_key_validator, ), @@ -397,6 +417,7 @@ def gen_auth_schema(config: Config, integration: Integration) -> vol.Schema: ) }, vol.Optional("issues"): gen_issues_schema(config, integration), + **_EXCEPTIONS_SCHEMA, } ) diff --git a/script/install_integration_requirements.py b/script/install_integration_requirements.py index ab91ea71557..91c9f6a8ed0 100644 --- a/script/install_integration_requirements.py +++ b/script/install_integration_requirements.py @@ -45,6 +45,7 @@ def main() -> int | None: cmd, check=True, ) + return None if __name__ == "__main__": diff --git a/script/licenses.py b/script/licenses.py new file mode 100644 index 00000000000..0663821ed2c --- /dev/null +++ b/script/licenses.py @@ -0,0 +1,245 @@ +"""Tool to check the licenses.""" + +from __future__ import annotations + +from dataclasses import dataclass +import json +from pathlib import Path +import sys + +from awesomeversion import AwesomeVersion + + +@dataclass +class PackageDefinition: + """Package definition.""" + + license: str + name: str + version: AwesomeVersion + + @classmethod + def from_dict(cls, data: dict[str, str]) -> PackageDefinition: + """Create a package definition from a dictionary.""" + return cls( + license=data["License"], + name=data["Name"], + version=AwesomeVersion(data["Version"]), + ) + + +OSI_APPROVED_LICENSES = { + "Academic Free License (AFL)", + "Apache Software License", + "Apple Public Source License", + "Artistic 
License", + "Attribution Assurance License", + "BSD License", + "Boost Software License 1.0 (BSL-1.0)", + "CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)", + "Common Development and Distribution License 1.0 (CDDL-1.0)", + "Common Public License", + "Eclipse Public License 1.0 (EPL-1.0)", + "Eclipse Public License 2.0 (EPL-2.0)", + "Educational Community License, Version 2.0 (ECL-2.0)", + "Eiffel Forum License", + "European Union Public Licence 1.0 (EUPL 1.0)", + "European Union Public Licence 1.1 (EUPL 1.1)", + "European Union Public Licence 1.2 (EUPL 1.2)", + "GNU Affero General Public License v3", + "GNU Affero General Public License v3 or later (AGPLv3+)", + "GNU Free Documentation License (FDL)", + "GNU General Public License (GPL)", + "GNU General Public License v2 (GPLv2)", + "GNU General Public License v2 or later (GPLv2+)", + "GNU General Public License v3 (GPLv3)", + "GNU General Public License v3 or later (GPLv3+)", + "GNU Lesser General Public License v2 (LGPLv2)", + "GNU Lesser General Public License v2 or later (LGPLv2+)", + "GNU Lesser General Public License v3 (LGPLv3)", + "GNU Lesser General Public License v3 or later (LGPLv3+)", + "GNU Library or Lesser General Public License (LGPL)", + "Historical Permission Notice and Disclaimer (HPND)", + "IBM Public License", + "ISC License (ISCL)", + "Intel Open Source License", + "Jabber Open Source License", + "MIT License", + "MIT No Attribution License (MIT-0)", + "MITRE Collaborative Virtual Workspace License (CVW)", + "MirOS License (MirOS)", + "Motosoto License", + "Mozilla Public License 1.0 (MPL)", + "Mozilla Public License 1.1 (MPL 1.1)", + "Mozilla Public License 2.0 (MPL 2.0)", + "Mulan Permissive Software License v2 (MulanPSL-2.0)", + "NASA Open Source Agreement v1.3 (NASA-1.3)", + "Nethack General Public License", + "Nokia Open Source License", + "Open Group Test Suite License", + "Open Software License 3.0 (OSL-3.0)", + "PostgreSQL License", + "Python License (CNRI Python 
License)", + "Python Software Foundation License", + "Qt Public License (QPL)", + "Ricoh Source Code Public License", + "SIL Open Font License 1.1 (OFL-1.1)", + "Sleepycat License", + "Sun Industry Standards Source License (SISSL)", + "Sun Public License", + "The Unlicense (Unlicense)", + "Universal Permissive License (UPL)", + "University of Illinois/NCSA Open Source License", + "Vovida Software License 1.0", + "W3C License", + "X.Net License", + "Zero-Clause BSD (0BSD)", + "Zope Public License", + "zlib/libpng License", + "Apache License", + "MIT", + "apache-2.0", + "GPL-3.0", + "GPLv3+", + "MPL2", + "MPL-2.0", + "Apache 2", + "LGPL v3", + "BSD", + "GNU-3.0", + "GPLv3", + "Eclipse Public License v2.0", + "ISC", + "GPL-2.0-only", + "mit", + "GNU General Public License v3", + "Unlicense", + "Apache-2", + "GPLv2", + "Python-2.0.1", +} + +EXCEPTIONS = { + "PyMicroBot", # https://github.com/spycle/pyMicroBot/pull/3 + "PySwitchmate", # https://github.com/Danielhiversen/pySwitchmate/pull/16 + "PyXiaomiGateway", # https://github.com/Danielhiversen/PyXiaomiGateway/pull/201 + "aiocomelit", # https://github.com/chemelli74/aiocomelit/pull/138 + "aioecowitt", # https://github.com/home-assistant-libs/aioecowitt/pull/180 + "aiohappyeyeballs", # Python-2.0.1 + "aioopenexchangerates", # https://github.com/MartinHjelmare/aioopenexchangerates/pull/94 + "aiooui", # https://github.com/Bluetooth-Devices/aiooui/pull/8 + "aioruuvigateway", # https://github.com/akx/aioruuvigateway/pull/6 + "aiovodafone", # https://github.com/chemelli74/aiovodafone/pull/131 + "airthings-ble", # https://github.com/Airthings/airthings-ble/pull/42 + "apple_weatherkit", # https://github.com/tjhorner/python-weatherkit/pull/3 + "asyncio", # PSF License + "chacha20poly1305", # LGPL + "chacha20poly1305-reuseable", # Apache 2.0 or BSD 3-Clause + "commentjson", # https://github.com/vaidik/commentjson/pull/55 + "crownstone-cloud", # https://github.com/crownstone/crownstone-lib-python-cloud/pull/5 + 
"crownstone-core", # https://github.com/crownstone/crownstone-lib-python-core/pull/6 + "crownstone-sse", # https://github.com/crownstone/crownstone-lib-python-sse/pull/2 + "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 + "dio-chacon-wifi-api", + "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 + "enocean", # https://github.com/kipe/enocean/pull/142 + "gardena-bluetooth", # https://github.com/elupus/gardena-bluetooth/pull/11 + "heatmiserV3", # https://github.com/andylockran/heatmiserV3/pull/94 + "huum", # https://github.com/frwickst/pyhuum/pull/8 + "imutils", # https://github.com/PyImageSearch/imutils/pull/292 + "iso4217", # Public domain + "kiwiki_client", # https://github.com/c7h/kiwiki_client/pull/6 + "krakenex", # https://github.com/veox/python3-krakenex/pull/145 + "ld2410-ble", # https://github.com/930913/ld2410-ble/pull/7 + "maxcube-api", # https://github.com/uebelack/python-maxcube-api/pull/48 + "nessclient", # https://github.com/nickw444/nessclient/pull/65 + "neurio", # https://github.com/jordanh/neurio-python/pull/13 + "nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14 + "pigpio", # https://github.com/joan2937/pigpio/pull/608 + "pyEmby", # https://github.com/mezz64/pyEmby/pull/12 + "pymitv", # MIT + "pyTibber", # https://github.com/Danielhiversen/pyTibber/pull/294 + "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 + "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 + "pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6 + "pyvera", # https://github.com/maximvelichko/pyvera/pull/164 + "pyxeoma", # https://github.com/jeradM/pyxeoma/pull/11 + "repoze.lru", + "russound", # https://github.com/laf/russound/pull/14 # codespell:ignore laf + "ruuvitag-ble", # https://github.com/Bluetooth-Devices/ruuvitag-ble/pull/10 + "sensirion-ble", # https://github.com/akx/sensirion-ble/pull/9 + "sharp_aquos_rc", # https://github.com/jmoore987/sharp_aquos_rc/pull/14 + 
"tapsaff", # https://github.com/bazwilliams/python-taps-aff/pull/5 + "tellduslive", # https://github.com/molobrakos/tellduslive/pull/24 + "tellsticknet", # https://github.com/molobrakos/tellsticknet/pull/33 + "vincenty", # Public domain + "zeversolar", # https://github.com/kvanzuijlen/zeversolar/pull/46 +} + +TODO = { + "aiocache": AwesomeVersion( + "0.12.2" + ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? +} + + +def main() -> int: + """Run the main script.""" + raw_licenses = json.loads(Path("licenses.json").read_text()) + package_definitions = [PackageDefinition.from_dict(data) for data in raw_licenses] + exit_code = 0 + for package in package_definitions: + previous_unapproved_version = TODO.get(package.name) + approved = False + for approved_license in OSI_APPROVED_LICENSES: + if approved_license in package.license: + approved = True + break + if previous_unapproved_version is not None: + if previous_unapproved_version < package.version: + if approved: + print( + "Approved license detected for " + f"{package.name}@{package.version}: {package.license}" + ) + print("Please remove the package from the TODO list.") + print() + else: + print( + "We could not detect an OSI-approved license for " + f"{package.name}@{package.version}: {package.license}" + ) + print() + exit_code = 1 + elif not approved and package.name not in EXCEPTIONS: + print( + "We could not detect an OSI-approved license for " + f"{package.name}@{package.version}: {package.license}" + ) + print() + exit_code = 1 + elif approved and package.name in EXCEPTIONS: + print( + "Approved license detected for " + f"{package.name}@{package.version}: {package.license}" + ) + print(f"Please remove the package from the EXCEPTIONS list: {package.name}") + print() + exit_code = 1 + current_packages = {package.name for package in package_definitions} + for package in [*TODO.keys(), *EXCEPTIONS]: + if package not in current_packages: + print( + f"Package {package} is tracked, 
but not used. Please remove from the licenses.py" + "file." + ) + print() + exit_code = 1 + return exit_code + + +if __name__ == "__main__": + exit_code = main() + if exit_code == 0: + print("All licenses are approved!") + sys.exit(exit_code) diff --git a/script/lint_and_test.py b/script/lint_and_test.py index e23870364b6..ff3db8aa1ed 100755 --- a/script/lint_and_test.py +++ b/script/lint_and_test.py @@ -20,7 +20,7 @@ except ImportError: RE_ASCII = re.compile(r"\033\[[^m]*m") -Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"]) +Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"]) # noqa: PYI024 PASS = "green" FAIL = "bold_red" diff --git a/script/scaffold/templates/config_flow/tests/conftest.py b/script/scaffold/templates/config_flow/tests/conftest.py index fc217636705..12faacd40df 100644 --- a/script/scaffold/templates/config_flow/tests/conftest.py +++ b/script/scaffold/templates/config_flow/tests/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the NEW_NAME tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/script/scaffold/templates/config_flow_helper/tests/conftest.py b/script/scaffold/templates/config_flow_helper/tests/conftest.py index fc217636705..12faacd40df 100644 --- a/script/scaffold/templates/config_flow_helper/tests/conftest.py +++ b/script/scaffold/templates/config_flow_helper/tests/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the NEW_NAME tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py b/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py index 809902fa0dd..8e7854835d8 100644 --- a/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py +++ 
b/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py @@ -59,7 +59,7 @@ def get_suggested(schema, key): return None return k.description["suggested_value"] # Wanted key absent from schema - raise Exception + raise KeyError(f"Key `{key}` is missing from schema") @pytest.mark.parametrize("platform", ["sensor"]) diff --git a/tests/auth/mfa_modules/test_insecure_example.py b/tests/auth/mfa_modules/test_insecure_example.py index f7f8a327059..8caca780ecb 100644 --- a/tests/auth/mfa_modules/test_insecure_example.py +++ b/tests/auth/mfa_modules/test_insecure_example.py @@ -121,7 +121,7 @@ async def test_login(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("pin") == str + assert result["data_schema"].schema.get("pin") is str result = await hass.auth.login_flow.async_configure( result["flow_id"], {"pin": "invalid-code"} diff --git a/tests/auth/mfa_modules/test_notify.py b/tests/auth/mfa_modules/test_notify.py index 23b8811dbf9..d6f4d80f99e 100644 --- a/tests/auth/mfa_modules/test_notify.py +++ b/tests/auth/mfa_modules/test_notify.py @@ -155,7 +155,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") == str + assert result["data_schema"].schema.get("code") is str # wait service call finished await hass.async_block_till_done() @@ -214,7 +214,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") == str + assert result["data_schema"].schema.get("code") is str # wait service call finished await hass.async_block_till_done() diff --git a/tests/auth/mfa_modules/test_totp.py b/tests/auth/mfa_modules/test_totp.py 
index 961db3f44ca..fadc3214712 100644 --- a/tests/auth/mfa_modules/test_totp.py +++ b/tests/auth/mfa_modules/test_totp.py @@ -114,7 +114,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") == str + assert result["data_schema"].schema.get("code") is str with patch("pyotp.TOTP.verify", return_value=False): result = await hass.auth.login_flow.async_configure( diff --git a/tests/auth/providers/test_command_line.py b/tests/auth/providers/test_command_line.py index 016ce767bad..2ce49730e5f 100644 --- a/tests/auth/providers/test_command_line.py +++ b/tests/auth/providers/test_command_line.py @@ -10,10 +10,11 @@ from homeassistant import data_entry_flow from homeassistant.auth import AuthManager, auth_store, models as auth_models from homeassistant.auth.providers import command_line from homeassistant.const import CONF_TYPE +from homeassistant.core import HomeAssistant @pytest.fixture -async def store(hass): +async def store(hass: HomeAssistant) -> auth_store.AuthStore: """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -21,7 +22,9 @@ async def store(hass): @pytest.fixture -def provider(hass, store): +def provider( + hass: HomeAssistant, store: auth_store.AuthStore +) -> command_line.CommandLineAuthProvider: """Mock provider.""" return command_line.CommandLineAuthProvider( hass, @@ -38,12 +41,18 @@ def provider(hass, store): @pytest.fixture -def manager(hass, store, provider): +def manager( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider: command_line.CommandLineAuthProvider, +) -> AuthManager: """Mock manager.""" return AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) -async def test_create_new_credential(manager, provider) -> None: +async def test_create_new_credential( + manager: AuthManager, provider: 
command_line.CommandLineAuthProvider +) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "good-user", "password": "good-pass"} @@ -57,7 +66,9 @@ async def test_create_new_credential(manager, provider) -> None: assert not user.local_only -async def test_match_existing_credentials(store, provider) -> None: +async def test_match_existing_credentials( + provider: command_line.CommandLineAuthProvider, +) -> None: """See if we match existing users.""" existing = auth_models.Credentials( id=uuid.uuid4(), @@ -73,24 +84,26 @@ async def test_match_existing_credentials(store, provider) -> None: assert credentials is existing -async def test_invalid_username(provider) -> None: +async def test_invalid_username(provider: command_line.CommandLineAuthProvider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(command_line.InvalidAuthError): await provider.async_validate_login("bad-user", "good-pass") -async def test_invalid_password(provider) -> None: +async def test_invalid_password(provider: command_line.CommandLineAuthProvider) -> None: """Test we raise if incorrect password specified.""" with pytest.raises(command_line.InvalidAuthError): await provider.async_validate_login("good-user", "bad-pass") -async def test_good_auth(provider) -> None: +async def test_good_auth(provider: command_line.CommandLineAuthProvider) -> None: """Test nothing is raised with good credentials.""" await provider.async_validate_login("good-user", "good-pass") -async def test_good_auth_with_meta(manager, provider) -> None: +async def test_good_auth_with_meta( + manager: AuthManager, provider: command_line.CommandLineAuthProvider +) -> None: """Test metadata is added upon successful authentication.""" provider.config[command_line.CONF_ARGS] = ["--with-meta"] provider.config[command_line.CONF_META] = True @@ -110,7 +123,9 @@ async def test_good_auth_with_meta(manager, provider) -> None: assert 
user.local_only -async def test_utf_8_username_password(provider) -> None: +async def test_utf_8_username_password( + provider: command_line.CommandLineAuthProvider, +) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "ßßß", "password": "äöü"} @@ -118,7 +133,9 @@ async def test_utf_8_username_password(provider) -> None: assert credentials.is_new is True -async def test_login_flow_validates(provider) -> None: +async def test_login_flow_validates( + provider: command_line.CommandLineAuthProvider, +) -> None: """Test login flow.""" flow = await provider.async_login_flow({}) result = await flow.async_step_init() @@ -137,7 +154,7 @@ async def test_login_flow_validates(provider) -> None: assert result["data"]["username"] == "good-user" -async def test_strip_username(provider) -> None: +async def test_strip_username(provider: command_line.CommandLineAuthProvider) -> None: """Test authentication works with username with whitespace around.""" flow = await provider.async_login_flow({}) result = await flow.async_step_init( diff --git a/tests/auth/providers/test_homeassistant.py b/tests/auth/providers/test_homeassistant.py index dc5c255579c..dd2ce65b480 100644 --- a/tests/auth/providers/test_homeassistant.py +++ b/tests/auth/providers/test_homeassistant.py @@ -1,6 +1,7 @@ """Test the Home Assistant local auth provider.""" import asyncio +from typing import Any from unittest.mock import Mock, patch import pytest @@ -13,10 +14,12 @@ from homeassistant.auth.providers import ( homeassistant as hass_auth, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component @pytest.fixture -def data(hass): +def data(hass: HomeAssistant) -> hass_auth.Data: """Create a loaded data class.""" data = hass_auth.Data(hass) hass.loop.run_until_complete(data.async_load()) @@ -24,7 +27,7 @@ def data(hass): @pytest.fixture -def 
legacy_data(hass): +def legacy_data(hass: HomeAssistant) -> hass_auth.Data: """Create a loaded legacy data class.""" data = hass_auth.Data(hass) hass.loop.run_until_complete(data.async_load()) @@ -32,7 +35,13 @@ def legacy_data(hass): return data -async def test_validating_password_invalid_user(data, hass: HomeAssistant) -> None: +@pytest.fixture +async def load_auth_component(hass: HomeAssistant) -> None: + """Load the auth component for translations.""" + await async_setup_component(hass, "auth", {}) + + +async def test_validating_password_invalid_user(data: hass_auth.Data) -> None: """Test validating an invalid user.""" with pytest.raises(hass_auth.InvalidAuth): data.validate_login("non-existing", "pw") @@ -48,7 +57,9 @@ async def test_not_allow_set_id() -> None: ) -async def test_new_users_populate_values(hass: HomeAssistant, data) -> None: +async def test_new_users_populate_values( + hass: HomeAssistant, data: hass_auth.Data +) -> None: """Test that we populate data for new users.""" data.add_auth("hello", "test-pass") await data.async_save() @@ -61,7 +72,7 @@ async def test_new_users_populate_values(hass: HomeAssistant, data) -> None: assert user.is_active -async def test_changing_password_raises_invalid_user(data, hass: HomeAssistant) -> None: +async def test_changing_password_raises_invalid_user(data: hass_auth.Data) -> None: """Test that changing password raises invalid user.""" with pytest.raises(hass_auth.InvalidUser): data.change_password("non-existing", "pw") @@ -70,20 +81,34 @@ async def test_changing_password_raises_invalid_user(data, hass: HomeAssistant) # Modern mode -async def test_adding_user(data, hass: HomeAssistant) -> None: +async def test_adding_user(data: hass_auth.Data) -> None: """Test adding a user.""" data.add_auth("test-user", "test-pass") data.validate_login(" test-user ", "test-pass") -async def test_adding_user_duplicate_username(data, hass: HomeAssistant) -> None: +@pytest.mark.parametrize("username", ["test-user ", "TEST-USER"]) 
+@pytest.mark.usefixtures("load_auth_component") +def test_adding_user_not_normalized(data: hass_auth.Data, username: str) -> None: + """Test adding a user.""" + with pytest.raises( + hass_auth.InvalidUsername, match=f'Username "{username}" is not normalized' + ): + data.add_auth(username, "test-pass") + + +@pytest.mark.usefixtures("load_auth_component") +def test_adding_user_duplicate_username(data: hass_auth.Data) -> None: """Test adding a user with duplicate username.""" data.add_auth("test-user", "test-pass") - with pytest.raises(hass_auth.InvalidUser): - data.add_auth("TEST-user ", "other-pass") + + with pytest.raises( + hass_auth.InvalidUsername, match='Username "test-user" already exists' + ): + data.add_auth("test-user", "other-pass") -async def test_validating_password_invalid_password(data, hass: HomeAssistant) -> None: +async def test_validating_password_invalid_password(data: hass_auth.Data) -> None: """Test validating an invalid password.""" data.add_auth("test-user", "test-pass") @@ -97,7 +122,7 @@ async def test_validating_password_invalid_password(data, hass: HomeAssistant) - data.validate_login("test-user", "Test-pass") -async def test_changing_password(data, hass: HomeAssistant) -> None: +async def test_changing_password(data: hass_auth.Data) -> None: """Test adding a user.""" data.add_auth("test-user", "test-pass") data.change_password("TEST-USER ", "new-pass") @@ -108,7 +133,7 @@ async def test_changing_password(data, hass: HomeAssistant) -> None: data.validate_login("test-UsEr", "new-pass") -async def test_login_flow_validates(data, hass: HomeAssistant) -> None: +async def test_login_flow_validates(data: hass_auth.Data, hass: HomeAssistant) -> None: """Test login flow.""" data.add_auth("test-user", "test-pass") await data.async_save() @@ -139,7 +164,7 @@ async def test_login_flow_validates(data, hass: HomeAssistant) -> None: assert result["data"]["username"] == "test-USER" -async def test_saving_loading(data, hass: HomeAssistant) -> None: 
+async def test_saving_loading(data: hass_auth.Data, hass: HomeAssistant) -> None: """Test saving and loading JSON.""" data.add_auth("test-user", "test-pass") data.add_auth("second-user", "second-pass") @@ -151,7 +176,9 @@ async def test_saving_loading(data, hass: HomeAssistant) -> None: data.validate_login("second-user ", "second-pass") -async def test_get_or_create_credentials(hass: HomeAssistant, data) -> None: +async def test_get_or_create_credentials( + hass: HomeAssistant, data: hass_auth.Data +) -> None: """Test that we can get or create credentials.""" manager = await auth_manager_from_config(hass, [{"type": "homeassistant"}], []) provider = manager.auth_providers[0] @@ -167,26 +194,14 @@ async def test_get_or_create_credentials(hass: HomeAssistant, data) -> None: # Legacy mode -async def test_legacy_adding_user(legacy_data, hass: HomeAssistant) -> None: +async def test_legacy_adding_user(legacy_data: hass_auth.Data) -> None: """Test in legacy mode adding a user.""" legacy_data.add_auth("test-user", "test-pass") legacy_data.validate_login("test-user", "test-pass") -async def test_legacy_adding_user_duplicate_username( - legacy_data, hass: HomeAssistant -) -> None: - """Test in legacy mode adding a user with duplicate username.""" - legacy_data.add_auth("test-user", "test-pass") - with pytest.raises(hass_auth.InvalidUser): - legacy_data.add_auth("test-user", "other-pass") - # Not considered duplicate - legacy_data.add_auth("test-user ", "test-pass") - legacy_data.add_auth("Test-user", "test-pass") - - async def test_legacy_validating_password_invalid_password( - legacy_data, hass: HomeAssistant + legacy_data: hass_auth.Data, ) -> None: """Test in legacy mode validating an invalid password.""" legacy_data.add_auth("test-user", "test-pass") @@ -195,7 +210,7 @@ async def test_legacy_validating_password_invalid_password( legacy_data.validate_login("test-user", "invalid-pass") -async def test_legacy_changing_password(legacy_data, hass: HomeAssistant) -> None: 
+async def test_legacy_changing_password(legacy_data: hass_auth.Data) -> None: """Test in legacy mode adding a user.""" user = "test-user" legacy_data.add_auth(user, "test-pass") @@ -208,14 +223,16 @@ async def test_legacy_changing_password(legacy_data, hass: HomeAssistant) -> Non async def test_legacy_changing_password_raises_invalid_user( - legacy_data, hass: HomeAssistant + legacy_data: hass_auth.Data, ) -> None: """Test in legacy mode that we initialize an empty config.""" with pytest.raises(hass_auth.InvalidUser): legacy_data.change_password("non-existing", "pw") -async def test_legacy_login_flow_validates(legacy_data, hass: HomeAssistant) -> None: +async def test_legacy_login_flow_validates( + legacy_data: hass_auth.Data, hass: HomeAssistant +) -> None: """Test in legacy mode login flow.""" legacy_data.add_auth("test-user", "test-pass") await legacy_data.async_save() @@ -246,7 +263,9 @@ async def test_legacy_login_flow_validates(legacy_data, hass: HomeAssistant) -> assert result["data"]["username"] == "test-user" -async def test_legacy_saving_loading(legacy_data, hass: HomeAssistant) -> None: +async def test_legacy_saving_loading( + legacy_data: hass_auth.Data, hass: HomeAssistant +) -> None: """Test in legacy mode saving and loading JSON.""" legacy_data.add_auth("test-user", "test-pass") legacy_data.add_auth("second-user", "second-pass") @@ -263,7 +282,7 @@ async def test_legacy_saving_loading(legacy_data, hass: HomeAssistant) -> None: async def test_legacy_get_or_create_credentials( - hass: HomeAssistant, legacy_data + hass: HomeAssistant, legacy_data: hass_auth.Data ) -> None: """Test in legacy mode that we can get or create credentials.""" manager = await auth_manager_from_config(hass, [{"type": "homeassistant"}], []) @@ -308,3 +327,155 @@ async def test_race_condition_in_data_loading(hass: HomeAssistant) -> None: assert isinstance(results[0], hass_auth.InvalidAuth) # results[1] will be a TypeError if race condition occurred assert isinstance(results[1], 
hass_auth.InvalidAuth) + + +def test_change_username(data: hass_auth.Data) -> None: + """Test changing username.""" + data.add_auth("test-user", "test-pass") + users = data.users + assert len(users) == 1 + assert users[0]["username"] == "test-user" + + data.change_username("test-user", "new-user") + + users = data.users + assert len(users) == 1 + assert users[0]["username"] == "new-user" + + +@pytest.mark.parametrize("username", ["test-user ", "TEST-USER"]) +def test_change_username_legacy(legacy_data: hass_auth.Data, username: str) -> None: + """Test changing username.""" + # Cannot use add_auth as it normalizes username + legacy_data.users.append( + { + "username": username, + "password": legacy_data.hash_password("test-pass", True).decode(), + } + ) + + users = legacy_data.users + assert len(users) == 1 + assert users[0]["username"] == username + + legacy_data.change_username(username, "test-user") + + users = legacy_data.users + assert len(users) == 1 + assert users[0]["username"] == "test-user" + + +def test_change_username_invalid_user(data: hass_auth.Data) -> None: + """Test changing username raises on invalid user.""" + data.add_auth("test-user", "test-pass") + users = data.users + assert len(users) == 1 + assert users[0]["username"] == "test-user" + + with pytest.raises(hass_auth.InvalidUser): + data.change_username("non-existing", "new-user") + + users = data.users + assert len(users) == 1 + assert users[0]["username"] == "test-user" + + +@pytest.mark.usefixtures("load_auth_component") +async def test_change_username_not_normalized( + data: hass_auth.Data, hass: HomeAssistant +) -> None: + """Test changing username raises on not normalized username.""" + data.add_auth("test-user", "test-pass") + + with pytest.raises( + hass_auth.InvalidUsername, match='Username "TEST-user " is not normalized' + ): + data.change_username("test-user", "TEST-user ") + + +@pytest.mark.parametrize( + ("usernames_in_storage", "usernames_in_repair"), + [ + (["Uppercase"], '- 
"Uppercase"'), + ([" leading"], '- " leading"'), + (["trailing "], '- "trailing "'), + (["Test", "test", "Fritz "], '- "Fritz "\n- "Test"'), + ], +) +async def test_create_repair_on_legacy_usernames( + hass: HomeAssistant, + hass_storage: dict[str, Any], + issue_registry: ir.IssueRegistry, + usernames_in_storage: list[str], + usernames_in_repair: str, +) -> None: + """Test that we create a repair issue for legacy usernames.""" + assert not issue_registry.issues.get( + ("auth", "homeassistant_provider_not_normalized_usernames") + ), "Repair issue already exists" + + hass_storage[hass_auth.STORAGE_KEY] = { + "version": 1, + "minor_version": 1, + "key": "auth_provider.homeassistant", + "data": { + "users": [ + { + "username": username, + "password": "onlyherebecauseweneedapasswordstring", + } + for username in usernames_in_storage + ] + }, + } + data = hass_auth.Data(hass) + await data.async_load() + issue = issue_registry.issues.get( + ("auth", "homeassistant_provider_not_normalized_usernames") + ) + assert issue, "Repair issue not created" + assert issue.translation_placeholders == {"usernames": usernames_in_repair} + + +async def test_delete_repair_after_fixing_usernames( + hass: HomeAssistant, + hass_storage: dict[str, Any], + issue_registry: ir.IssueRegistry, +) -> None: + """Test that the repair is deleted after fixing the usernames.""" + hass_storage[hass_auth.STORAGE_KEY] = { + "version": 1, + "minor_version": 1, + "key": "auth_provider.homeassistant", + "data": { + "users": [ + { + "username": "Test", + "password": "onlyherebecauseweneedapasswordstring", + }, + { + "username": "bla ", + "password": "onlyherebecauseweneedapasswordstring", + }, + ] + }, + } + data = hass_auth.Data(hass) + await data.async_load() + issue = issue_registry.issues.get( + ("auth", "homeassistant_provider_not_normalized_usernames") + ) + assert issue, "Repair issue not created" + assert issue.translation_placeholders == {"usernames": '- "Test"\n- "bla "'} + + 
data.change_username("Test", "test") + issue = issue_registry.issues.get( + ("auth", "homeassistant_provider_not_normalized_usernames") + ) + assert issue + assert issue.translation_placeholders == {"usernames": '- "bla "'} + + data.change_username("bla ", "bla") + assert not issue_registry.issues.get( + ("auth", "homeassistant_provider_not_normalized_usernames") + ), "Repair issue should be deleted" diff --git a/tests/auth/providers/test_insecure_example.py b/tests/auth/providers/test_insecure_example.py index f0043231c04..7c28028753c 100644 --- a/tests/auth/providers/test_insecure_example.py +++ b/tests/auth/providers/test_insecure_example.py @@ -7,10 +7,11 @@ import pytest from homeassistant.auth import AuthManager, auth_store, models as auth_models from homeassistant.auth.providers import insecure_example +from homeassistant.core import HomeAssistant @pytest.fixture -async def store(hass): +async def store(hass: HomeAssistant) -> auth_store.AuthStore: """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -18,7 +19,9 @@ async def store(hass): @pytest.fixture -def provider(hass, store): +def provider( + hass: HomeAssistant, store: auth_store.AuthStore +) -> insecure_example.ExampleAuthProvider: """Mock provider.""" return insecure_example.ExampleAuthProvider( hass, @@ -38,12 +41,18 @@ def provider(hass, store): @pytest.fixture -def manager(hass, store, provider): +def manager( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider: insecure_example.ExampleAuthProvider, +) -> AuthManager: """Mock manager.""" return AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) -async def test_create_new_credential(manager, provider) -> None: +async def test_create_new_credential( + manager: AuthManager, provider: insecure_example.ExampleAuthProvider +) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "user-test", "password": "password-test"} 
@@ -55,7 +64,9 @@ async def test_create_new_credential(manager, provider) -> None: assert user.is_active -async def test_match_existing_credentials(store, provider) -> None: +async def test_match_existing_credentials( + provider: insecure_example.ExampleAuthProvider, +) -> None: """See if we match existing users.""" existing = auth_models.Credentials( id=uuid.uuid4(), @@ -71,19 +82,21 @@ async def test_match_existing_credentials(store, provider) -> None: assert credentials is existing -async def test_verify_username(provider) -> None: +async def test_verify_username(provider: insecure_example.ExampleAuthProvider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(insecure_example.InvalidAuthError): await provider.async_validate_login("non-existing-user", "password-test") -async def test_verify_password(provider) -> None: +async def test_verify_password(provider: insecure_example.ExampleAuthProvider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(insecure_example.InvalidAuthError): await provider.async_validate_login("user-test", "incorrect-password") -async def test_utf_8_username_password(provider) -> None: +async def test_utf_8_username_password( + provider: insecure_example.ExampleAuthProvider, +) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "🎉", "password": "😎"} diff --git a/tests/auth/providers/test_trusted_networks.py b/tests/auth/providers/test_trusted_networks.py index 2f84a256f2d..e738e8f0911 100644 --- a/tests/auth/providers/test_trusted_networks.py +++ b/tests/auth/providers/test_trusted_networks.py @@ -17,7 +17,7 @@ from homeassistant.setup import async_setup_component @pytest.fixture -async def store(hass): +async def store(hass: HomeAssistant) -> auth_store.AuthStore: """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -25,7 +25,9 @@ async def store(hass): @pytest.fixture -def 
provider(hass, store): +def provider( + hass: HomeAssistant, store: auth_store.AuthStore +) -> tn_auth.TrustedNetworksAuthProvider: """Mock provider.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -45,7 +47,9 @@ def provider(hass, store): @pytest.fixture -def provider_with_user(hass, store): +def provider_with_user( + hass: HomeAssistant, store: auth_store.AuthStore +) -> tn_auth.TrustedNetworksAuthProvider: """Mock provider with trusted users config.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -71,7 +75,9 @@ def provider_with_user(hass, store): @pytest.fixture -def provider_bypass_login(hass, store): +def provider_bypass_login( + hass: HomeAssistant, store: auth_store.AuthStore +) -> tn_auth.TrustedNetworksAuthProvider: """Mock provider with allow_bypass_login config.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -92,13 +98,21 @@ def provider_bypass_login(hass, store): @pytest.fixture -def manager(hass, store, provider): +def manager( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider: tn_auth.TrustedNetworksAuthProvider, +) -> auth.AuthManager: """Mock manager.""" return auth.AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) @pytest.fixture -def manager_with_user(hass, store, provider_with_user): +def manager_with_user( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider_with_user: tn_auth.TrustedNetworksAuthProvider, +) -> auth.AuthManager: """Mock manager with trusted user.""" return auth.AuthManager( hass, @@ -109,7 +123,11 @@ def manager_with_user(hass, store, provider_with_user): @pytest.fixture -def manager_bypass_login(hass, store, provider_bypass_login): +def manager_bypass_login( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider_bypass_login: tn_auth.TrustedNetworksAuthProvider, +) -> auth.AuthManager: """Mock manager with allow bypass login.""" return auth.AuthManager( hass, @@ -119,7 +137,7 @@ def manager_bypass_login(hass, store, provider_bypass_login): ) 
-async def test_config_schema(): +async def test_config_schema() -> None: """Test CONFIG_SCHEMA.""" # Valid configuration tn_auth.CONFIG_SCHEMA( @@ -145,7 +163,9 @@ async def test_config_schema(): ) -async def test_trusted_networks_credentials(manager, provider) -> None: +async def test_trusted_networks_credentials( + manager: auth.AuthManager, provider: tn_auth.TrustedNetworksAuthProvider +) -> None: """Test trusted_networks credentials related functions.""" owner = await manager.async_create_user("test-owner") tn_owner_cred = await provider.async_get_or_create_credentials({"user": owner.id}) @@ -162,22 +182,24 @@ async def test_trusted_networks_credentials(manager, provider) -> None: await provider.async_get_or_create_credentials({"user": "invalid-user"}) -async def test_validate_access(provider) -> None: +async def test_validate_access(provider: tn_auth.TrustedNetworksAuthProvider) -> None: """Test validate access from trusted networks.""" provider.async_validate_access(ip_address("192.168.0.1")) provider.async_validate_access(ip_address("192.168.128.10")) provider.async_validate_access(ip_address("::1")) provider.async_validate_access(ip_address("fd01:db8::ff00:42:8329")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.0.2")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("127.0.0.1")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("2001:db8::ff00:42:8329")) -async def test_validate_access_proxy(hass: HomeAssistant, provider) -> None: +async def test_validate_access_proxy( + hass: HomeAssistant, provider: tn_auth.TrustedNetworksAuthProvider +) -> None: """Test validate access from trusted networks are blocked from proxy.""" await async_setup_component( @@ -192,15 +214,17 @@ async def 
test_validate_access_proxy(hass: HomeAssistant, provider) -> None: ) provider.async_validate_access(ip_address("192.168.128.2")) provider.async_validate_access(ip_address("fd00::2")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.0")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.1")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("fd00::1")) -async def test_validate_access_cloud(hass: HomeAssistant, provider) -> None: +async def test_validate_access_cloud( + hass: HomeAssistant, provider: tn_auth.TrustedNetworksAuthProvider +) -> None: """Test validate access from trusted networks are blocked from cloud.""" await async_setup_component( hass, @@ -217,21 +241,25 @@ async def test_validate_access_cloud(hass: HomeAssistant, provider) -> None: provider.async_validate_access(ip_address("192.168.128.2")) remote.is_cloud_request.set(True) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.2")) -async def test_validate_refresh_token(provider) -> None: +async def test_validate_refresh_token( + provider: tn_auth.TrustedNetworksAuthProvider, +) -> None: """Verify re-validation of refresh token.""" with patch.object(provider, "async_validate_access") as mock: - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_refresh_token(Mock(), None) provider.async_validate_refresh_token(Mock(), "127.0.0.1") mock.assert_called_once_with(ip_address("127.0.0.1")) -async def test_login_flow(manager, provider) -> None: +async def test_login_flow( + manager: auth.AuthManager, provider: tn_auth.TrustedNetworksAuthProvider +) -> None: """Test login 
flow.""" owner = await manager.async_create_user("test-owner") user = await manager.async_create_user("test-user") @@ -258,7 +286,10 @@ async def test_login_flow(manager, provider) -> None: assert step["data"]["user"] == user.id -async def test_trusted_users_login(manager_with_user, provider_with_user) -> None: +async def test_trusted_users_login( + manager_with_user: auth.AuthManager, + provider_with_user: tn_auth.TrustedNetworksAuthProvider, +) -> None: """Test available user list changed per different IP.""" owner = await manager_with_user.async_create_user("test-owner") sys_user = await manager_with_user.async_create_system_user( @@ -338,7 +369,10 @@ async def test_trusted_users_login(manager_with_user, provider_with_user) -> Non assert schema({"user": sys_user.id}) -async def test_trusted_group_login(manager_with_user, provider_with_user) -> None: +async def test_trusted_group_login( + manager_with_user: auth.AuthManager, + provider_with_user: tn_auth.TrustedNetworksAuthProvider, +) -> None: """Test config trusted_user with group_id.""" owner = await manager_with_user.async_create_user("test-owner") # create a user in user group @@ -391,7 +425,10 @@ async def test_trusted_group_login(manager_with_user, provider_with_user) -> Non assert schema({"user": user.id}) -async def test_bypass_login_flow(manager_bypass_login, provider_bypass_login) -> None: +async def test_bypass_login_flow( + manager_bypass_login: auth.AuthManager, + provider_bypass_login: tn_auth.TrustedNetworksAuthProvider, +) -> None: """Test login flow can be bypass if only one user available.""" owner = await manager_bypass_login.async_create_user("test-owner") diff --git a/tests/common.py b/tests/common.py index 30c7cc2d971..2f0c032616a 100644 --- a/tests/common.py +++ b/tests/common.py @@ -3,8 +3,22 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine, Mapping, Sequence -from contextlib import asynccontextmanager, contextmanager +from 
collections.abc import ( + AsyncGenerator, + Callable, + Coroutine, + Generator, + Iterable, + Iterator, + Mapping, + Sequence, +) +from contextlib import ( + AbstractAsyncContextManager, + asynccontextmanager, + contextmanager, + suppress, +) from datetime import UTC, datetime, timedelta from enum import Enum import functools as ft @@ -23,7 +37,7 @@ from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import unused_port as get_test_instance_port # noqa: F401 import pytest from syrupy import SnapshotAssertion -from typing_extensions import AsyncGenerator, Generator +from typing_extensions import TypeVar import voluptuous as vol from homeassistant import auth, bootstrap, config_entries, loader @@ -70,7 +84,6 @@ from homeassistant.helpers import ( intent, issue_registry as ir, label_registry as lr, - recorder as recorder_helper, restore_state as rs, storage, translation, @@ -83,9 +96,13 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.json import JSONEncoder, _orjson_default_encoder, json_dumps from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.setup import setup_component -from homeassistant.util.async_ import run_callback_threadsafe +from homeassistant.util.async_ import ( + _SHUTDOWN_RUN_CALLBACK_THREADSAFE, + get_scheduled_timer_handles, + run_callback_threadsafe, +) import homeassistant.util.dt as dt_util +from homeassistant.util.event_type import EventType from homeassistant.util.json import ( JsonArrayType, JsonObjectType, @@ -103,6 +120,8 @@ from .testing_config.custom_components.test_constant_deprecation import ( import_deprecated_constant, ) +_DataT = TypeVar("_DataT", bound=Mapping[str, Any], default=dict[str, Any]) + _LOGGER = logging.getLogger(__name__) INSTANCES = [] CLIENT_ID = "https://example.com/app" @@ -163,24 +182,35 @@ def get_test_config_dir(*add_path): @contextmanager def 
get_test_home_assistant() -> Generator[HomeAssistant]: """Return a Home Assistant object pointing at test config directory.""" - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - context_manager = async_test_home_assistant(loop) - hass = loop.run_until_complete(context_manager.__aenter__()) - + hass_created_event = threading.Event() loop_stop_event = threading.Event() + context_manager: AbstractAsyncContextManager = None + hass: HomeAssistant = None + loop: asyncio.AbstractEventLoop = None + orig_stop: Callable = None + def run_loop() -> None: - """Run event loop.""" + """Create and run event loop.""" + nonlocal context_manager, hass, loop, orig_stop + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + context_manager = async_test_home_assistant(loop) + hass = loop.run_until_complete(context_manager.__aenter__()) + + orig_stop = hass.stop + hass._stopped = Mock(set=loop.stop) + hass.start = start_hass + hass.stop = stop_hass loop._thread_ident = threading.get_ident() - hass.loop_thread_id = loop._thread_ident + + hass_created_event.set() + loop.run_forever() loop_stop_event.set() - orig_stop = hass.stop - hass._stopped = Mock(set=loop.stop) - def start_hass(*mocks: Any) -> None: """Start hass.""" asyncio.run_coroutine_threadsafe(hass.async_start(), loop).result() @@ -190,11 +220,10 @@ def get_test_home_assistant() -> Generator[HomeAssistant]: orig_stop() loop_stop_event.wait() - hass.start = start_hass - hass.stop = stop_hass - threading.Thread(name="LoopThread", target=run_loop, daemon=False).start() + hass_created_event.wait() + try: yield hass finally: @@ -366,6 +395,9 @@ async def async_test_home_assistant( finally: # Restore timezone, it is set when creating the hass object dt_util.set_default_time_zone(orig_tz) + # Remove loop shutdown indicator to not interfere with additional hass objects + with suppress(AttributeError): + delattr(hass.loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE) def async_mock_service( @@ -381,7 +413,7 @@ def 
async_mock_service( calls = [] @callback - def mock_service_log(call): # pylint: disable=unnecessary-lambda + def mock_service_log(call): """Mock service call.""" calls.append(call) if raise_exception is not None: @@ -515,7 +547,7 @@ def _async_fire_time_changed( hass: HomeAssistant, utc_datetime: datetime | None, fire_all: bool ) -> None: timestamp = dt_util.utc_to_timestamp(utc_datetime) - for task in list(hass.loop._scheduled): + for task in list(get_scheduled_timer_handles(hass.loop)): if not isinstance(task, asyncio.TimerHandle): continue if task.cancelled(): @@ -1162,30 +1194,6 @@ def assert_setup_component(count, domain=None): ), f"setup_component failed, expected {count} got {res_len}: {res}" -def init_recorder_component(hass, add_config=None, db_url="sqlite://"): - """Initialize the recorder.""" - # Local import to avoid processing recorder and SQLite modules when running a - # testcase which does not use the recorder. - # pylint: disable-next=import-outside-toplevel - from homeassistant.components import recorder - - config = dict(add_config) if add_config else {} - if recorder.CONF_DB_URL not in config: - config[recorder.CONF_DB_URL] = db_url - if recorder.CONF_COMMIT_INTERVAL not in config: - config[recorder.CONF_COMMIT_INTERVAL] = 0 - - with patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True): - if recorder.DOMAIN not in hass.data: - recorder_helper.async_initialize_recorder(hass) - assert setup_component(hass, recorder.DOMAIN, {recorder.DOMAIN: config}) - assert recorder.DOMAIN in hass.config.components - _LOGGER.info( - "Test recorder successfully started, database location: %s", - config[recorder.CONF_DB_URL], - ) - - def mock_restore_cache(hass: HomeAssistant, states: Sequence[State]) -> None: """Mock the DATA_RESTORE_CACHE.""" key = rs.DATA_RESTORE_STATE @@ -1454,7 +1462,7 @@ async def get_system_health_info(hass: HomeAssistant, domain: str) -> dict[str, @contextmanager -def mock_config_flow(domain: str, config_flow: 
type[ConfigFlow]) -> None: +def mock_config_flow(domain: str, config_flow: type[ConfigFlow]) -> Iterator[None]: """Mock a config flow handler.""" original_handler = config_entries.HANDLERS.get(domain) config_entries.HANDLERS[domain] = config_flow @@ -1522,12 +1530,14 @@ def mock_platform( module_cache[platform_path] = module or Mock() -def async_capture_events(hass: HomeAssistant, event_name: str) -> list[Event]: +def async_capture_events( + hass: HomeAssistant, event_name: EventType[_DataT] | str +) -> list[Event[_DataT]]: """Create a helper that captures events.""" - events = [] + events: list[Event[_DataT]] = [] @callback - def capture_events(event: Event) -> None: + def capture_events(event: Event[_DataT]) -> None: events.append(event) hass.bus.async_listen(event_name, capture_events) @@ -1536,14 +1546,14 @@ def async_capture_events(hass: HomeAssistant, event_name: str) -> list[Event]: @callback -def async_mock_signal( - hass: HomeAssistant, signal: SignalType[Any] | str -) -> list[tuple[Any]]: +def async_mock_signal[*_Ts]( + hass: HomeAssistant, signal: SignalType[*_Ts] | str +) -> list[tuple[*_Ts]]: """Catch all dispatches to a signal.""" - calls = [] + calls: list[tuple[*_Ts]] = [] @callback - def mock_signal_handler(*args: Any) -> None: + def mock_signal_handler(*args: *_Ts) -> None: """Mock service call.""" calls.append(args) @@ -1743,7 +1753,7 @@ def extract_stack_to_frame(extract_stack: list[Mock]) -> FrameType: def setup_test_component_platform( hass: HomeAssistant, domain: str, - entities: Sequence[Entity], + entities: Iterable[Entity], from_config_entry: bool = False, built_in: bool = True, ) -> MockPlatform: diff --git a/tests/components/abode/conftest.py b/tests/components/abode/conftest.py index 21b236540d0..097eb568d4a 100644 --- a/tests/components/abode/conftest.py +++ b/tests/components/abode/conftest.py @@ -1,11 +1,11 @@ """Configuration for Abode tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from 
jaraco.abode.helpers import urls as URL import pytest from requests_mock import Mocker -from typing_extensions import Generator from tests.common import load_fixture from tests.components.light.conftest import mock_light_profiles # noqa: F401 diff --git a/tests/components/abode/test_alarm_control_panel.py b/tests/components/abode/test_alarm_control_panel.py index 428e2791ee2..51e0ee46838 100644 --- a/tests/components/abode/test_alarm_control_panel.py +++ b/tests/components/abode/test_alarm_control_panel.py @@ -2,8 +2,6 @@ from unittest.mock import PropertyMock, patch -from jaraco.abode.helpers import constants as CONST - from homeassistant.components.abode import ATTR_DEVICE_ID from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.const import ( @@ -70,7 +68,7 @@ async def test_set_alarm_away(hass: HomeAssistant) -> None: "jaraco.abode.devices.alarm.Alarm.mode", new_callable=PropertyMock, ) as mock_mode: - mock_mode.return_value = CONST.MODE_AWAY + mock_mode.return_value = "away" update_callback = mock_callback.call_args[0][1] await hass.async_add_executor_job(update_callback, "area_1") @@ -100,7 +98,7 @@ async def test_set_alarm_home(hass: HomeAssistant) -> None: with patch( "jaraco.abode.devices.alarm.Alarm.mode", new_callable=PropertyMock ) as mock_mode: - mock_mode.return_value = CONST.MODE_HOME + mock_mode.return_value = "home" update_callback = mock_callback.call_args[0][1] await hass.async_add_executor_job(update_callback, "area_1") @@ -129,7 +127,7 @@ async def test_set_alarm_standby(hass: HomeAssistant) -> None: with patch( "jaraco.abode.devices.alarm.Alarm.mode", new_callable=PropertyMock ) as mock_mode: - mock_mode.return_value = CONST.MODE_STANDBY + mock_mode.return_value = "standby" update_callback = mock_callback.call_args[0][1] await hass.async_add_executor_job(update_callback, "area_1") diff --git a/tests/components/accuweather/conftest.py b/tests/components/accuweather/conftest.py index 
3b0006068ea..737fd3f84b6 100644 --- a/tests/components/accuweather/conftest.py +++ b/tests/components/accuweather/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the AccuWeather tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.accuweather.const import DOMAIN diff --git a/tests/components/accuweather/snapshots/test_weather.ambr b/tests/components/accuweather/snapshots/test_weather.ambr index 49bf4008884..cbe1891d216 100644 --- a/tests/components/accuweather/snapshots/test_weather.ambr +++ b/tests/components/accuweather/snapshots/test_weather.ambr @@ -1,85 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 58, - 'condition': 'lightning-rainy', - 'datetime': '2020-07-26T05:00:00+00:00', - 'humidity': 60, - 'precipitation': 2.5, - 'precipitation_probability': 60, - 'temperature': 29.5, - 'templow': 15.4, - 'uv_index': 5, - 'wind_bearing': 166, - 'wind_gust_speed': 29.6, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 52, - 'condition': 'partlycloudy', - 'datetime': '2020-07-27T05:00:00+00:00', - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 26.2, - 'templow': 15.9, - 'uv_index': 7, - 'wind_bearing': 297, - 'wind_gust_speed': 14.8, - 'wind_speed': 9.3, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 65, - 'condition': 'partlycloudy', - 'datetime': '2020-07-28T05:00:00+00:00', - 'humidity': 52, - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 31.7, - 'templow': 16.8, - 'uv_index': 7, - 'wind_bearing': 198, - 'wind_gust_speed': 24.1, - 'wind_speed': 16.7, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 45, - 'condition': 'partlycloudy', - 'datetime': '2020-07-29T05:00:00+00:00', 
- 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 9, - 'temperature': 24.0, - 'templow': 11.7, - 'uv_index': 6, - 'wind_bearing': 293, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 22.2, - 'cloud_coverage': 50, - 'condition': 'partlycloudy', - 'datetime': '2020-07-30T05:00:00+00:00', - 'humidity': 55, - 'precipitation': 0.0, - 'precipitation_probability': 1, - 'temperature': 21.4, - 'templow': 12.2, - 'uv_index': 7, - 'wind_bearing': 280, - 'wind_gust_speed': 27.8, - 'wind_speed': 18.5, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.home': dict({ diff --git a/tests/components/advantage_air/__init__.py b/tests/components/advantage_air/__init__.py index 05d98e957bb..5587c668c7e 100644 --- a/tests/components/advantage_air/__init__.py +++ b/tests/components/advantage_air/__init__.py @@ -4,6 +4,7 @@ from unittest.mock import AsyncMock, patch from homeassistant.components.advantage_air.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_json_object_fixture @@ -43,7 +44,7 @@ def patch_update(return_value=True, side_effect=None): ) -async def add_mock_config(hass): +async def add_mock_config(hass: HomeAssistant) -> MockConfigEntry: """Create a fake Advantage Air Config Entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/aemet/conftest.py b/tests/components/aemet/conftest.py index aa4f537c7fb..38f4793541c 100644 --- a/tests/components/aemet/conftest.py +++ b/tests/components/aemet/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for aemet.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/aemet/snapshots/test_diagnostics.ambr b/tests/components/aemet/snapshots/test_diagnostics.ambr index 
4b678dc1da5..8d4132cad84 100644 --- a/tests/components/aemet/snapshots/test_diagnostics.ambr +++ b/tests/components/aemet/snapshots/test_diagnostics.ambr @@ -20,7 +20,7 @@ 'pref_disable_polling': False, 'source': 'user', 'title': 'Mock Title', - 'unique_id': None, + 'unique_id': '**REDACTED**', 'version': 1, }), 'coord_data': dict({ diff --git a/tests/components/aemet/snapshots/test_weather.ambr b/tests/components/aemet/snapshots/test_weather.ambr index f19f95a6e80..58c854dcda9 100644 --- a/tests/components/aemet/snapshots/test_weather.ambr +++ b/tests/components/aemet/snapshots/test_weather.ambr @@ -1,494 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-08T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 2.0, - 'templow': -1.0, - 'wind_bearing': 90.0, - 'wind_speed': 0.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T23:00:00+00:00', - 'precipitation_probability': 30, - 'temperature': 4.0, - 'templow': -4.0, - 'wind_bearing': 45.0, - 'wind_speed': 20.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 3.0, - 'templow': -7.0, - 'wind_bearing': 0.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-11T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': -1.0, - 'templow': -13.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-01-12T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 6.0, - 'templow': -11.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-13T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 6.0, - 'templow': -7.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-14T23:00:00+00:00', - 
'precipitation_probability': 0, - 'temperature': 5.0, - 'templow': -4.0, - 'wind_bearing': None, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T12:00:00+00:00', - 'precipitation': 2.7, - 'precipitation_probability': 100, - 'temperature': 0.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 22.0, - 'wind_speed': 15.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T13:00:00+00:00', - 'precipitation': 0.6, - 'precipitation_probability': 100, - 'temperature': 0.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 14.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T14:00:00+00:00', - 'precipitation': 0.8, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 20.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T15:00:00+00:00', - 'precipitation': 1.4, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 14.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T16:00:00+00:00', - 'precipitation': 1.2, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 9.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T17:00:00+00:00', - 'precipitation': 0.4, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 7.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T18:00:00+00:00', - 'precipitation': 0.3, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T19:00:00+00:00', - 'precipitation': 0.1, - 'precipitation_probability': 100, - 
'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-09T20:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 8.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-09T21:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 9.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T22:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T23:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T00:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 10.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T01:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T02:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 9.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T03:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 8.0, - }), - dict({ - 
'condition': 'cloudy', - 'datetime': '2021-01-10T04:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': -1.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 9.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T06:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 18.0, - 'wind_speed': 13.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T07:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T08:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 31.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T09:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': 2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 22.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T12:00:00+00:00', - 'precipitation': 0.0, - 
'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 20.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T13:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T14:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 4.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 28.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T15:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T16:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T17:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T18:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T19:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T20:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 
24.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T21:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 27.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T22:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T23:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-11T00:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 27.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T01:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 22.0, - 'wind_speed': 12.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T02:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 17.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T03:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 11.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T04:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -4.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'clear-night', - 
'datetime': '2021-01-11T05:00:00+00:00', - 'precipitation_probability': None, - 'temperature': -4.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 10.0, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.aemet': dict({ diff --git a/tests/components/aemet/test_diagnostics.py b/tests/components/aemet/test_diagnostics.py index 0d94995a85b..6d007dd0465 100644 --- a/tests/components/aemet/test_diagnostics.py +++ b/tests/components/aemet/test_diagnostics.py @@ -4,6 +4,7 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.aemet.const import DOMAIN from homeassistant.core import HomeAssistant @@ -30,4 +31,4 @@ async def test_config_entry_diagnostics( return_value={}, ): result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/aemet/util.py b/tests/components/aemet/util.py index bb8885f7b4c..162ee657513 100644 --- a/tests/components/aemet/util.py +++ b/tests/components/aemet/util.py @@ -68,6 +68,7 @@ async def async_init_integration(hass: HomeAssistant): CONF_NAME: "AEMET", }, entry_id="7442b231f139e813fc1939281123f220", + unique_id="40.30403754--3.72935236", ) config_entry.add_to_hass(hass) diff --git a/tests/components/aftership/conftest.py b/tests/components/aftership/conftest.py index 1704b099cc2..d66ae267bfe 100644 --- a/tests/components/aftership/conftest.py +++ b/tests/components/aftership/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the AfterShip tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/agent_dvr/conftest.py b/tests/components/agent_dvr/conftest.py index a62e1738850..0ce1c008a23 100644 --- 
a/tests/components/agent_dvr/conftest.py +++ b/tests/components/agent_dvr/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Agent DVR.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/airgradient/conftest.py b/tests/components/airgradient/conftest.py index c5cc46cc8eb..1899e12c8ae 100644 --- a/tests/components/airgradient/conftest.py +++ b/tests/components/airgradient/conftest.py @@ -1,10 +1,10 @@ """AirGradient tests configuration.""" +from collections.abc import Generator from unittest.mock import patch from airgradient import Config, Measures import pytest -from typing_extensions import Generator from homeassistant.components.airgradient.const import DOMAIN from homeassistant.const import CONF_HOST @@ -39,18 +39,30 @@ def mock_airgradient_client() -> Generator[AsyncMock]: client = mock_client.return_value client.host = "10.0.0.131" client.get_current_measures.return_value = Measures.from_json( - load_fixture("current_measures.json", DOMAIN) + load_fixture("current_measures_indoor.json", DOMAIN) ) client.get_config.return_value = Config.from_json( load_fixture("get_config_local.json", DOMAIN) ) + client.get_latest_firmware_version.return_value = "3.1.4" yield client +@pytest.fixture(params=["indoor", "outdoor"]) +def airgradient_devices( + mock_airgradient_client: AsyncMock, request: pytest.FixtureRequest +) -> Generator[AsyncMock]: + """Return a list of AirGradient devices.""" + mock_airgradient_client.get_current_measures.return_value = Measures.from_json( + load_fixture(f"current_measures_{request.param}.json", DOMAIN) + ) + return mock_airgradient_client + + @pytest.fixture def mock_new_airgradient_client( mock_airgradient_client: AsyncMock, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock a new AirGradient client.""" mock_airgradient_client.get_config.return_value = Config.from_json( load_fixture("get_config.json", 
DOMAIN) @@ -61,7 +73,7 @@ def mock_new_airgradient_client( @pytest.fixture def mock_cloud_airgradient_client( mock_airgradient_client: AsyncMock, -) -> Generator[AsyncMock]: +) -> AsyncMock: """Mock a cloud AirGradient client.""" mock_airgradient_client.get_config.return_value = Config.from_json( load_fixture("get_config_cloud.json", DOMAIN) diff --git a/tests/components/airgradient/fixtures/current_measures.json b/tests/components/airgradient/fixtures/current_measures_indoor.json similarity index 100% rename from tests/components/airgradient/fixtures/current_measures.json rename to tests/components/airgradient/fixtures/current_measures_indoor.json diff --git a/tests/components/airgradient/snapshots/test_button.ambr b/tests/components/airgradient/snapshots/test_button.ambr new file mode 100644 index 00000000000..fa3f8994c3c --- /dev/null +++ b/tests/components/airgradient/snapshots/test_button.ambr @@ -0,0 +1,139 @@ +# serializer version: 1 +# name: test_all_entities[indoor][button.airgradient_calibrate_co2_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.airgradient_calibrate_co2_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Calibrate CO2 sensor', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co2_calibration', + 'unique_id': '84fce612f5b8-co2_calibration', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[indoor][button.airgradient_calibrate_co2_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient Calibrate CO2 sensor', + }), + 'context': , + 
'entity_id': 'button.airgradient_calibrate_co2_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[indoor][button.airgradient_test_led_bar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.airgradient_test_led_bar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test LED bar', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_bar_test', + 'unique_id': '84fce612f5b8-led_bar_test', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[indoor][button.airgradient_test_led_bar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient Test LED bar', + }), + 'context': , + 'entity_id': 'button.airgradient_test_led_bar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[outdoor][button.airgradient_calibrate_co2_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.airgradient_calibrate_co2_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Calibrate CO2 sensor', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'co2_calibration', + 'unique_id': '84fce612f5b8-co2_calibration', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[outdoor][button.airgradient_calibrate_co2_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient Calibrate CO2 sensor', + }), + 'context': , + 'entity_id': 'button.airgradient_calibrate_co2_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/airgradient/snapshots/test_init.ambr b/tests/components/airgradient/snapshots/test_init.ambr index 7109f603c9d..e47c5b38bbc 100644 --- a/tests/components/airgradient/snapshots/test_init.ambr +++ b/tests/components/airgradient/snapshots/test_init.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_device_info +# name: test_device_info[indoor] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -20,12 +20,46 @@ 'labels': set({ }), 'manufacturer': 'AirGradient', - 'model': 'I-9PSL', + 'model': 'AirGradient ONE', + 'model_id': 'I-9PSL', 'name': 'Airgradient', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': '84fce612f5b8', 'suggested_area': None, 'sw_version': '3.1.1', 'via_device_id': None, }) # --- +# name: test_device_info[outdoor] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'airgradient', + '84fce612f5b8', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'AirGradient', + 'model': 'AirGradient Open Air', + 'model_id': 'O-1PPT', + 'name': 'Airgradient', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '84fce60bec38', + 'suggested_area': None, + 'sw_version': '3.1.1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/airgradient/snapshots/test_number.ambr 
b/tests/components/airgradient/snapshots/test_number.ambr new file mode 100644 index 00000000000..87df8757eeb --- /dev/null +++ b/tests/components/airgradient/snapshots/test_number.ambr @@ -0,0 +1,113 @@ +# serializer version: 1 +# name: test_all_entities[number.airgradient_display_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.airgradient_display_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display brightness', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'display_brightness', + 'unique_id': '84fce612f5b8-display_brightness', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[number.airgradient_display_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient Display brightness', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.airgradient_display_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[number.airgradient_led_bar_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.airgradient_led_bar_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED bar brightness', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_bar_brightness', + 'unique_id': '84fce612f5b8-led_bar_brightness', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[number.airgradient_led_bar_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient LED bar brightness', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.airgradient_led_bar_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- diff --git a/tests/components/airgradient/snapshots/test_select.ambr b/tests/components/airgradient/snapshots/test_select.ambr index d29c7d23923..b8fca4a110b 100644 --- a/tests/components/airgradient/snapshots/test_select.ambr +++ b/tests/components/airgradient/snapshots/test_select.ambr @@ -1,5 +1,68 @@ # serializer version: 1 -# name: test_all_entities[select.airgradient_configuration_source-entry] +# name: test_all_entities[indoor][select.airgradient_co2_automatic_baseline_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '1', + '8', + '30', + '90', + '180', + '0', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.airgradient_co2_automatic_baseline_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CO2 automatic baseline duration', + 'platform': 'airgradient', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co2_automatic_baseline_calibration', + 'unique_id': '84fce612f5b8-co2_automatic_baseline_calibration', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[indoor][select.airgradient_co2_automatic_baseline_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient CO2 automatic baseline duration', + 'options': list([ + '1', + '8', + '30', + '90', + '180', + '0', + ]), + }), + 'context': , + 'entity_id': 'select.airgradient_co2_automatic_baseline_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_all_entities[indoor][select.airgradient_configuration_source-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -37,7 +100,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_entities[select.airgradient_configuration_source-state] +# name: test_all_entities[indoor][select.airgradient_configuration_source-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient Configuration source', @@ -54,7 +117,7 @@ 'state': 'local', }) # --- -# name: test_all_entities[select.airgradient_display_pm_standard-entry] +# name: test_all_entities[indoor][select.airgradient_display_pm_standard-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -92,7 +155,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_entities[select.airgradient_display_pm_standard-state] +# name: test_all_entities[indoor][select.airgradient_display_pm_standard-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient Display PM standard', @@ -109,7 +172,7 @@ 'state': 'ugm3', }) # --- -# name: test_all_entities[select.airgradient_display_temperature_unit-entry] +# name: test_all_entities[indoor][select.airgradient_display_temperature_unit-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -147,7 +210,7 @@ 'unit_of_measurement': None, }) # 
--- -# name: test_all_entities[select.airgradient_display_temperature_unit-state] +# name: test_all_entities[indoor][select.airgradient_display_temperature_unit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient Display temperature unit', @@ -164,7 +227,7 @@ 'state': 'c', }) # --- -# name: test_all_entities[select.airgradient_led_bar_mode-entry] +# name: test_all_entities[indoor][select.airgradient_led_bar_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -203,7 +266,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_entities[select.airgradient_led_bar_mode-state] +# name: test_all_entities[indoor][select.airgradient_led_bar_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient LED bar mode', @@ -221,7 +284,192 @@ 'state': 'co2', }) # --- -# name: test_all_entities_outdoor[select.airgradient_configuration_source-entry] +# name: test_all_entities[indoor][select.airgradient_nox_index_learning_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '12', + '60', + '120', + '360', + '720', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.airgradient_nox_index_learning_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'NOx index learning offset', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nox_index_learning_time_offset', + 'unique_id': '84fce612f5b8-nox_index_learning_time_offset', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[indoor][select.airgradient_nox_index_learning_offset-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'friendly_name': 'Airgradient NOx index learning offset', + 'options': list([ + '12', + '60', + '120', + '360', + '720', + ]), + }), + 'context': , + 'entity_id': 'select.airgradient_nox_index_learning_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_all_entities[indoor][select.airgradient_voc_index_learning_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '12', + '60', + '120', + '360', + '720', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.airgradient_voc_index_learning_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VOC index learning offset', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voc_index_learning_time_offset', + 'unique_id': '84fce612f5b8-voc_index_learning_time_offset', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[indoor][select.airgradient_voc_index_learning_offset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient VOC index learning offset', + 'options': list([ + '12', + '60', + '120', + '360', + '720', + ]), + }), + 'context': , + 'entity_id': 'select.airgradient_voc_index_learning_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_all_entities[outdoor][select.airgradient_co2_automatic_baseline_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '1', + '8', + '30', + '90', + '180', + '0', + ]), + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.airgradient_co2_automatic_baseline_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CO2 automatic baseline duration', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co2_automatic_baseline_calibration', + 'unique_id': '84fce612f5b8-co2_automatic_baseline_calibration', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[outdoor][select.airgradient_co2_automatic_baseline_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient CO2 automatic baseline duration', + 'options': list([ + '1', + '8', + '30', + '90', + '180', + '0', + ]), + }), + 'context': , + 'entity_id': 'select.airgradient_co2_automatic_baseline_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_all_entities[outdoor][select.airgradient_configuration_source-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -259,7 +507,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_entities_outdoor[select.airgradient_configuration_source-state] +# name: test_all_entities[outdoor][select.airgradient_configuration_source-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient Configuration source', @@ -276,3 +524,125 @@ 'state': 'local', }) # --- +# name: test_all_entities[outdoor][select.airgradient_nox_index_learning_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '12', + '60', + '120', + '360', + '720', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.airgradient_nox_index_learning_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'NOx index learning offset', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nox_index_learning_time_offset', + 'unique_id': '84fce612f5b8-nox_index_learning_time_offset', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[outdoor][select.airgradient_nox_index_learning_offset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient NOx index learning offset', + 'options': list([ + '12', + '60', + '120', + '360', + '720', + ]), + }), + 'context': , + 'entity_id': 'select.airgradient_nox_index_learning_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_all_entities[outdoor][select.airgradient_voc_index_learning_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '12', + '60', + '120', + '360', + '720', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.airgradient_voc_index_learning_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VOC index learning offset', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voc_index_learning_time_offset', + 'unique_id': '84fce612f5b8-voc_index_learning_time_offset', + 'unit_of_measurement': None, 
+ }) +# --- +# name: test_all_entities[outdoor][select.airgradient_voc_index_learning_offset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient VOC index learning offset', + 'options': list([ + '12', + '60', + '120', + '360', + '720', + ]), + }), + 'context': , + 'entity_id': 'select.airgradient_voc_index_learning_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- diff --git a/tests/components/airgradient/snapshots/test_sensor.ambr b/tests/components/airgradient/snapshots/test_sensor.ambr index b0e22e7a9af..ff83fdcc111 100644 --- a/tests/components/airgradient/snapshots/test_sensor.ambr +++ b/tests/components/airgradient/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_all_entities[sensor.airgradient_carbon_dioxide-entry] +# name: test_all_entities[indoor][sensor.airgradient_carbon_dioxide-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -34,7 +34,7 @@ 'unit_of_measurement': 'ppm', }) # --- -# name: test_all_entities[sensor.airgradient_carbon_dioxide-state] +# name: test_all_entities[indoor][sensor.airgradient_carbon_dioxide-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'carbon_dioxide', @@ -50,7 +50,214 @@ 'state': '778', }) # --- -# name: test_all_entities[sensor.airgradient_humidity-entry] +# name: test_all_entities[indoor][sensor.airgradient_carbon_dioxide_automatic_baseline_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_carbon_dioxide_automatic_baseline_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Carbon dioxide automatic baseline calibration', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co2_automatic_baseline_calibration_days', + 'unique_id': '84fce612f5b8-co2_automatic_baseline_calibration_days', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_carbon_dioxide_automatic_baseline_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Airgradient Carbon dioxide automatic baseline calibration', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airgradient_carbon_dioxide_automatic_baseline_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_display_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_display_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display brightness', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'display_brightness', + 'unique_id': '84fce612f5b8-display_brightness', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_display_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient Display brightness', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.airgradient_display_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# 
--- +# name: test_all_entities[indoor][sensor.airgradient_display_pm_standard-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'ugm3', + 'us_aqi', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_display_pm_standard', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Display PM standard', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'display_pm_standard', + 'unique_id': '84fce612f5b8-display_pm_standard', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_display_pm_standard-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Airgradient Display PM standard', + 'options': list([ + 'ugm3', + 'us_aqi', + ]), + }), + 'context': , + 'entity_id': 'sensor.airgradient_display_pm_standard', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ugm3', + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_display_temperature_unit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'c', + 'f', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_display_temperature_unit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Display temperature unit', + 'platform': 
'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'display_temperature_unit', + 'unique_id': '84fce612f5b8-display_temperature_unit', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_display_temperature_unit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Airgradient Display temperature unit', + 'options': list([ + 'c', + 'f', + ]), + }), + 'context': , + 'entity_id': 'sensor.airgradient_display_temperature_unit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'c', + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -85,7 +292,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_all_entities[sensor.airgradient_humidity-state] +# name: test_all_entities[indoor][sensor.airgradient_humidity-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'humidity', @@ -101,7 +308,112 @@ 'state': '48.0', }) # --- -# name: test_all_entities[sensor.airgradient_nox_index-entry] +# name: test_all_entities[indoor][sensor.airgradient_led_bar_brightness-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_led_bar_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED bar brightness', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_bar_brightness', + 'unique_id': '84fce612f5b8-led_bar_brightness', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_all_entities[indoor][sensor.airgradient_led_bar_brightness-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient LED bar brightness', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.airgradient_led_bar_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_led_bar_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'co2', + 'pm', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_led_bar_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'LED bar mode', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_bar_mode', + 'unique_id': '84fce612f5b8-led_bar_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_led_bar_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Airgradient LED bar mode', + 'options': list([ + 'off', + 'co2', + 'pm', + ]), + }), + 'context': , + 'entity_id': 'sensor.airgradient_led_bar_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'co2', + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_nox_index-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -136,7 +448,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_entities[sensor.airgradient_nox_index-state] +# name: test_all_entities[indoor][sensor.airgradient_nox_index-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 
'friendly_name': 'Airgradient NOx index', @@ -150,7 +462,55 @@ 'state': '1', }) # --- -# name: test_all_entities[sensor.airgradient_pm0_3-entry] +# name: test_all_entities[indoor][sensor.airgradient_nox_index_learning_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_nox_index_learning_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'NOx index learning offset', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nox_learning_offset', + 'unique_id': '84fce612f5b8-nox_learning_offset', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_nox_index_learning_offset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Airgradient NOx index learning offset', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airgradient_nox_index_learning_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_pm0_3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -185,7 +545,7 @@ 'unit_of_measurement': 'particles/dL', }) # --- -# name: test_all_entities[sensor.airgradient_pm0_3-state] +# name: test_all_entities[indoor][sensor.airgradient_pm0_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient PM0.3', @@ -200,57 +560,7 @@ 'state': '270', }) # --- -# name: test_all_entities[sensor.airgradient_pm0_3_count-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.airgradient_pm0_3_count', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'PM0.3 count', - 'platform': 'airgradient', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'pm003_count', - 'unique_id': '84fce612f5b8-pm003', - 'unit_of_measurement': 'particles/dL', - }) -# --- -# name: test_all_entities[sensor.airgradient_pm0_3_count-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Airgradient PM0.3 count', - 'state_class': , - 'unit_of_measurement': 'particles/dL', - }), - 'context': , - 'entity_id': 'sensor.airgradient_pm0_3_count', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '270', - }) -# --- -# name: test_all_entities[sensor.airgradient_pm1-entry] +# name: test_all_entities[indoor][sensor.airgradient_pm1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -285,7 +595,7 @@ 'unit_of_measurement': 'µg/m³', }) # --- -# name: test_all_entities[sensor.airgradient_pm1-state] +# name: test_all_entities[indoor][sensor.airgradient_pm1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'pm1', @@ -301,7 +611,7 @@ 'state': '22', }) # --- -# name: test_all_entities[sensor.airgradient_pm10-entry] +# name: test_all_entities[indoor][sensor.airgradient_pm10-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -336,7 +646,7 @@ 'unit_of_measurement': 'µg/m³', }) # --- -# name: test_all_entities[sensor.airgradient_pm10-state] +# name: test_all_entities[indoor][sensor.airgradient_pm10-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'pm10', @@ -352,7 +662,7 @@ 'state': '41', }) # --- 
-# name: test_all_entities[sensor.airgradient_pm2_5-entry] +# name: test_all_entities[indoor][sensor.airgradient_pm2_5-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -387,7 +697,7 @@ 'unit_of_measurement': 'µg/m³', }) # --- -# name: test_all_entities[sensor.airgradient_pm2_5-state] +# name: test_all_entities[indoor][sensor.airgradient_pm2_5-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'pm25', @@ -403,7 +713,7 @@ 'state': '34', }) # --- -# name: test_all_entities[sensor.airgradient_raw_nox-entry] +# name: test_all_entities[indoor][sensor.airgradient_raw_nox-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -438,7 +748,7 @@ 'unit_of_measurement': 'ticks', }) # --- -# name: test_all_entities[sensor.airgradient_raw_nox-state] +# name: test_all_entities[indoor][sensor.airgradient_raw_nox-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient Raw NOx', @@ -453,7 +763,7 @@ 'state': '16931', }) # --- -# name: test_all_entities[sensor.airgradient_raw_voc-entry] +# name: test_all_entities[indoor][sensor.airgradient_raw_voc-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -488,7 +798,7 @@ 'unit_of_measurement': 'ticks', }) # --- -# name: test_all_entities[sensor.airgradient_raw_voc-state] +# name: test_all_entities[indoor][sensor.airgradient_raw_voc-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient Raw VOC', @@ -503,7 +813,7 @@ 'state': '31792', }) # --- -# name: test_all_entities[sensor.airgradient_signal_strength-entry] +# name: test_all_entities[indoor][sensor.airgradient_signal_strength-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -538,7 +848,7 @@ 'unit_of_measurement': 'dBm', }) # --- -# name: test_all_entities[sensor.airgradient_signal_strength-state] +# name: test_all_entities[indoor][sensor.airgradient_signal_strength-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'signal_strength', @@ -554,7 +864,7 @@ 'state': 
'-52', }) # --- -# name: test_all_entities[sensor.airgradient_temperature-entry] +# name: test_all_entities[indoor][sensor.airgradient_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -589,7 +899,7 @@ 'unit_of_measurement': , }) # --- -# name: test_all_entities[sensor.airgradient_temperature-state] +# name: test_all_entities[indoor][sensor.airgradient_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', @@ -605,7 +915,7 @@ 'state': '27.96', }) # --- -# name: test_all_entities[sensor.airgradient_voc_index-entry] +# name: test_all_entities[indoor][sensor.airgradient_voc_index-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -640,7 +950,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_all_entities[sensor.airgradient_voc_index-state] +# name: test_all_entities[indoor][sensor.airgradient_voc_index-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Airgradient VOC index', @@ -654,3 +964,444 @@ 'state': '99', }) # --- +# name: test_all_entities[indoor][sensor.airgradient_voc_index_learning_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_voc_index_learning_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VOC index learning offset', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tvoc_learning_offset', + 'unique_id': '84fce612f5b8-tvoc_learning_offset', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[indoor][sensor.airgradient_voc_index_learning_offset-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Airgradient VOC index learning offset', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airgradient_voc_index_learning_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_carbon_dioxide_automatic_baseline_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_carbon_dioxide_automatic_baseline_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon dioxide automatic baseline calibration', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co2_automatic_baseline_calibration_days', + 'unique_id': '84fce612f5b8-co2_automatic_baseline_calibration_days', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_carbon_dioxide_automatic_baseline_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Airgradient Carbon dioxide automatic baseline calibration', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airgradient_carbon_dioxide_automatic_baseline_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_nox_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , 
+ 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airgradient_nox_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'NOx index', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nitrogen_index', + 'unique_id': '84fce612f5b8-nitrogen_index', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_nox_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient NOx index', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.airgradient_nox_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_nox_index_learning_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_nox_index_learning_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'NOx index learning offset', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nox_learning_offset', + 'unique_id': '84fce612f5b8-nox_learning_offset', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_nox_index_learning_offset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Airgradient NOx index learning offset', + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airgradient_nox_index_learning_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_raw_nox-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airgradient_raw_nox', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Raw NOx', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'raw_nitrogen', + 'unique_id': '84fce612f5b8-nox_raw', + 'unit_of_measurement': 'ticks', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_raw_nox-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient Raw NOx', + 'state_class': , + 'unit_of_measurement': 'ticks', + }), + 'context': , + 'entity_id': 'sensor.airgradient_raw_nox', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16359', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_raw_voc-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airgradient_raw_voc', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Raw VOC', + 
'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'raw_total_volatile_organic_component', + 'unique_id': '84fce612f5b8-tvoc_raw', + 'unit_of_measurement': 'ticks', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_raw_voc-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient Raw VOC', + 'state_class': , + 'unit_of_measurement': 'ticks', + }), + 'context': , + 'entity_id': 'sensor.airgradient_raw_voc', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30802', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Signal strength', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '84fce612f5b8-signal_strength', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'Airgradient Signal strength', + 'state_class': , + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.airgradient_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-64', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_voc_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airgradient_voc_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VOC index', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_volatile_organic_component_index', + 'unique_id': '84fce612f5b8-tvoc', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_voc_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient VOC index', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.airgradient_voc_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49', + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_voc_index_learning_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.airgradient_voc_index_learning_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VOC index learning offset', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tvoc_learning_offset', + 'unique_id': '84fce612f5b8-tvoc_learning_offset', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[outdoor][sensor.airgradient_voc_index_learning_offset-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Airgradient VOC index learning offset', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airgradient_voc_index_learning_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- diff --git a/tests/components/airgradient/snapshots/test_switch.ambr b/tests/components/airgradient/snapshots/test_switch.ambr new file mode 100644 index 00000000000..752355dbe97 --- /dev/null +++ b/tests/components/airgradient/snapshots/test_switch.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[switch.airgradient_post_data_to_airgradient-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.airgradient_post_data_to_airgradient', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Post data to Airgradient', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'post_data_to_airgradient', + 'unique_id': '84fce612f5b8-post_data_to_airgradient', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.airgradient_post_data_to_airgradient-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Airgradient Post data to Airgradient', + }), + 'context': , + 'entity_id': 'switch.airgradient_post_data_to_airgradient', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/airgradient/snapshots/test_update.ambr b/tests/components/airgradient/snapshots/test_update.ambr new file mode 100644 index 
00000000000..c639a97d5dd --- /dev/null +++ b/tests/components/airgradient/snapshots/test_update.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_all_entities[update.airgradient_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.airgradient_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '84fce612f5b8-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[update.airgradient_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/airgradient/icon.png', + 'friendly_name': 'Airgradient Firmware', + 'in_progress': False, + 'installed_version': '3.1.1', + 'latest_version': '3.1.4', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.airgradient_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/airgradient/test_button.py b/tests/components/airgradient/test_button.py new file mode 100644 index 00000000000..7901c3a067b --- /dev/null +++ b/tests/components/airgradient/test_button.py @@ -0,0 +1,99 @@ +"""Tests for the AirGradient button platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from airgradient import Config +from freezegun.api import FrozenDateTimeFactory 
+from syrupy import SnapshotAssertion + +from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + airgradient_devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_pressing_button( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pressing button.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.airgradient_calibrate_co2_sensor", + }, + blocking=True, + ) + mock_airgradient_client.request_co2_calibration.assert_called_once() + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.airgradient_test_led_bar", + }, + blocking=True, + ) + mock_airgradient_client.request_led_bar_test.assert_called_once() + + +async def test_cloud_creates_no_button( + hass: HomeAssistant, + mock_cloud_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test cloud configuration control.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, 
mock_config_entry) + + assert len(hass.states.async_all()) == 0 + + mock_cloud_airgradient_client.get_config.return_value = Config.from_json( + load_fixture("get_config_local.json", DOMAIN) + ) + + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 2 + + mock_cloud_airgradient_client.get_config.return_value = Config.from_json( + load_fixture("get_config_cloud.json", DOMAIN) + ) + + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 217d2ac0e8c..222ac5d04af 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -3,8 +3,11 @@ from ipaddress import ip_address from unittest.mock import AsyncMock -from airgradient import AirGradientConnectionError, ConfigurationControl -from mashumaro import MissingField +from airgradient import ( + AirGradientConnectionError, + AirGradientParseError, + ConfigurationControl, +) from homeassistant.components.airgradient import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo @@ -141,9 +144,7 @@ async def test_flow_old_firmware_version( mock_setup_entry: AsyncMock, ) -> None: """Test flow with old firmware version.""" - mock_airgradient_client.get_current_measures.side_effect = MissingField( - "", object, object - ) + mock_airgradient_client.get_current_measures.side_effect = AirGradientParseError result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/airgradient/test_init.py b/tests/components/airgradient/test_init.py index 273f425f4fc..408e6f5f3ba 100644 --- a/tests/components/airgradient/test_init.py +++ b/tests/components/airgradient/test_init.py @@ -16,7 +16,7 @@ from tests.common import MockConfigEntry 
async def test_device_info( hass: HomeAssistant, snapshot: SnapshotAssertion, - mock_airgradient_client: AsyncMock, + airgradient_devices: AsyncMock, mock_config_entry: MockConfigEntry, device_registry: dr.DeviceRegistry, ) -> None: diff --git a/tests/components/airgradient/test_number.py b/tests/components/airgradient/test_number.py new file mode 100644 index 00000000000..0803c0d437f --- /dev/null +++ b/tests/components/airgradient/test_number.py @@ -0,0 +1,101 @@ +"""Tests for the AirGradient number platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from airgradient import Config +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 50}, + target={ATTR_ENTITY_ID: "number.airgradient_display_brightness"}, + blocking=True, + ) + mock_airgradient_client.set_display_brightness.assert_called_once() + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 50}, + target={ATTR_ENTITY_ID: "number.airgradient_led_bar_brightness"}, + blocking=True, + ) + mock_airgradient_client.set_led_bar_brightness.assert_called_once() + + +async def test_cloud_creates_no_number( + hass: HomeAssistant, + mock_cloud_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test cloud configuration control.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, mock_config_entry) + + assert len(hass.states.async_all()) == 0 + + mock_cloud_airgradient_client.get_config.return_value = Config.from_json( + load_fixture("get_config_local.json", DOMAIN) + ) + + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await 
hass.async_block_till_done() + + assert len(hass.states.async_all()) == 2 + + mock_cloud_airgradient_client.get_config.return_value = Config.from_json( + load_fixture("get_config_cloud.json", DOMAIN) + ) + + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/airgradient/test_select.py b/tests/components/airgradient/test_select.py index 986295bd245..61679a15c07 100644 --- a/tests/components/airgradient/test_select.py +++ b/tests/components/airgradient/test_select.py @@ -1,8 +1,10 @@ """Tests for the AirGradient select platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch -from airgradient import ConfigurationControl, Measures +from airgradient import Config +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion @@ -13,19 +15,23 @@ from homeassistant.components.select import ( ) from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry, load_fixture, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_all_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, - mock_airgradient_client: AsyncMock, + airgradient_devices: AsyncMock, mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, ) -> None: @@ -36,24 +42,6 @@ async def test_all_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_all_entities_outdoor( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_airgradient_client: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - mock_airgradient_client.get_current_measures.return_value = Measures.from_json( - load_fixture("current_measures_outdoor.json", DOMAIN) - ) - with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.SELECT]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - async def test_setting_value( hass: HomeAssistant, mock_airgradient_client: AsyncMock, @@ -75,37 +63,34 @@ async def test_setting_value( assert mock_airgradient_client.get_config.call_count == 2 -async def test_setting_protected_value( +async def test_cloud_creates_no_number( hass: HomeAssistant, mock_cloud_airgradient_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: - """Test setting protected value.""" - await setup_integration(hass, mock_config_entry) + """Test cloud configuration control.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, 
mock_config_entry) - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.airgradient_display_temperature_unit", - ATTR_OPTION: "c", - }, - blocking=True, - ) - mock_cloud_airgradient_client.set_temperature_unit.assert_not_called() + assert len(hass.states.async_all()) == 1 - mock_cloud_airgradient_client.get_config.return_value.configuration_control = ( - ConfigurationControl.LOCAL + mock_cloud_airgradient_client.get_config.return_value = Config.from_json( + load_fixture("get_config_local.json", DOMAIN) ) - await hass.services.async_call( - SELECT_DOMAIN, - SERVICE_SELECT_OPTION, - { - ATTR_ENTITY_ID: "select.airgradient_display_temperature_unit", - ATTR_OPTION: "c", - }, - blocking=True, + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 7 + + mock_cloud_airgradient_client.get_config.return_value = Config.from_json( + load_fixture("get_config_cloud.json", DOMAIN) ) - mock_cloud_airgradient_client.set_temperature_unit.assert_called_once_with("c") + + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 1 diff --git a/tests/components/airgradient/test_sensor.py b/tests/components/airgradient/test_sensor.py index 65c96a0669f..c2e53ef4de2 100644 --- a/tests/components/airgradient/test_sensor.py +++ b/tests/components/airgradient/test_sensor.py @@ -27,7 +27,7 @@ from tests.common import ( async def test_all_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, - mock_airgradient_client: AsyncMock, + airgradient_devices: AsyncMock, mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, ) -> None: @@ -53,7 +53,7 @@ async def test_create_entities( assert len(hass.states.async_all()) == 0 mock_airgradient_client.get_current_measures.return_value = 
Measures.from_json( - load_fixture("current_measures.json", DOMAIN) + load_fixture("current_measures_indoor.json", DOMAIN) ) freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) diff --git a/tests/components/airgradient/test_switch.py b/tests/components/airgradient/test_switch.py new file mode 100644 index 00000000000..20a1cb7470b --- /dev/null +++ b/tests/components/airgradient/test_switch.py @@ -0,0 +1,101 @@ +"""Tests for the AirGradient switch platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from airgradient import Config +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + target={ATTR_ENTITY_ID: "switch.airgradient_post_data_to_airgradient"}, + blocking=True, + ) + mock_airgradient_client.enable_sharing_data.assert_called_once() + mock_airgradient_client.enable_sharing_data.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + target={ATTR_ENTITY_ID: "switch.airgradient_post_data_to_airgradient"}, + blocking=True, + ) + mock_airgradient_client.enable_sharing_data.assert_called_once() + + +async def test_cloud_creates_no_switch( + hass: HomeAssistant, + mock_cloud_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test cloud configuration control.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_config_entry) + + assert len(hass.states.async_all()) == 0 + + mock_cloud_airgradient_client.get_config.return_value = Config.from_json( + load_fixture("get_config_local.json", DOMAIN) + ) + + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await 
hass.async_block_till_done() + + assert len(hass.states.async_all()) == 1 + + mock_cloud_airgradient_client.get_config.return_value = Config.from_json( + load_fixture("get_config_cloud.json", DOMAIN) + ) + + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/airgradient/test_update.py b/tests/components/airgradient/test_update.py new file mode 100644 index 00000000000..020a9a82a71 --- /dev/null +++ b/tests/components/airgradient/test_update.py @@ -0,0 +1,69 @@ +"""Tests for the AirGradient update platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.UPDATE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_mechanism( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test update entity.""" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_ON + assert state.attributes["installed_version"] == "3.1.1" + assert 
state.attributes["latest_version"] == "3.1.4" + mock_airgradient_client.get_latest_firmware_version.assert_called_once() + mock_airgradient_client.get_latest_firmware_version.reset_mock() + + mock_airgradient_client.get_current_measures.return_value.firmware_version = "3.1.4" + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "3.1.4" + assert state.attributes["latest_version"] == "3.1.4" + + mock_airgradient_client.get_latest_firmware_version.return_value = "3.1.5" + + freezer.tick(timedelta(minutes=59)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_airgradient_client.get_latest_firmware_version.assert_called_once() + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_ON + assert state.attributes["installed_version"] == "3.1.4" + assert state.attributes["latest_version"] == "3.1.5" diff --git a/tests/components/airly/test_diagnostics.py b/tests/components/airly/test_diagnostics.py index 7364824e594..9a61bf5abee 100644 --- a/tests/components/airly/test_diagnostics.py +++ b/tests/components/airly/test_diagnostics.py @@ -1,6 +1,7 @@ """Test Airly diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -22,4 +23,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airnow/conftest.py b/tests/components/airnow/conftest.py index 676595250f1..c5d23fa7289 100644 --- a/tests/components/airnow/conftest.py +++ b/tests/components/airnow/conftest.py @@ -1,10 +1,10 @@ """Define fixtures for AirNow tests.""" +from collections.abc import Generator from 
typing import Any from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.airnow import DOMAIN from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS diff --git a/tests/components/airnow/test_diagnostics.py b/tests/components/airnow/test_diagnostics.py index 7329398e789..eb79dabe51a 100644 --- a/tests/components/airnow/test_diagnostics.py +++ b/tests/components/airnow/test_diagnostics.py @@ -4,6 +4,7 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -27,7 +28,6 @@ async def test_entry_diagnostics( return_value="PST", ): assert await hass.config_entries.async_setup(config_entry.entry_id) - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airq/conftest.py b/tests/components/airq/conftest.py index 5df032c0308..a132153a76f 100644 --- a/tests/components/airq/conftest.py +++ b/tests/components/airq/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for air-Q.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/airthings_ble/__init__.py b/tests/components/airthings_ble/__init__.py index 45521903a08..a736fa979e9 100644 --- a/tests/components/airthings_ble/__init__.py +++ b/tests/components/airthings_ble/__init__.py @@ -13,6 +13,7 @@ from airthings_ble import ( from homeassistant.components.airthings_ble.const import DOMAIN from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant from 
homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceRegistry from tests.common import MockConfigEntry, MockEntity @@ -225,7 +226,7 @@ VOC_V3 = MockEntity( ) -def create_entry(hass): +def create_entry(hass: HomeAssistant) -> MockConfigEntry: """Create a config entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/airtouch5/__init__.py b/tests/components/airtouch5/__init__.py index 2b76786e7e5..567be6af774 100644 --- a/tests/components/airtouch5/__init__.py +++ b/tests/components/airtouch5/__init__.py @@ -1 +1,13 @@ """Tests for the Airtouch 5 integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/airtouch5/conftest.py b/tests/components/airtouch5/conftest.py index d6d55689f17..fab26e3f6cc 100644 --- a/tests/components/airtouch5/conftest.py +++ b/tests/components/airtouch5/conftest.py @@ -1,9 +1,23 @@ """Common fixtures for the Airtouch 5 tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch +from airtouch5py.data_packet_factory import DataPacketFactory +from airtouch5py.packets.ac_ability import AcAbility +from airtouch5py.packets.ac_status import AcFanSpeed, AcMode, AcPowerState, AcStatus +from airtouch5py.packets.zone_name import ZoneName +from airtouch5py.packets.zone_status import ( + ControlMethod, + ZonePowerState, + ZoneStatusZone, +) import pytest -from typing_extensions import Generator + +from homeassistant.components.airtouch5.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry @pytest.fixture @@ -13,3 +27,107 @@ def mock_setup_entry() -> Generator[AsyncMock]: 
"homeassistant.components.airtouch5.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock the config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="1.1.1.1", + data={ + CONF_HOST: "1.1.1.1", + }, + ) + + +@pytest.fixture +def mock_airtouch5_client() -> Generator[AsyncMock]: + """Mock an Airtouch5 client.""" + + with ( + patch( + "homeassistant.components.airtouch5.Airtouch5SimpleClient", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.airtouch5.config_flow.Airtouch5SimpleClient", + new=mock_client, + ), + ): + client = mock_client.return_value + + # Default values for the tests using this mock : + client.data_packet_factory = DataPacketFactory() + client.ac = [ + AcAbility( + ac_number=1, + ac_name="AC 1", + start_zone_number=1, + zone_count=2, + supports_mode_cool=True, + supports_mode_fan=True, + supports_mode_dry=True, + supports_mode_heat=True, + supports_mode_auto=True, + supports_fan_speed_intelligent_auto=True, + supports_fan_speed_turbo=True, + supports_fan_speed_powerful=True, + supports_fan_speed_high=True, + supports_fan_speed_medium=True, + supports_fan_speed_low=True, + supports_fan_speed_quiet=True, + supports_fan_speed_auto=True, + min_cool_set_point=15, + max_cool_set_point=25, + min_heat_set_point=20, + max_heat_set_point=30, + ) + ] + client.latest_ac_status = { + 1: AcStatus( + ac_power_state=AcPowerState.ON, + ac_number=1, + ac_mode=AcMode.AUTO, + ac_fan_speed=AcFanSpeed.AUTO, + ac_setpoint=24, + turbo_active=False, + bypass_active=False, + spill_active=False, + timer_set=False, + temperature=24, + error_code=0, + ) + } + + client.zones = [ZoneName(1, "Zone 1"), ZoneName(2, "Zone 2")] + client.latest_zone_status = { + 1: ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.PERCENTAGE_CONTROL, + open_percentage=0.9, + set_point=24, + 
has_sensor=False, + temperature=24, + spill_active=False, + is_low_battery=False, + ), + 2: ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.TEMPERATURE_CONTROL, + open_percentage=1, + set_point=24, + has_sensor=True, + temperature=24, + spill_active=False, + is_low_battery=False, + ), + } + + client.connection_state_callbacks = [] + client.zone_status_callbacks = [] + client.ac_status_callbacks = [] + + yield client diff --git a/tests/components/airtouch5/snapshots/test_cover.ambr b/tests/components/airtouch5/snapshots/test_cover.ambr new file mode 100644 index 00000000000..a8e57f69527 --- /dev/null +++ b/tests/components/airtouch5/snapshots/test_cover.ambr @@ -0,0 +1,99 @@ +# serializer version: 1 +# name: test_all_entities[cover.zone_1_damper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.zone_1_damper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Damper', + 'platform': 'airtouch5', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'damper', + 'unique_id': 'zone_1_open_percentage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[cover.zone_1_damper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 90, + 'device_class': 'damper', + 'friendly_name': 'Zone 1 Damper', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.zone_1_damper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_all_entities[cover.zone_2_damper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.zone_2_damper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Damper', + 'platform': 'airtouch5', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'damper', + 'unique_id': 'zone_2_open_percentage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[cover.zone_2_damper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 100, + 'device_class': 'damper', + 'friendly_name': 'Zone 2 Damper', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.zone_2_damper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/airtouch5/test_cover.py b/tests/components/airtouch5/test_cover.py new file mode 100644 index 00000000000..295535cd95d --- /dev/null +++ b/tests/components/airtouch5/test_cover.py @@ -0,0 +1,143 @@ +"""Tests for the Airtouch5 cover platform.""" + +from collections.abc import Callable +from unittest.mock import AsyncMock, patch + +from airtouch5py.packets.zone_status import ( + ControlMethod, + ZonePowerState, + ZoneStatusZone, +) +from syrupy import SnapshotAssertion + +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + STATE_OPEN, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_CLOSED, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +COVER_ENTITY_ID = "cover.zone_1_damper" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch("homeassistant.components.airtouch5.PLATFORMS", [Platform.COVER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_cover_actions( + hass: HomeAssistant, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the actions of the Airtouch5 covers.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: COVER_ENTITY_ID, ATTR_POSITION: 50}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + +async def test_cover_callbacks( + hass: HomeAssistant, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the callbacks of the Airtouch5 covers.""" + + await setup_integration(hass, mock_config_entry) + + # We find the callback method on the mock client + zone_status_callback: Callable[[dict[int, ZoneStatusZone]], None] = ( + mock_airtouch5_client.zone_status_callbacks[2] + ) + + # Define a method to 
simply call it + async def _call_zone_status_callback(open_percentage: int) -> None: + zsz = ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.PERCENTAGE_CONTROL, + open_percentage=open_percentage, + set_point=None, + has_sensor=False, + temperature=None, + spill_active=False, + is_low_battery=False, + ) + zone_status_callback({1: zsz}) + await hass.async_block_till_done() + + # And call it to effectively launch the callback as the server would do + + # Partly open + await _call_zone_status_callback(0.7) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 70 + + # Fully open + await _call_zone_status_callback(1) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 + + # Fully closed + await _call_zone_status_callback(0.0) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_CLOSED + assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 + + # Partly reopened + await _call_zone_status_callback(0.3) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 30 diff --git a/tests/components/airvisual/conftest.py b/tests/components/airvisual/conftest.py index a82dc0ab78c..cc49b60e0d8 100644 --- a/tests/components/airvisual/conftest.py +++ b/tests/components/airvisual/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for AirVisual.""" +from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.airvisual import ( CONF_CITY, diff --git a/tests/components/airvisual/test_diagnostics.py b/tests/components/airvisual/test_diagnostics.py 
index 072e4559705..0253f102c59 100644 --- a/tests/components/airvisual/test_diagnostics.py +++ b/tests/components/airvisual/test_diagnostics.py @@ -1,6 +1,7 @@ """Test AirVisual diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -16,7 +17,6 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airvisual_pro/conftest.py b/tests/components/airvisual_pro/conftest.py index d25e9821d91..4acf9188889 100644 --- a/tests/components/airvisual_pro/conftest.py +++ b/tests/components/airvisual_pro/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for AirVisual Pro.""" +from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.airvisual_pro.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD diff --git a/tests/components/airvisual_pro/test_diagnostics.py b/tests/components/airvisual_pro/test_diagnostics.py index dd87d00be30..372b62eaf38 100644 --- a/tests/components/airvisual_pro/test_diagnostics.py +++ b/tests/components/airvisual_pro/test_diagnostics.py @@ -1,6 +1,7 @@ """Test AirVisual Pro diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -16,7 +17,6 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await 
get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index adf0176765c..2adf50558e0 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -267,10 +267,6 @@ 'temp-set': 45, 'temp-unit': 0, }), - 'new-systems': list([ - ]), - 'new-zones': list([ - ]), 'num-systems': 3, 'num-zones': 7, 'systems': dict({ diff --git a/tests/components/airzone/test_climate.py b/tests/components/airzone/test_climate.py index fa972bd3899..0f23c151e0e 100644 --- a/tests/components/airzone/test_climate.py +++ b/tests/components/airzone/test_climate.py @@ -248,7 +248,7 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: ), ): async_fire_time_changed(hass, utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("climate.salon") assert state.attributes.get(ATTR_MAX_TEMP) == 25 diff --git a/tests/components/airzone/test_coordinator.py b/tests/components/airzone/test_coordinator.py index 06c77bebb81..583758a6bee 100644 --- a/tests/components/airzone/test_coordinator.py +++ b/tests/components/airzone/test_coordinator.py @@ -8,6 +8,7 @@ from aioairzone.exceptions import ( InvalidMethod, SystemOutOfRange, ) +from freezegun.api import FrozenDateTimeFactory from homeassistant.components.airzone.const import DOMAIN from homeassistant.components.airzone.coordinator import SCAN_INTERVAL @@ -15,7 +16,7 @@ from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.util.dt import utcnow -from .util import CONFIG, HVAC_MOCK, HVAC_VERSION_MOCK +from .util import CONFIG, HVAC_MOCK, HVAC_MOCK_NEW_ZONES, HVAC_VERSION_MOCK from tests.common import MockConfigEntry, 
async_fire_time_changed @@ -64,3 +65,62 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: state = hass.states.get("sensor.despacho_temperature") assert state.state == STATE_UNAVAILABLE + + +async def test_coordinator_new_devices( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test new devices on coordinator update.""" + + config_entry = MockConfigEntry( + data=CONFIG, + domain=DOMAIN, + unique_id="airzone_unique_id", + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_dhw", + side_effect=HotWaterNotAvailable, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_hvac", + return_value=HVAC_MOCK_NEW_ZONES, + ) as mock_hvac, + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_hvac_systems", + side_effect=SystemOutOfRange, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_version", + return_value=HVAC_VERSION_MOCK, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_webserver", + side_effect=InvalidMethod, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + mock_hvac.assert_called_once() + mock_hvac.reset_mock() + + state = hass.states.get("sensor.salon_temperature") + assert state.state == "19.6" + + state = hass.states.get("sensor.dorm_ppal_temperature") + assert state is None + + mock_hvac.return_value = HVAC_MOCK + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_hvac.assert_called_once() + + state = hass.states.get("sensor.salon_temperature") + assert state.state == "19.6" + + state = hass.states.get("sensor.dorm_ppal_temperature") + assert state.state == "21.1" diff --git a/tests/components/airzone/test_diagnostics.py b/tests/components/airzone/test_diagnostics.py index 6a03b9f1985..bca75bca778 100644 --- a/tests/components/airzone/test_diagnostics.py +++ 
b/tests/components/airzone/test_diagnostics.py @@ -4,6 +4,7 @@ from unittest.mock import patch from aioairzone.const import RAW_HVAC, RAW_VERSION, RAW_WEBSERVER from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.airzone.const import DOMAIN from homeassistant.core import HomeAssistant @@ -37,4 +38,4 @@ async def test_config_entry_diagnostics( }, ): result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airzone/test_sensor.py b/tests/components/airzone/test_sensor.py index 3d75599d2d2..352994d6313 100644 --- a/tests/components/airzone/test_sensor.py +++ b/tests/components/airzone/test_sensor.py @@ -113,7 +113,7 @@ async def test_airzone_sensors_availability(hass: HomeAssistant) -> None: ), ): async_fire_time_changed(hass, utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.dorm_ppal_temperature") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/airzone/util.py b/tests/components/airzone/util.py index 6e3e0eccc8f..2cdb7a9c6f9 100644 --- a/tests/components/airzone/util.py +++ b/tests/components/airzone/util.py @@ -1,5 +1,6 @@ """Tests for the Airzone integration.""" +from copy import deepcopy from unittest.mock import patch from aioairzone.const import ( @@ -274,6 +275,16 @@ HVAC_MOCK = { ] } +HVAC_MOCK_NEW_ZONES = { + API_SYSTEMS: [ + { + API_DATA: [ + deepcopy(HVAC_MOCK[API_SYSTEMS][0][API_DATA][0]), + ] + } + ] +} + HVAC_DHW_MOCK = { API_DATA: { API_SYSTEM_ID: 0, diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index 31065d68a47..26a606bde42 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ 
b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -114,6 +114,7 @@ 'installation': 'installation1', 'is-connected': True, 'mode': 3, + 'model': 'Aidoo', 'modes': list([ 1, 2, @@ -156,6 +157,7 @@ 'installation': 'installation1', 'is-connected': True, 'mode': 2, + 'model': 'Aidoo Pro', 'modes': list([ 1, 2, @@ -345,6 +347,7 @@ 'temperature-setpoint-max': 30.0, 'temperature-setpoint-min': 15.0, 'temperature-step': 0.5, + 'user-access': 'admin', 'web-servers': list([ 'webserver1', 'webserver2', @@ -370,10 +373,12 @@ '_id': 'error-id', }), ]), + 'firmware': '3.35', 'id': 'system1', 'installation': 'installation1', 'is-connected': True, 'mode': 2, + 'model': 'c6', 'modes': list([ 2, 3, @@ -391,10 +396,12 @@ 'webserver1': dict({ 'available': True, 'connection-date': '2023-05-07T12:55:51.000Z', + 'cpu-usage': 32, 'disconnection-date': '2023-01-01T22:26:55.376Z', 'firmware': '3.44', 'id': 'webserver1', 'installation': 'installation1', + 'memory-free': 42616, 'name': 'WebServer 11:22:33:44:55:66', 'type': 'ws_az', 'wifi-channel': 36, @@ -494,6 +501,8 @@ 'temperature-setpoint-stop-air': 24.0, 'temperature-setpoint-vent-air': 24.0, 'temperature-step': 0.5, + 'thermostat-fw': '3.52', + 'thermostat-model': 'blueface', 'web-server': 'webserver1', 'ws-connected': True, 'zone': 1, @@ -557,6 +566,11 @@ 'temperature-setpoint-stop-air': 24.0, 'temperature-setpoint-vent-air': 24.0, 'temperature-step': 0.5, + 'thermostat-battery': 54, + 'thermostat-battery-low': False, + 'thermostat-coverage': 76, + 'thermostat-fw': '3.33', + 'thermostat-model': 'thinkradio', 'web-server': 'webserver1', 'ws-connected': True, 'zone': 2, diff --git a/tests/components/airzone_cloud/test_binary_sensor.py b/tests/components/airzone_cloud/test_binary_sensor.py index 8e065821057..bb2d0f78060 100644 --- a/tests/components/airzone_cloud/test_binary_sensor.py +++ b/tests/components/airzone_cloud/test_binary_sensor.py @@ -47,6 +47,9 @@ async def test_airzone_create_binary_sensors(hass: 
HomeAssistant) -> None: state = hass.states.get("binary_sensor.dormitorio_air_quality_active") assert state.state == STATE_OFF + state = hass.states.get("binary_sensor.dormitorio_battery") + assert state.state == STATE_OFF + state = hass.states.get("binary_sensor.dormitorio_floor_demand") assert state.state == STATE_OFF diff --git a/tests/components/airzone_cloud/test_config_flow.py b/tests/components/airzone_cloud/test_config_flow.py index 86a70ced51a..04e253eb494 100644 --- a/tests/components/airzone_cloud/test_config_flow.py +++ b/tests/components/airzone_cloud/test_config_flow.py @@ -15,6 +15,7 @@ from .util import ( GET_INSTALLATION_MOCK, GET_INSTALLATIONS_MOCK, WS_ID, + mock_get_device_config, mock_get_device_status, mock_get_webserver, ) @@ -28,6 +29,10 @@ async def test_form(hass: HomeAssistant) -> None: "homeassistant.components.airzone_cloud.async_setup_entry", return_value=True, ) as mock_setup_entry, + patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", + side_effect=mock_get_device_config, + ), patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, @@ -99,6 +104,10 @@ async def test_installations_list_error(hass: HomeAssistant) -> None: "homeassistant.components.airzone_cloud.async_setup_entry", return_value=True, ), + patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", + side_effect=mock_get_device_config, + ), patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, diff --git a/tests/components/airzone_cloud/test_coordinator.py b/tests/components/airzone_cloud/test_coordinator.py index b4b7afd6086..e2b80e66672 100644 --- a/tests/components/airzone_cloud/test_coordinator.py +++ b/tests/components/airzone_cloud/test_coordinator.py @@ -14,6 +14,7 @@ from .util import ( CONFIG, GET_INSTALLATION_MOCK, GET_INSTALLATIONS_MOCK, + mock_get_device_config, 
mock_get_device_status, mock_get_webserver, ) @@ -32,6 +33,10 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: config_entry.add_to_hass(hass) with ( + patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", + side_effect=mock_get_device_config, + ) as mock_device_config, patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, @@ -56,11 +61,13 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + mock_device_config.assert_called() mock_device_status.assert_called() mock_installation.assert_awaited_once() mock_installations.assert_called_once() mock_webserver.assert_called() + mock_device_config.reset_mock() mock_device_status.reset_mock() mock_installation.reset_mock() mock_installations.reset_mock() diff --git a/tests/components/airzone_cloud/test_diagnostics.py b/tests/components/airzone_cloud/test_diagnostics.py index 254dba16b09..d3e23fc7f4b 100644 --- a/tests/components/airzone_cloud/test_diagnostics.py +++ b/tests/components/airzone_cloud/test_diagnostics.py @@ -15,6 +15,7 @@ from aioairzone_cloud.const import ( RAW_WEBSERVERS, ) from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.airzone_cloud.const import DOMAIN from homeassistant.const import CONF_ID @@ -111,4 +112,4 @@ async def test_config_entry_diagnostics( return_value=RAW_DATA_MOCK, ): result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airzone_cloud/test_sensor.py b/tests/components/airzone_cloud/test_sensor.py index 31fe52f3302..cf291ec23a6 100644 --- a/tests/components/airzone_cloud/test_sensor.py +++ 
b/tests/components/airzone_cloud/test_sensor.py @@ -21,8 +21,11 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: assert state.state == "20.0" # WebServers - state = hass.states.get("sensor.webserver_11_22_33_44_55_66_signal_strength") - assert state.state == "-56" + state = hass.states.get("sensor.webserver_11_22_33_44_55_66_cpu_usage") + assert state.state == "32" + + state = hass.states.get("sensor.webserver_11_22_33_44_55_66_free_memory") + assert state.state == "42616" state = hass.states.get("sensor.webserver_11_22_33_44_55_67_signal_strength") assert state.state == "-77" @@ -31,6 +34,9 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.dormitorio_air_quality_index") assert state.state == "1" + state = hass.states.get("sensor.dormitorio_battery") + assert state.state == "54" + state = hass.states.get("sensor.dormitorio_pm1") assert state.state == "3" @@ -40,6 +46,9 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.dormitorio_pm10") assert state.state == "3" + state = hass.states.get("sensor.dormitorio_signal_percentage") + assert state.state == "76" + state = hass.states.get("sensor.dormitorio_temperature") assert state.state == "25.0" diff --git a/tests/components/airzone_cloud/util.py b/tests/components/airzone_cloud/util.py index 6e7dad707f1..fb538ea7c8e 100644 --- a/tests/components/airzone_cloud/util.py +++ b/tests/components/airzone_cloud/util.py @@ -3,8 +3,9 @@ from typing import Any from unittest.mock import patch -from aioairzone_cloud.common import OperationMode +from aioairzone_cloud.common import OperationMode, UserAccessType from aioairzone_cloud.const import ( + API_ACCESS_TYPE, API_ACTIVE, API_AIR_ACTIVE, API_AQ_ACTIVE, @@ -23,12 +24,16 @@ from aioairzone_cloud.const import ( API_CELSIUS, API_CONFIG, API_CONNECTION_DATE, + API_CPU_WS, API_DEVICE_ID, API_DEVICES, API_DISCONNECTION_DATE, API_DOUBLE_SET_POINT, API_ERRORS, 
API_FAH, + API_FREE, + API_FREE_MEM, + API_GENERAL, API_GROUP_ID, API_GROUPS, API_HUMIDITY, @@ -44,6 +49,8 @@ from aioairzone_cloud.const import ( API_POWER, API_POWERFUL_MODE, API_RAD_ACTIVE, + API_RADIO_BATTERY_PERCENT, + API_RADIO_COVERAGE_PERCENT, API_RANGE_MAX_AIR, API_RANGE_MIN_AIR, API_RANGE_SP_MAX_ACS, @@ -79,8 +86,12 @@ from aioairzone_cloud.const import ( API_STAT_SSID, API_STATUS, API_STEP, + API_SYSTEM_FW, API_SYSTEM_NUMBER, + API_SYSTEM_TYPE, API_TANK_TEMP, + API_THERMOSTAT_FW, + API_THERMOSTAT_TYPE, API_TYPE, API_WARNINGS, API_WS_CONNECTED, @@ -184,6 +195,7 @@ GET_INSTALLATIONS_MOCK = { { API_INSTALLATION_ID: CONFIG[CONF_ID], API_NAME: "House", + API_ACCESS_TYPE: UserAccessType.ADMIN, API_WS_IDS: [ WS_ID, WS_ID_AIDOO, @@ -202,6 +214,12 @@ GET_WEBSERVER_MOCK = { API_STAT_AP_MAC: "00:00:00:00:00:00", }, API_STATUS: { + API_CPU_WS: { + API_GENERAL: 32, + }, + API_FREE_MEM: { + API_FREE: 42616, + }, API_IS_CONNECTED: True, API_STAT_QUALITY: 4, API_STAT_RSSI: -56, @@ -245,6 +263,30 @@ GET_WEBSERVER_MOCK_AIDOO_PRO = { } +def mock_get_device_config(device: Device) -> dict[str, Any]: + """Mock API device config.""" + + if device.get_id() == "system1": + return { + API_SYSTEM_FW: "3.35", + API_SYSTEM_TYPE: "c6", + } + if device.get_id() == "zone1": + return { + API_THERMOSTAT_FW: "3.52", + API_THERMOSTAT_TYPE: "blueface", + } + if device.get_id() == "zone2": + return { + API_THERMOSTAT_FW: "3.33", + API_THERMOSTAT_TYPE: "thinkradio", + API_RADIO_BATTERY_PERCENT: 54, + API_RADIO_COVERAGE_PERCENT: 76, + } + + return {} + + def mock_get_device_status(device: Device) -> dict[str, Any]: """Mock API device status.""" @@ -470,6 +512,10 @@ async def async_init_integration( config_entry.add_to_hass(hass) with ( + patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", + side_effect=mock_get_device_config, + ), patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, diff 
--git a/tests/components/aladdin_connect/conftest.py b/tests/components/aladdin_connect/conftest.py deleted file mode 100644 index c7e5190d527..00000000000 --- a/tests/components/aladdin_connect/conftest.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Test fixtures for the Aladdin Connect Garage Door integration.""" - -from unittest.mock import AsyncMock, patch - -import pytest -from typing_extensions import Generator - -from homeassistant.components.aladdin_connect import DOMAIN - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.aladdin_connect.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Return an Aladdin Connect config entry.""" - return MockConfigEntry( - domain=DOMAIN, - data={}, - title="test@test.com", - unique_id="aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee", - version=2, - ) diff --git a/tests/components/aladdin_connect/test_config_flow.py b/tests/components/aladdin_connect/test_config_flow.py deleted file mode 100644 index 1537e0f35da..00000000000 --- a/tests/components/aladdin_connect/test_config_flow.py +++ /dev/null @@ -1,225 +0,0 @@ -"""Test the Aladdin Connect Garage Door config flow.""" - -from unittest.mock import AsyncMock - -import pytest - -from homeassistant.components.aladdin_connect.const import ( - DOMAIN, - OAUTH2_AUTHORIZE, - OAUTH2_TOKEN, -) -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigFlowResult -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_entry_oauth2_flow -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry 
-from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - -CLIENT_ID = "1234" -CLIENT_SECRET = "5678" - -EXAMPLE_TOKEN = ( - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhYWFhYWFhYS1iYmJiLWNjY2MtZGRk" - "ZC1lZWVlZWVlZWVlZWUiLCJuYW1lIjoiSm9obiBEb2UiLCJpYXQiOjE1MTYyMzkwMjIsInVzZXJuYW" - "1lIjoidGVzdEB0ZXN0LmNvbSJ9.CTU1YItIrUl8nSM3koJxlFJr5CjLghgc9gS6h45D8dE" -) - - -@pytest.fixture -async def setup_credentials(hass: HomeAssistant) -> None: - """Fixture to setup credentials.""" - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential(CLIENT_ID, CLIENT_SECRET), - ) - - -async def _oauth_actions( - hass: HomeAssistant, - result: ConfigFlowResult, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, -) -> None: - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - - assert result["url"] == ( - f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "refresh_token": "mock-refresh-token", - "access_token": EXAMPLE_TOKEN, - "type": "Bearer", - "expires_in": 60, - }, - ) - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_full_flow( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - setup_credentials: None, - mock_setup_entry: AsyncMock, -) -> None: - """Check full flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, 
context={"source": SOURCE_USER} - ) - await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "test@test.com" - assert result["data"]["token"]["access_token"] == EXAMPLE_TOKEN - assert result["data"]["token"]["refresh_token"] == "mock-refresh-token" - assert result["result"].unique_id == "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_duplicate_entry( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - setup_credentials: None, - mock_config_entry: MockConfigEntry, -) -> None: - """Test we abort with duplicate entry.""" - mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauth( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - setup_credentials: None, - mock_config_entry: MockConfigEntry, - mock_setup_entry: AsyncMock, -) -> None: - """Test reauthentication.""" - mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await 
hass.config_entries.flow.async_configure(result["flow_id"], {}) - await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauth_wrong_account( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - setup_credentials: None, - mock_setup_entry: AsyncMock, -) -> None: - """Test reauthentication with wrong account.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data={}, - title="test@test.com", - unique_id="aaaaaaaa-bbbb-ffff-dddd-eeeeeeeeeeee", - version=2, - ) - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "wrong_account" - - -@pytest.mark.usefixtures("current_request_with_host") -async def test_reauth_old_account( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - setup_credentials: None, - mock_setup_entry: AsyncMock, -) -> None: - """Test reauthentication with old account.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data={}, - title="test@test.com", - unique_id="test@test.com", - version=2, - ) - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - 
"source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - await _oauth_actions(hass, result, hass_client_no_auth, aioclient_mock) - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert config_entry.unique_id == "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" diff --git a/tests/components/aladdin_connect/test_init.py b/tests/components/aladdin_connect/test_init.py new file mode 100644 index 00000000000..b01af287b7b --- /dev/null +++ b/tests/components/aladdin_connect/test_init.py @@ -0,0 +1,50 @@ +"""Tests for the Aladdin Connect integration.""" + +from homeassistant.components.aladdin_connect import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from tests.common import MockConfigEntry + + +async def test_aladdin_connect_repair_issue( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test the Aladdin Connect configuration entry loading/unloading handles the repair.""" + config_entry_1 = MockConfigEntry( + title="Example 1", + domain=DOMAIN, + ) + config_entry_1.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_1.entry_id) + await hass.async_block_till_done() + assert config_entry_1.state is ConfigEntryState.LOADED + + # Add a second one + config_entry_2 = MockConfigEntry( + title="Example 2", + domain=DOMAIN, + ) + config_entry_2.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_2.entry_id) + await hass.async_block_till_done() + + assert config_entry_2.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) + + # 
Remove the first one + await hass.config_entries.async_remove(config_entry_1.entry_id) + await hass.async_block_till_done() + + assert config_entry_1.state is ConfigEntryState.NOT_LOADED + assert config_entry_2.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) + + # Remove the second one + await hass.config_entries.async_remove(config_entry_2.entry_id) + await hass.async_block_till_done() + + assert config_entry_1.state is ConfigEntryState.NOT_LOADED + assert config_entry_2.state is ConfigEntryState.NOT_LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None diff --git a/tests/components/alarm_control_panel/common.py b/tests/components/alarm_control_panel/common.py index 9ec419d8cf0..36e9918f54c 100644 --- a/tests/components/alarm_control_panel/common.py +++ b/tests/components/alarm_control_panel/common.py @@ -27,11 +27,14 @@ from homeassistant.const import ( STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, ) +from homeassistant.core import HomeAssistant from tests.common import MockEntity -async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_disarm( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -42,7 +45,9 @@ async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_DISARM, data, blocking=True) -async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_home( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -53,7 +58,9 @@ async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_HOME, data, blocking=True) -async def async_alarm_arm_away(hass, code=None, 
entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_away( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -64,7 +71,9 @@ async def async_alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_AWAY, data, blocking=True) -async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_night( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -75,7 +84,9 @@ async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_NIGHT, data, blocking=True) -async def async_alarm_arm_vacation(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_vacation( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for vacation mode.""" data = {} if code: @@ -88,7 +99,9 @@ async def async_alarm_arm_vacation(hass, code=None, entity_id=ENTITY_MATCH_ALL): ) -async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_trigger( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -99,7 +112,9 @@ async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_TRIGGER, data, blocking=True) -async def async_alarm_arm_custom_bypass(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_custom_bypass( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: diff --git 
a/tests/components/alarm_control_panel/conftest.py b/tests/components/alarm_control_panel/conftest.py index 620b74dd80e..3e82b935493 100644 --- a/tests/components/alarm_control_panel/conftest.py +++ b/tests/components/alarm_control_panel/conftest.py @@ -1,9 +1,9 @@ """Fixturs for Alarm Control Panel tests.""" +from collections.abc import Generator from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, @@ -129,7 +129,7 @@ async def code_arm_required() -> bool: @pytest.fixture(name="supported_features") -async def lock_supported_features() -> AlarmControlPanelEntityFeature: +async def alarm_control_panel_supported_features() -> AlarmControlPanelEntityFeature: """Return the supported features for the test alarm control panel entity.""" return ( AlarmControlPanelEntityFeature.ARM_AWAY @@ -142,7 +142,7 @@ async def lock_supported_features() -> AlarmControlPanelEntityFeature: @pytest.fixture(name="mock_alarm_control_panel_entity") -async def setup_lock_platform_test_entity( +async def setup_alarm_control_panel_platform_test_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, code_format: CodeFormat | None, diff --git a/tests/components/alexa/test_auth.py b/tests/components/alexa/test_auth.py index 8d4308ba792..b3aa645bfcb 100644 --- a/tests/components/alexa/test_auth.py +++ b/tests/components/alexa/test_auth.py @@ -10,14 +10,14 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def run_auth_get_access_token( - hass, - aioclient_mock, - expires_in, - client_id, - client_secret, - accept_grant_code, - refresh_token, -): + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + expires_in: int, + client_id: str, + client_secret: str, + accept_grant_code: str, + refresh_token: str, +) -> None: """Do auth and request a new token for tests.""" aioclient_mock.post( TEST_TOKEN_URL, diff --git 
a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index 15a4bd6d9a1..162149f095b 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -48,6 +48,41 @@ from .test_common import ( from tests.common import async_mock_service +@pytest.mark.parametrize( + ( + "current_activity", + "activity_list", + ), + [ + ("TV", ["TV", "MUSIC", "DVD"]), + ("TV", ["TV"]), + ], +) +async def test_discovery_remote( + hass: HomeAssistant, current_activity: str, activity_list: list[str] +) -> None: + """Test discory for a remote entity.""" + request = get_new_request("Alexa.Discovery", "Discover") + # setup test device + hass.states.async_set( + "remote.test", + "off", + { + "current_activity": current_activity, + "activity_list": activity_list, + }, + ) + msg = await smart_home.async_handle_message(hass, get_default_config(hass), request) + assert "event" in msg + msg = msg["event"] + assert len(msg["payload"]["endpoints"]) == 1 + endpoint = msg["payload"]["endpoints"][0] + assert endpoint["endpointId"] == "remote#test" + interfaces = {capability["interface"] for capability in endpoint["capabilities"]} + assert "Alexa.PowerController" in interfaces + assert "Alexa.ModeController" in interfaces + + @pytest.mark.parametrize("adjust", ["-5", "5", "-80"]) async def test_api_adjust_brightness(hass: HomeAssistant, adjust: str) -> None: """Test api adjust brightness process.""" @@ -199,7 +234,6 @@ async def test_api_increase_color_temp( ("media_player", "GAME CONSOLE", ["tv", "game console", 10000], 1), ("media_player", "SATELLITE TV", ["satellite-tv", "game console", None], 0), ("media_player", "SATELLITE TV", ["satellite_tv", "game console"], 0), - ("media_player", "BAD DEVICE", ["satellite_tv", "game console"], None), ], ) async def test_api_select_input( @@ -220,18 +254,6 @@ async def test_api_select_input( }, ) - # test where no source matches - if idx is None: - await assert_request_fails( - 
"Alexa.InputController", - "SelectInput", - "media_player#test", - "media_player.select_source", - hass, - payload={"input": payload}, - ) - return - call, _ = await assert_request_calls_service( "Alexa.InputController", "SelectInput", @@ -243,6 +265,130 @@ async def test_api_select_input( assert call.data["source"] == source_list[idx] +@pytest.mark.parametrize( + ("source_list"), + [(["satellite_tv", "game console"]), ([])], +) +async def test_api_select_input_fails( + hass: HomeAssistant, + source_list: list[Any], +) -> None: + """Test api set input process fails.""" + hass.states.async_set( + "media_player.test", + "off", + { + "friendly_name": "Test media player", + "source": "unknown", + "source_list": source_list, + }, + ) + await assert_request_fails( + "Alexa.InputController", + "SelectInput", + "media_player#test", + "media_player.select_source", + hass, + payload={"input": "BAD DEVICE"}, + ) + + +@pytest.mark.parametrize( + ("activity", "activity_list", "target_activity_index"), + [ + ("TV", ["TV", "MUSIC", "DVD"], 0), + ("MUSIC", ["TV", "MUSIC", "DVD", 1000], 1), + ("DVD", ["TV", "MUSIC", "DVD", None], 2), + ("TV", ["TV"], 0), + ], +) +async def test_api_select_activity( + hass: HomeAssistant, + activity: str, + activity_list: list[str], + target_activity_index: int | None, +) -> None: + """Test api set activity process.""" + hass.states.async_set( + "remote.test", + "off", + { + "current_activity": activity, + "activity_list": activity_list, + }, + ) + call, _ = await assert_request_calls_service( + "Alexa.ModeController", + "SetMode", + "remote#test", + "remote.turn_on", + hass, + payload={"mode": f"activity.{activity}"}, + instance="remote.activity", + ) + assert call.data["activity"] == activity_list[target_activity_index] + + +@pytest.mark.parametrize(("activity_list"), [(["TV", "MUSIC", "DVD"]), ([])]) +async def test_api_select_activity_fails( + hass: HomeAssistant, activity_list: list[str] +) -> None: + """Test api set activity process fails.""" 
+ hass.states.async_set( + "remote.test", + "off", + { + "current_activity": None, + "activity_list": activity_list, + }, + ) + await assert_request_fails( + "Alexa.ModeController", + "SetMode", + "remote#test", + "remote.turn_on", + hass, + payload={"mode": "activity.BAD"}, + instance="remote.activity", + ) + + +@pytest.mark.parametrize( + ( + "current_state", + "target_name", + "target_service", + ), + [ + ("on", "TurnOff", "turn_off"), + ("off", "TurnOn", "turn_on"), + ], +) +async def test_api_remote_set_power_state( + hass: HomeAssistant, + current_state: str, + target_name: str, + target_service: str, +) -> None: + """Test api remote set power state process.""" + hass.states.async_set( + "remote.test", + current_state, + { + "current_activity": ["TV", "MUSIC", "DVD"], + "activity_list": "TV", + }, + ) + + _, msg = await assert_request_calls_service( + "Alexa.PowerController", + target_name, + "remote#test", + f"remote.{target_service}", + hass, + ) + + async def test_report_lock_state(hass: HomeAssistant) -> None: """Test LockController implements lockState property.""" hass.states.async_set("lock.locked", STATE_LOCKED, {}) @@ -619,6 +765,62 @@ async def test_report_fan_direction(hass: HomeAssistant) -> None: properties.assert_equal("Alexa.ModeController", "mode", "direction.forward") +async def test_report_remote_power(hass: HomeAssistant) -> None: + """Test ModeController reports remote power state correctly.""" + hass.states.async_set( + "remote.off", + "off", + {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + hass.states.async_set( + "remote.on", + "on", + {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + + properties = await reported_properties(hass, "remote#off") + properties.assert_equal("Alexa.PowerController", "powerState", "OFF") + + properties = await reported_properties(hass, "remote#on") + properties.assert_equal("Alexa.PowerController", "powerState", "ON") + + +async def 
test_report_remote_activity(hass: HomeAssistant) -> None: + """Test ModeController reports remote activity correctly.""" + hass.states.async_set( + "remote.unknown", + "on", + {"current_activity": "UNKNOWN"}, + ) + hass.states.async_set( + "remote.tv", + "on", + {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + hass.states.async_set( + "remote.music", + "on", + {"current_activity": "MUSIC", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + hass.states.async_set( + "remote.dvd", + "on", + {"current_activity": "DVD", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + + properties = await reported_properties(hass, "remote#unknown") + properties.assert_not_has_property("Alexa.ModeController", "mode") + + properties = await reported_properties(hass, "remote#tv") + properties.assert_equal("Alexa.ModeController", "mode", "activity.TV") + + properties = await reported_properties(hass, "remote#music") + properties.assert_equal("Alexa.ModeController", "mode", "activity.MUSIC") + + properties = await reported_properties(hass, "remote#dvd") + properties.assert_equal("Alexa.ModeController", "mode", "activity.DVD") + + async def test_report_cover_range_value(hass: HomeAssistant) -> None: """Test RangeController reports cover position correctly.""" hass.states.async_set( diff --git a/tests/components/alexa/test_common.py b/tests/components/alexa/test_common.py index 9fdcc1c89c1..e78f2cba40f 100644 --- a/tests/components/alexa/test_common.py +++ b/tests/components/alexa/test_common.py @@ -1,5 +1,8 @@ """Test helpers for the Alexa integration.""" +from __future__ import annotations + +from typing import Any from unittest.mock import Mock from uuid import uuid4 @@ -7,7 +10,7 @@ import pytest from homeassistant.components.alexa import config, smart_home from homeassistant.components.alexa.const import CONF_ENDPOINT, CONF_FILTER, CONF_LOCALE -from homeassistant.core import Context, callback +from homeassistant.core import Context, HomeAssistant, ServiceCall, 
callback from homeassistant.helpers import entityfilter from tests.common import async_mock_service @@ -28,7 +31,7 @@ class MockConfig(smart_home.AlexaConfig): "camera.test": {"display_categories": "CAMERA"}, } - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Mock Alexa config.""" super().__init__( hass, @@ -62,7 +65,7 @@ class MockConfig(smart_home.AlexaConfig): """Accept a grant.""" -def get_default_config(hass): +def get_default_config(hass: HomeAssistant) -> MockConfig: """Return a MockConfig instance.""" return MockConfig(hass) @@ -93,15 +96,15 @@ def get_new_request(namespace, name, endpoint=None): async def assert_request_calls_service( - namespace, - name, - endpoint, - service, - hass, + namespace: str, + name: str, + endpoint: str, + service: str, + hass: HomeAssistant, response_type="Response", - payload=None, - instance=None, -): + payload: dict[str, Any] | None = None, + instance: str | None = None, +) -> tuple[ServiceCall, dict[str, Any]]: """Assert an API request calls a hass service.""" context = Context() request = get_new_request(namespace, name, endpoint) @@ -129,8 +132,14 @@ async def assert_request_calls_service( async def assert_request_fails( - namespace, name, endpoint, service_not_called, hass, payload=None, instance=None -): + namespace: str, + name: str, + endpoint: str, + service_not_called: str, + hass: HomeAssistant, + payload: dict[str, Any] | None = None, + instance: str | None = None, +) -> None: """Assert an API request returns an ErrorResponse.""" request = get_new_request(namespace, name, endpoint) if payload: @@ -152,8 +161,12 @@ async def assert_request_fails( async def assert_power_controller_works( - endpoint, on_service, off_service, hass, timestamp -): + endpoint: str, + on_service: str, + off_service: str, + hass: HomeAssistant, + timestamp: str, +) -> None: """Assert PowerController API requests work.""" _, response = await assert_request_calls_service( "Alexa.PowerController", "TurnOn", 
endpoint, on_service, hass @@ -169,8 +182,12 @@ async def assert_power_controller_works( async def assert_scene_controller_works( - endpoint, activate_service, deactivate_service, hass, timestamp -): + endpoint: str, + activate_service: str, + deactivate_service: str, + hass: HomeAssistant, + timestamp: str, +) -> None: """Assert SceneController API requests work.""" _, response = await assert_request_calls_service( "Alexa.SceneController", @@ -196,7 +213,9 @@ async def assert_scene_controller_works( assert response["event"]["payload"]["timestamp"] == timestamp -async def reported_properties(hass, endpoint, return_full_response=False): +async def reported_properties( + hass: HomeAssistant, endpoint: str, return_full_response: bool = False +) -> ReportedProperties: """Use ReportState to get properties and return them. The result is a ReportedProperties instance, which has methods to make @@ -213,7 +232,7 @@ async def reported_properties(hass, endpoint, return_full_response=False): class ReportedProperties: """Class to help assert reported properties.""" - def __init__(self, properties): + def __init__(self, properties) -> None: """Initialize class.""" self.properties = properties diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index d502dce7d01..6ccf265dcdc 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -120,7 +120,9 @@ async def test_wrong_version(hass: HomeAssistant) -> None: await smart_home.async_handle_message(hass, get_default_config(hass), msg) -async def discovery_test(device, hass, expected_endpoints=1): +async def discovery_test( + device, hass: HomeAssistant, expected_endpoints: int = 1 +) -> dict[str, Any] | list[dict[str, Any]] | None: """Test alexa discovery request.""" request = get_new_request("Alexa.Discovery", "Discover") @@ -1979,7 +1981,7 @@ async def test_cover_position( "friendly_name": "Test cover range", "device_class": "blind", 
"supported_features": supported_features, - "position": position, + "current_position": position, }, ) appliance = await discovery_test(device, hass) @@ -2296,7 +2298,7 @@ async def test_cover_position_range( "friendly_name": "Test cover range", "device_class": "blind", "supported_features": 7, - "position": 30, + "current_position": 30, }, ) appliance = await discovery_test(device, hass) @@ -2601,8 +2603,15 @@ async def test_stop_valve( async def assert_percentage_changes( - hass, adjustments, namespace, name, endpoint, parameter, service, changed_parameter -): + hass: HomeAssistant, + adjustments, + namespace, + name, + endpoint, + parameter, + service, + changed_parameter, +) -> None: """Assert an API request making percentage changes works. AdjustPercentage, AdjustBrightness, etc. are examples of such requests. @@ -2616,8 +2625,15 @@ async def assert_percentage_changes( async def assert_range_changes( - hass, adjustments, namespace, name, endpoint, service, changed_parameter, instance -): + hass: HomeAssistant, + adjustments: list[tuple[int | str, int, bool]], + namespace: str, + name: str, + endpoint: str, + service: str, + changed_parameter: str | None, + instance: str, +) -> None: """Assert an API request making range changes works. AdjustRangeValue are examples of such requests. 
@@ -4658,7 +4674,7 @@ async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None: "friendly_name": "Test cover semantics", "device_class": "blind", "supported_features": 255, - "position": 30, + "current_position": 30, "tilt_position": 30, }, ) diff --git a/tests/components/alexa/test_smart_home_http.py b/tests/components/alexa/test_smart_home_http.py index 1c30c72e72c..20d9b30dda5 100644 --- a/tests/components/alexa/test_smart_home_http.py +++ b/tests/components/alexa/test_smart_home_http.py @@ -5,6 +5,7 @@ import json import logging from typing import Any +from aiohttp import ClientResponse import pytest from homeassistant.components.alexa import DOMAIN, smart_home @@ -17,7 +18,9 @@ from .test_common import get_new_request from tests.typing import ClientSessionGenerator -async def do_http_discovery(config, hass, hass_client): +async def do_http_discovery( + config: dict[str, Any], hass: HomeAssistant, hass_client: ClientSessionGenerator +) -> ClientResponse: """Submit a request to the Smart Home HTTP API.""" await async_setup_component(hass, DOMAIN, config) http_client = await hass_client() diff --git a/tests/components/amberelectric/conftest.py b/tests/components/amberelectric/conftest.py index 9de865fae6c..ce4073db71b 100644 --- a/tests/components/amberelectric/conftest.py +++ b/tests/components/amberelectric/conftest.py @@ -1,9 +1,9 @@ """Provide common Amber fixtures.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/amberelectric/test_binary_sensor.py b/tests/components/amberelectric/test_binary_sensor.py index 1e5eb572e07..2c1ee22b644 100644 --- a/tests/components/amberelectric/test_binary_sensor.py +++ b/tests/components/amberelectric/test_binary_sensor.py @@ -8,6 +8,7 @@ from unittest.mock import Mock, patch from amberelectric.model.channel import ChannelType from amberelectric.model.current_interval 
import CurrentInterval from amberelectric.model.interval import SpikeStatus +from amberelectric.model.tariff_information import TariffInformation from dateutil import parser import pytest @@ -111,7 +112,7 @@ async def setup_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: @pytest.mark.usefixtures("setup_no_spike") def test_no_spike_sensor(hass: HomeAssistant) -> None: """Testing the creation of the Amber renewables sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 sensor = hass.states.get("binary_sensor.mock_title_price_spike") assert sensor assert sensor.state == "off" @@ -122,7 +123,7 @@ def test_no_spike_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_potential_spike") def test_potential_spike_sensor(hass: HomeAssistant) -> None: """Testing the creation of the Amber renewables sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 sensor = hass.states.get("binary_sensor.mock_title_price_spike") assert sensor assert sensor.state == "off" @@ -133,9 +134,85 @@ def test_potential_spike_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_spike") def test_spike_sensor(hass: HomeAssistant) -> None: """Testing the creation of the Amber renewables sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 sensor = hass.states.get("binary_sensor.mock_title_price_spike") assert sensor assert sensor.state == "on" assert sensor.attributes["icon"] == "mdi:power-plug-off" assert sensor.attributes["spike_status"] == "spike" + + +@pytest.fixture +async def setup_inactive_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock]: + """Set up general channel.""" + MockConfigEntry( + domain="amberelectric", + data={ + CONF_SITE_NAME: "mock_title", + CONF_API_TOKEN: MOCK_API_TOKEN, + CONF_SITE_ID: GENERAL_ONLY_SITE_ID, + }, + ).add_to_hass(hass) + + instance = Mock() + with patch( + 
"amberelectric.api.AmberApi.create", + return_value=instance, + ) as mock_update: + general_channel: list[CurrentInterval] = [ + generate_current_interval( + ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") + ), + ] + general_channel[0].tariff_information = TariffInformation(demandWindow=False) + instance.get_current_price = Mock(return_value=general_channel) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + yield mock_update.return_value + + +@pytest.fixture +async def setup_active_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock]: + """Set up general channel.""" + MockConfigEntry( + domain="amberelectric", + data={ + CONF_SITE_NAME: "mock_title", + CONF_API_TOKEN: MOCK_API_TOKEN, + CONF_SITE_ID: GENERAL_ONLY_SITE_ID, + }, + ).add_to_hass(hass) + + instance = Mock() + with patch( + "amberelectric.api.AmberApi.create", + return_value=instance, + ) as mock_update: + general_channel: list[CurrentInterval] = [ + generate_current_interval( + ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") + ), + ] + general_channel[0].tariff_information = TariffInformation(demandWindow=True) + instance.get_current_price = Mock(return_value=general_channel) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + yield mock_update.return_value + + +@pytest.mark.usefixtures("setup_inactive_demand_window") +def test_inactive_demand_window_sensor(hass: HomeAssistant) -> None: + """Testing the creation of the Amber demand_window sensor.""" + assert len(hass.states.async_all()) == 6 + sensor = hass.states.get("binary_sensor.mock_title_demand_window") + assert sensor + assert sensor.state == "off" + + +@pytest.mark.usefixtures("setup_active_demand_window") +def test_active_demand_window_sensor(hass: HomeAssistant) -> None: + """Testing the creation of the Amber demand_window sensor.""" + assert len(hass.states.async_all()) == 6 + sensor = 
hass.states.get("binary_sensor.mock_title_demand_window") + assert sensor + assert sensor.state == "on" diff --git a/tests/components/amberelectric/test_sensor.py b/tests/components/amberelectric/test_sensor.py index 3c0910f0afc..3a5626d14d5 100644 --- a/tests/components/amberelectric/test_sensor.py +++ b/tests/components/amberelectric/test_sensor.py @@ -105,7 +105,7 @@ async def setup_general_and_feed_in(hass: HomeAssistant) -> AsyncGenerator[Mock] async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) -> None: """Test the General Price sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 price = hass.states.get("sensor.mock_title_general_price") assert price assert price.state == "0.08" @@ -143,7 +143,7 @@ async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) -> @pytest.mark.usefixtures("setup_general_and_controlled_load") async def test_general_and_controlled_load_price_sensor(hass: HomeAssistant) -> None: """Test the Controlled Price sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_controlled_load_price") assert price assert price.state == "0.08" @@ -165,7 +165,7 @@ async def test_general_and_controlled_load_price_sensor(hass: HomeAssistant) -> @pytest.mark.usefixtures("setup_general_and_feed_in") async def test_general_and_feed_in_price_sensor(hass: HomeAssistant) -> None: """Test the Feed In sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_feed_in_price") assert price assert price.state == "-0.08" @@ -188,7 +188,7 @@ async def test_general_forecast_sensor( hass: HomeAssistant, setup_general: Mock ) -> None: """Test the General Forecast sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 price = hass.states.get("sensor.mock_title_general_forecast") 
assert price assert price.state == "0.09" @@ -230,7 +230,7 @@ async def test_general_forecast_sensor( @pytest.mark.usefixtures("setup_general_and_controlled_load") async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None: """Test the Controlled Load Forecast sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_controlled_load_forecast") assert price assert price.state == "0.09" @@ -254,7 +254,7 @@ async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_general_and_feed_in") async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None: """Test the Feed In Forecast sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_feed_in_forecast") assert price assert price.state == "-0.09" @@ -278,7 +278,7 @@ async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_general") def test_renewable_sensor(hass: HomeAssistant) -> None: """Testing the creation of the Amber renewables sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 sensor = hass.states.get("sensor.mock_title_renewables") assert sensor assert sensor.state == "51" @@ -287,7 +287,7 @@ def test_renewable_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_general") def test_general_price_descriptor_descriptor_sensor(hass: HomeAssistant) -> None: """Test the General Price Descriptor sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 price = hass.states.get("sensor.mock_title_general_price_descriptor") assert price assert price.state == "extremely_low" @@ -298,7 +298,7 @@ def test_general_and_controlled_load_price_descriptor_sensor( hass: HomeAssistant, ) -> None: """Test the Controlled Price Descriptor sensor.""" - assert 
len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_controlled_load_price_descriptor") assert price assert price.state == "extremely_low" @@ -307,7 +307,7 @@ def test_general_and_controlled_load_price_descriptor_sensor( @pytest.mark.usefixtures("setup_general_and_feed_in") def test_general_and_feed_in_price_descriptor_sensor(hass: HomeAssistant) -> None: """Test the Feed In Price Descriptor sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_feed_in_price_descriptor") assert price assert price.state == "extremely_low" diff --git a/tests/components/ambient_network/conftest.py b/tests/components/ambient_network/conftest.py index 2900f8ae5fe..9fc001252a0 100644 --- a/tests/components/ambient_network/conftest.py +++ b/tests/components/ambient_network/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the Ambient Weather Network integration tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch from aioambient import OpenAPI import pytest -from typing_extensions import Generator from homeassistant.components import ambient_network from homeassistant.core import HomeAssistant diff --git a/tests/components/ambient_station/conftest.py b/tests/components/ambient_station/conftest.py index e4f067108a5..160c05ad996 100644 --- a/tests/components/ambient_station/conftest.py +++ b/tests/components/ambient_station/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for Ambient PWS.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.ambient_station.const import CONF_APP_KEY, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/ambient_station/test_config_flow.py 
b/tests/components/ambient_station/test_config_flow.py index 19ae9828c22..e4c8efabc20 100644 --- a/tests/components/ambient_station/test_config_flow.py +++ b/tests/components/ambient_station/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from aioambient.errors import AmbientError import pytest -from homeassistant.components.ambient_station import CONF_APP_KEY, DOMAIN +from homeassistant.components.ambient_station.const import CONF_APP_KEY, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant diff --git a/tests/components/ambient_station/test_diagnostics.py b/tests/components/ambient_station/test_diagnostics.py index 05161ba32cd..82db72eb9ca 100644 --- a/tests/components/ambient_station/test_diagnostics.py +++ b/tests/components/ambient_station/test_diagnostics.py @@ -1,6 +1,7 @@ """Test Ambient PWS diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.ambient_station import AmbientStationConfigEntry from homeassistant.core import HomeAssistant @@ -20,7 +21,6 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" ambient = config_entry.runtime_data ambient.stations = data_station - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py index 60882cda874..28272cd8866 100644 --- a/tests/components/analytics/test_analytics.py +++ b/tests/components/analytics/test_analytics.py @@ -19,7 +19,6 @@ from homeassistant.components.analytics.const import ( ATTR_STATISTICS, ATTR_USAGE, ) -from homeassistant.components.recorder import Recorder from homeassistant.config_entries import 
ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -36,7 +35,7 @@ MOCK_VERSION_NIGHTLY = "1970.1.0.dev19700101" @pytest.fixture(autouse=True) -def uuid_mock() -> Generator[Any, Any, None]: +def uuid_mock() -> Generator[None]: """Mock the UUID.""" with patch("uuid.UUID.hex", new_callable=PropertyMock) as hex_mock: hex_mock.return_value = MOCK_UUID @@ -44,7 +43,7 @@ def uuid_mock() -> Generator[Any, Any, None]: @pytest.fixture(autouse=True) -def ha_version_mock() -> Generator[Any, Any, None]: +def ha_version_mock() -> Generator[None]: """Mock the core version.""" with patch( "homeassistant.components.analytics.analytics.HA_VERSION", @@ -54,7 +53,7 @@ def ha_version_mock() -> Generator[Any, Any, None]: @pytest.fixture -def installation_type_mock() -> Generator[Any, Any, None]: +def installation_type_mock() -> Generator[None]: """Mock the async_get_system_info.""" with patch( "homeassistant.components.analytics.analytics.async_get_system_info", @@ -160,11 +159,11 @@ async def test_failed_to_send_raises( assert "Error sending analytics" in caplog.text +@pytest.mark.usefixtures("installation_type_mock") async def test_send_base( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -231,11 +230,11 @@ async def test_send_base_with_supervisor( assert snapshot == submitted_data +@pytest.mark.usefixtures("installation_type_mock") async def test_send_usage( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send usage preferences are defined.""" @@ -331,11 +330,11 @@ async def test_send_usage_with_supervisor( assert snapshot == submitted_data 
+@pytest.mark.usefixtures("installation_type_mock") async def test_send_statistics( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send statistics preferences are defined.""" @@ -382,12 +381,11 @@ async def test_send_statistics_one_integration_fails( assert post_call[2]["integration_count"] == 0 -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") async def test_send_statistics_disabled_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send statistics with disabled integration.""" @@ -420,12 +418,11 @@ async def test_send_statistics_disabled_integration( assert snapshot == submitted_data -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") async def test_send_statistics_ignored_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send statistics with ignored integration.""" @@ -566,12 +563,11 @@ async def test_reusing_uuid( assert analytics.uuid == "NOT_MOCK_UUID" -@pytest.mark.usefixtures("enable_custom_integrations") +@pytest.mark.usefixtures("enable_custom_integrations", "installation_type_mock") async def test_custom_integrations( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test sending custom integrations.""" @@ -651,12 +647,11 @@ async def test_nightly_endpoint( assert str(payload[1]) == ANALYTICS_ENDPOINT_URL 
-@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") async def test_send_with_no_energy( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -688,12 +683,11 @@ async def test_send_with_no_energy( assert snapshot == submitted_data -@pytest.mark.usefixtures("recorder_mock", "mock_hass_config") +@pytest.mark.usefixtures("recorder_mock", "installation_type_mock", "mock_hass_config") async def test_send_with_no_energy_config( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -720,12 +714,11 @@ async def test_send_with_no_energy_config( ) -@pytest.mark.usefixtures("recorder_mock", "mock_hass_config") +@pytest.mark.usefixtures("recorder_mock", "installation_type_mock", "mock_hass_config") async def test_send_with_energy_config( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -752,12 +745,11 @@ async def test_send_with_energy_config( ) -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") async def test_send_usage_with_certificate( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send usage preferences with certificate.""" @@ -779,12 +771,11 @@ async def test_send_usage_with_certificate( assert snapshot == submitted_data +@pytest.mark.usefixtures("recorder_mock", 
"installation_type_mock") async def test_send_with_recorder( - recorder_mock: Recorder, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test recorder information.""" @@ -849,11 +840,11 @@ async def test_timeout_while_sending( assert "Timeout sending analytics" in caplog.text +@pytest.mark.usefixtures("installation_type_mock") async def test_not_check_config_entries_if_yaml( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test skip config entry check if defined in yaml.""" diff --git a/tests/components/analytics_insights/conftest.py b/tests/components/analytics_insights/conftest.py index 75d47c41f4e..fcdda95e9bd 100644 --- a/tests/components/analytics_insights/conftest.py +++ b/tests/components/analytics_insights/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the Homeassistant Analytics tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from python_homeassistant_analytics import CurrentAnalytics from python_homeassistant_analytics.models import CustomIntegration, Integration -from typing_extensions import Generator from homeassistant.components.analytics_insights.const import ( CONF_TRACKED_CUSTOM_INTEGRATIONS, diff --git a/tests/components/androidtv/conftest.py b/tests/components/androidtv/conftest.py index befb9db7a8c..a075ed66079 100644 --- a/tests/components/androidtv/conftest.py +++ b/tests/components/androidtv/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Android TV integration tests.""" +from collections.abc import Generator from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator from . 
import patchers diff --git a/tests/components/androidtv/patchers.py b/tests/components/androidtv/patchers.py index 90a13523ebe..500b9e75cb3 100644 --- a/tests/components/androidtv/patchers.py +++ b/tests/components/androidtv/patchers.py @@ -1,5 +1,6 @@ """Define patches used for androidtv tests.""" +from typing import Any from unittest.mock import patch from androidtv.adb_manager.adb_manager_async import DeviceAsync @@ -25,7 +26,7 @@ PROPS_DEV_MAC = "ether ab:cd:ef:gh:ij:kl brd" class AdbDeviceTcpAsyncFake: """A fake of the `adb_shell.adb_device_async.AdbDeviceTcpAsync` class.""" - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a fake `adb_shell.adb_device_async.AdbDeviceTcpAsync` instance.""" self.available = False @@ -37,7 +38,7 @@ class AdbDeviceTcpAsyncFake: """Try to connect to a device.""" raise NotImplementedError - async def shell(self, cmd, *args, **kwargs): + async def shell(self, cmd, *args, **kwargs) -> bytes | str | None: """Send an ADB shell command.""" return None diff --git a/tests/components/androidtv/test_config_flow.py b/tests/components/androidtv/test_config_flow.py index e2b5207c590..b73fee9fb10 100644 --- a/tests/components/androidtv/test_config_flow.py +++ b/tests/components/androidtv/test_config_flow.py @@ -73,7 +73,7 @@ CONNECT_METHOD = ( class MockConfigDevice: """Mock class to emulate Android device.""" - def __init__(self, eth_mac=ETH_MAC, wifi_mac=None): + def __init__(self, eth_mac=ETH_MAC, wifi_mac=None) -> None: """Initialize a fake device to test config flow.""" self.available = True self.device_properties = {PROP_ETHMAC: eth_mac, PROP_WIFIMAC: wifi_mac} diff --git a/tests/components/androidtv_remote/conftest.py b/tests/components/androidtv_remote/conftest.py index aa5583927d1..05e40991ff9 100644 --- a/tests/components/androidtv_remote/conftest.py +++ b/tests/components/androidtv_remote/conftest.py @@ -1,10 +1,9 @@ """Fixtures for the Android TV Remote integration 
tests.""" -from collections.abc import Callable +from collections.abc import Callable, Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.androidtv_remote.const import DOMAIN from homeassistant.config_entries import ConfigEntryState diff --git a/tests/components/androidtv_remote/test_media_player.py b/tests/components/androidtv_remote/test_media_player.py index ad7c049e32f..46678f18fd3 100644 --- a/tests/components/androidtv_remote/test_media_player.py +++ b/tests/components/androidtv_remote/test_media_player.py @@ -345,7 +345,7 @@ async def test_browse_media( ) response = await client.receive_json() assert response["success"] - assert { + assert response["result"] == { "title": "Applications", "media_class": "directory", "media_content_type": "apps", @@ -377,7 +377,7 @@ async def test_browse_media( "thumbnail": "", }, ], - } == response["result"] + } async def test_media_player_connection_closed( diff --git a/tests/components/anthropic/__init__.py b/tests/components/anthropic/__init__.py new file mode 100644 index 00000000000..99d7a5785a8 --- /dev/null +++ b/tests/components/anthropic/__init__.py @@ -0,0 +1 @@ +"""Tests for the Anthropic integration.""" diff --git a/tests/components/anthropic/conftest.py b/tests/components/anthropic/conftest.py new file mode 100644 index 00000000000..ce6b98c480c --- /dev/null +++ b/tests/components/anthropic/conftest.py @@ -0,0 +1,57 @@ +"""Tests helpers.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Mock a config entry.""" + entry = MockConfigEntry( 
+ title="Claude", + domain="anthropic", + data={ + "api_key": "bla", + }, + ) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Mock a config entry with assist.""" + hass.config_entries.async_update_entry( + mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} + ) + return mock_config_entry + + +@pytest.fixture +async def mock_init_component( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> AsyncGenerator[None]: + """Initialize integration.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + assert await async_setup_component(hass, "anthropic", {}) + await hass.async_block_till_done() + yield + + +@pytest.fixture(autouse=True) +async def setup_ha(hass: HomeAssistant) -> None: + """Set up Home Assistant.""" + assert await async_setup_component(hass, "homeassistant", {}) diff --git a/tests/components/anthropic/snapshots/test_conversation.ambr b/tests/components/anthropic/snapshots/test_conversation.ambr new file mode 100644 index 00000000000..e4dd7cd00bb --- /dev/null +++ b/tests/components/anthropic/snapshots/test_conversation.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_unknown_hass_api + dict({ + 'conversation_id': None, + 'response': IntentResponse( + card=dict({ + }), + error_code=, + failed_results=list([ + ]), + intent=None, + intent_targets=list([ + ]), + language='en', + matched_states=list([ + ]), + reprompt=dict({ + }), + response_type=, + speech=dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Error preparing LLM API: API non-existing not found', + }), + }), + speech_slots=dict({ + }), + success_results=list([ + ]), + unmatched_states=list([ + ]), + ), + }) +# --- diff --git a/tests/components/anthropic/test_config_flow.py b/tests/components/anthropic/test_config_flow.py new file mode 100644 index 
00000000000..df27352b7b2 --- /dev/null +++ b/tests/components/anthropic/test_config_flow.py @@ -0,0 +1,239 @@ +"""Test the Anthropic config flow.""" + +from unittest.mock import AsyncMock, patch + +from anthropic import ( + APIConnectionError, + APIResponseValidationError, + APITimeoutError, + AuthenticationError, + BadRequestError, + InternalServerError, +) +from httpx import URL, Request, Response +import pytest + +from homeassistant import config_entries +from homeassistant.components.anthropic.config_flow import RECOMMENDED_OPTIONS +from homeassistant.components.anthropic.const import ( + CONF_CHAT_MODEL, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_RECOMMENDED, + CONF_TEMPERATURE, + DOMAIN, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, +) +from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form(hass: HomeAssistant) -> None: + """Test we get the form.""" + # Pretend we already set up a config entry. 
+ hass.config.components.add("anthropic") + MockConfigEntry( + domain=DOMAIN, + state=config_entries.ConfigEntryState.LOADED, + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] is None + + with ( + patch( + "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + ), + patch( + "homeassistant.components.anthropic.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "api_key": "bla", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + "api_key": "bla", + } + assert result2["options"] == RECOMMENDED_OPTIONS + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test the options form.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + "prompt": "Speak like a pirate", + "max_tokens": 200, + }, + ) + await hass.async_block_till_done() + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"]["prompt"] == "Speak like a pirate" + assert options["data"]["max_tokens"] == 200 + assert options["data"][CONF_CHAT_MODEL] == RECOMMENDED_CHAT_MODEL + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (APIConnectionError(request=None), "cannot_connect"), + (APITimeoutError(request=None), "timeout_connect"), + ( + BadRequestError( + message="Your credit balance is too low to access the Claude API. 
Please go to Plans & Billing to upgrade or purchase credits.", + response=Response( + status_code=400, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "invalid_request_error"}}, + ), + "invalid_request_error", + ), + ( + AuthenticationError( + message="invalid x-api-key", + response=Response( + status_code=401, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "authentication_error"}}, + ), + "authentication_error", + ), + ( + InternalServerError( + message=None, + response=Response( + status_code=500, + request=Request(method="POST", url=URL()), + ), + body=None, + ), + "unknown", + ), + ( + APIResponseValidationError( + response=Response( + status_code=200, + request=Request(method="POST", url=URL()), + ), + body=None, + ), + "unknown", + ), + ], +) +async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> None: + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=side_effect, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "api_key": "bla", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": error} + + +@pytest.mark.parametrize( + ("current_options", "new_options", "expected_options"), + [ + ( + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "none", + CONF_PROMPT: "bla", + }, + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 0.3, + }, + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 0.3, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + }, + ), + ( + { + CONF_RECOMMENDED: False, + CONF_PROMPT: 
"Speak like a pirate", + CONF_TEMPERATURE: 0.3, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + }, + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "assist", + CONF_PROMPT: "", + }, + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "assist", + CONF_PROMPT: "", + }, + ), + ], +) +async def test_options_switching( + hass: HomeAssistant, + mock_config_entry, + mock_init_component, + current_options, + new_options, + expected_options, +) -> None: + """Test the options form.""" + hass.config_entries.async_update_entry(mock_config_entry, options=current_options) + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + if current_options.get(CONF_RECOMMENDED) != new_options.get(CONF_RECOMMENDED): + options_flow = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + **current_options, + CONF_RECOMMENDED: new_options[CONF_RECOMMENDED], + }, + ) + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + new_options, + ) + await hass.async_block_till_done() + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"] == expected_options diff --git a/tests/components/anthropic/test_conversation.py b/tests/components/anthropic/test_conversation.py new file mode 100644 index 00000000000..65ede877281 --- /dev/null +++ b/tests/components/anthropic/test_conversation.py @@ -0,0 +1,487 @@ +"""Tests for the Anthropic integration.""" + +from unittest.mock import AsyncMock, Mock, patch + +from anthropic import RateLimitError +from anthropic.types import Message, TextBlock, ToolUseBlock, Usage +from freezegun import freeze_time +from httpx import URL, Request, Response +from syrupy.assertion import SnapshotAssertion +import voluptuous as vol + +from homeassistant.components import conversation +from homeassistant.components.conversation import trace +from homeassistant.const import CONF_LLM_HASS_API +from 
homeassistant.core import Context, HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import intent, llm +from homeassistant.setup import async_setup_component +from homeassistant.util import ulid + +from tests.common import MockConfigEntry + + +async def test_entity( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test entity properties.""" + state = hass.states.get("conversation.claude") + assert state + assert state.attributes["supported_features"] == 0 + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "assist", + }, + ) + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + await hass.config_entries.async_reload(mock_config_entry.entry_id) + + state = hass.states.get("conversation.claude") + assert state + assert ( + state.attributes["supported_features"] + == conversation.ConversationEntityFeature.CONTROL + ) + + +async def test_error_handling( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component +) -> None: + """Test that the default prompt works.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=RateLimitError( + message=None, + response=Response( + status_code=429, request=Request(method="POST", url=URL()) + ), + body=None, + ), + ): + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.error_code == "unknown", result + + +async def test_template_error( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that template error handling works.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + "prompt": "talk like a {% if True 
%}smarthome{% else %}pirate please.", + }, + ) + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.error_code == "unknown", result + + +async def test_template_variables( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that template variables work.""" + context = Context(user_id="12345") + mock_user = Mock() + mock_user.id = "12345" + mock_user.name = "Test User" + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + "prompt": ( + "The user name is {{ user_name }}. " + "The user id is {{ llm_context.context.user_id }}." + ), + }, + ) + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ) as mock_create, + patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user), + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + result = await conversation.async_converse( + hass, "hello", None, context, agent_id="conversation.claude" + ) + + assert ( + result.response.response_type == intent.IntentResponseType.ACTION_DONE + ), result + assert "The user name is Test User." in mock_create.mock_calls[1][2]["system"] + assert "The user id is 12345." 
in mock_create.mock_calls[1][2]["system"] + + +async def test_conversation_agent( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test Anthropic Agent.""" + agent = conversation.agent_manager.async_get_agent(hass, "conversation.claude") + assert agent.supported_languages == "*" + + +@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") +async def test_function_call( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test function call from the assistant.""" + agent_id = "conversation.claude" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.return_value = "Test response" + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + for content in message["content"]: + if not isinstance(content, str) and content["type"] == "tool_use": + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock( + type="text", + text="I have successfully called the function", + ) + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock(type="text", text="Certainly, calling it now!"), + ToolUseBlock( + type="tool_use", + id="toolu_0123456789AbCdEfGhIjKlM", + name="test_tool", + input={"param1": "test_value"}, + ), + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="tool_use", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + with ( + patch( + 
"anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create, + freeze_time("2024-06-03 23:00:00"), + ): + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert "Today's date is 2024-06-03." in mock_create.mock_calls[1][2]["system"] + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert mock_create.mock_calls[1][2]["messages"][2] == { + "role": "user", + "content": [ + { + "content": '"Test response"', + "tool_use_id": "toolu_0123456789AbCdEfGhIjKlM", + "type": "tool_result", + } + ], + } + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={"param1": "test_value"}, + ), + llm.LLMContext( + platform="anthropic", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + # Test Conversation tracing + traces = trace.async_get_traces() + assert traces + last_trace = traces[-1].as_dict() + trace_events = last_trace.get("events", []) + assert [event["event_type"] for event in trace_events] == [ + trace.ConversationTraceEventType.ASYNC_PROCESS, + trace.ConversationTraceEventType.AGENT_DETAIL, + trace.ConversationTraceEventType.TOOL_CALL, + ] + # AGENT_DETAIL event contains the raw prompt passed to the model + detail_event = trace_events[1] + assert "Answer in plain text" in detail_event["data"]["system"] + assert "Today's date is 2024-06-03." 
in trace_events[1]["data"]["system"] + + # Call it again, make sure we have updated prompt + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create, + freeze_time("2024-06-04 23:00:00"), + ): + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert "Today's date is 2024-06-04." in mock_create.mock_calls[1][2]["system"] + # Test old assert message not updated + assert "Today's date is 2024-06-03." in trace_events[1]["data"]["system"] + + +@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") +async def test_function_exception( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test function call with exception.""" + agent_id = "conversation.claude" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.side_effect = HomeAssistantError("Test tool exception") + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + for content in message["content"]: + if not isinstance(content, str) and content["type"] == "tool_use": + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock( + type="text", + text="There was an error calling the function", + ) + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock(type="text", text="Certainly, calling it now!"), + ToolUseBlock( + 
type="tool_use", + id="toolu_0123456789AbCdEfGhIjKlM", + name="test_tool", + input={"param1": "test_value"}, + ), + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="tool_use", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create: + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert mock_create.mock_calls[1][2]["messages"][2] == { + "role": "user", + "content": [ + { + "content": '{"error": "HomeAssistantError", "error_text": "Test tool exception"}', + "tool_use_id": "toolu_0123456789AbCdEfGhIjKlM", + "type": "tool_result", + } + ], + } + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={"param1": "test_value"}, + ), + llm.LLMContext( + platform="anthropic", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + +async def test_assist_api_tools_conversion( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test that we are able to convert actual tools from Assist API.""" + for component in ( + "intent", + "todo", + "light", + "shopping_list", + "humidifier", + "climate", + "media_player", + "vacuum", + "cover", + "weather", + ): + assert await async_setup_component(hass, component, {}) + + agent_id = "conversation.claude" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[TextBlock(type="text", text="Hello, how can I help you?")], + model="claude-3-5-sonnet-20240620", 
+ role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ), + ) as mock_create: + await conversation.async_converse( + hass, "hello", None, Context(), agent_id=agent_id + ) + + tools = mock_create.mock_calls[0][2]["tools"] + assert tools + + +async def test_unknown_hass_api( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + mock_init_component, +) -> None: + """Test when we reference an API that no longer exists.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "non-existing", + }, + ) + + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result == snapshot + + +@patch("anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock) +async def test_conversation_id( + mock_create, + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test conversation ID is honored.""" + result = await conversation.async_converse( + hass, "hello", None, None, agent_id="conversation.claude" + ) + + conversation_id = result.conversation_id + + result = await conversation.async_converse( + hass, "hello", conversation_id, None, agent_id="conversation.claude" + ) + + assert result.conversation_id == conversation_id + + unknown_id = ulid.ulid() + + result = await conversation.async_converse( + hass, "hello", unknown_id, None, agent_id="conversation.claude" + ) + + assert result.conversation_id != unknown_id + + result = await conversation.async_converse( + hass, "hello", "koala", None, agent_id="conversation.claude" + ) + + assert result.conversation_id == "koala" diff --git a/tests/components/anthropic/test_init.py b/tests/components/anthropic/test_init.py new file mode 100644 index 00000000000..ee87bb708d0 --- /dev/null +++ b/tests/components/anthropic/test_init.py @@ -0,0 
+1,64 @@ +"""Tests for the Anthropic integration.""" + +from unittest.mock import AsyncMock, patch + +from anthropic import ( + APIConnectionError, + APITimeoutError, + AuthenticationError, + BadRequestError, +) +from httpx import URL, Request, Response +import pytest + +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (APIConnectionError(request=None), "Connection error"), + (APITimeoutError(request=None), "Request timed out"), + ( + BadRequestError( + message="Your credit balance is too low to access the Claude API. Please go to Plans & Billing to upgrade or purchase credits.", + response=Response( + status_code=400, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "invalid_request_error"}}, + ), + "anthropic integration not ready yet: Your credit balance is too low to access the Claude API", + ), + ( + AuthenticationError( + message="invalid x-api-key", + response=Response( + status_code=401, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "authentication_error"}}, + ), + "Invalid API key", + ), + ], +) +async def test_init_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, + side_effect, + error, +) -> None: + """Test initialization errors.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=side_effect, + ): + assert await async_setup_component(hass, "anthropic", {}) + await hass.async_block_till_done() + assert error in caplog.text diff --git a/tests/components/aosmith/conftest.py b/tests/components/aosmith/conftest.py index d67ae1ea627..7efbe0c58b2 100644 --- a/tests/components/aosmith/conftest.py +++ b/tests/components/aosmith/conftest.py @@ -1,5 +1,6 @@ """Common fixtures for the A. O. 
Smith tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from py_aosmith import AOSmithAPIClient @@ -14,7 +15,6 @@ from py_aosmith.models import ( SupportedOperationModeInfo, ) import pytest -from typing_extensions import Generator from homeassistant.components.aosmith.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/aosmith/snapshots/test_device.ambr b/tests/components/aosmith/snapshots/test_device.ambr index f6e2625afdb..dec33a92fe2 100644 --- a/tests/components/aosmith/snapshots/test_device.ambr +++ b/tests/components/aosmith/snapshots/test_device.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'A. O. Smith', 'model': 'HPTS-50 200 202172000', + 'model_id': None, 'name': 'My water heater', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'serial', 'suggested_area': 'Basement', 'sw_version': '2.14', diff --git a/tests/components/aosmith/test_sensor.py b/tests/components/aosmith/test_sensor.py index a77e4e4576d..1dc632b5e84 100644 --- a/tests/components/aosmith/test_sensor.py +++ b/tests/components/aosmith/test_sensor.py @@ -1,10 +1,10 @@ """Tests for the sensor platform of the A. O. 
Smith integration.""" +from collections.abc import AsyncGenerator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import AsyncGenerator from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -14,7 +14,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) -async def platforms() -> AsyncGenerator[list[str]]: +async def platforms() -> AsyncGenerator[None]: """Return the platforms to be loaded for this test.""" with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.SENSOR]): yield diff --git a/tests/components/aosmith/test_water_heater.py b/tests/components/aosmith/test_water_heater.py index ab4a4a33bca..69ad8004fc2 100644 --- a/tests/components/aosmith/test_water_heater.py +++ b/tests/components/aosmith/test_water_heater.py @@ -1,11 +1,11 @@ """Tests for the water heater platform of the A. O. Smith integration.""" +from collections.abc import AsyncGenerator from unittest.mock import MagicMock, patch from py_aosmith.models import OperationMode import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import AsyncGenerator from homeassistant.components.water_heater import ( ATTR_AWAY_MODE, @@ -29,7 +29,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) -async def platforms() -> AsyncGenerator[list[str]]: +async def platforms() -> AsyncGenerator[None]: """Return the platforms to be loaded for this test.""" with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.WATER_HEATER]): yield diff --git a/tests/components/apache_kafka/test_init.py b/tests/components/apache_kafka/test_init.py index 2b702046054..cffe08ffd4a 100644 --- a/tests/components/apache_kafka/test_init.py +++ b/tests/components/apache_kafka/test_init.py @@ -3,8 +3,9 @@ from __future__ import annotations from asyncio import AbstractEventLoop -from collections.abc import 
Callable +from collections.abc import Callable, Generator from dataclasses import dataclass +from typing import Any from unittest.mock import patch import pytest @@ -41,7 +42,7 @@ class MockKafkaClient: @pytest.fixture(name="mock_client") -def mock_client_fixture(): +def mock_client_fixture() -> Generator[MockKafkaClient]: """Mock the apache kafka client.""" with ( patch(f"{PRODUCER_PATH}.start") as start, @@ -89,7 +90,7 @@ async def test_full_config(hass: HomeAssistant, mock_client: MockKafkaClient) -> mock_client.start.assert_called_once() -async def _setup(hass, filter_config): +async def _setup(hass: HomeAssistant, filter_config: dict[str, Any]) -> None: """Shared set up for filtering tests.""" config = {apache_kafka.DOMAIN: {"filter": filter_config}} config[apache_kafka.DOMAIN].update(MIN_CONFIG) @@ -98,7 +99,9 @@ async def _setup(hass, filter_config): await hass.async_block_till_done() -async def _run_filter_tests(hass, tests, mock_client): +async def _run_filter_tests( + hass: HomeAssistant, tests: list[FilterTest], mock_client: MockKafkaClient +) -> None: """Run a series of filter tests on apache kafka.""" for test in tests: hass.states.async_set(test.id, STATE_ON) diff --git a/tests/components/apcupsd/test_binary_sensor.py b/tests/components/apcupsd/test_binary_sensor.py index 7616a960b21..02351109603 100644 --- a/tests/components/apcupsd/test_binary_sensor.py +++ b/tests/components/apcupsd/test_binary_sensor.py @@ -1,5 +1,7 @@ """Test binary sensors of APCUPSd integration.""" +import pytest + from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util import slugify @@ -31,3 +33,22 @@ async def test_no_binary_sensor(hass: HomeAssistant) -> None: device_slug = slugify(MOCK_STATUS["UPSNAME"]) state = hass.states.get(f"binary_sensor.{device_slug}_online_status") assert state is None + + +@pytest.mark.parametrize( + ("override", "expected"), + [ + ("0x008", "on"), + ("0x02040010 Status Flag", 
"off"), + ], +) +async def test_statflag(hass: HomeAssistant, override: str, expected: str) -> None: + """Test binary sensor for different STATFLAG values.""" + status = MOCK_STATUS.copy() + status["STATFLAG"] = override + await async_init_integration(hass, status=status) + + device_slug = slugify(MOCK_STATUS["UPSNAME"]) + assert ( + hass.states.get(f"binary_sensor.{device_slug}_online_status").state == expected + ) diff --git a/tests/components/apcupsd/test_sensor.py b/tests/components/apcupsd/test_sensor.py index 0c7d174a5e8..0fe7f12ad27 100644 --- a/tests/components/apcupsd/test_sensor.py +++ b/tests/components/apcupsd/test_sensor.py @@ -15,6 +15,7 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, STATE_UNAVAILABLE, + STATE_UNKNOWN, UnitOfElectricPotential, UnitOfPower, UnitOfTime, @@ -25,7 +26,7 @@ from homeassistant.setup import async_setup_component from homeassistant.util import slugify from homeassistant.util.dt import utcnow -from . import MOCK_STATUS, async_init_integration +from . import MOCK_MINIMAL_STATUS, MOCK_STATUS, async_init_integration from tests.common import async_fire_time_changed @@ -237,3 +238,34 @@ async def test_multiple_manual_update_entity(hass: HomeAssistant) -> None: blocking=True, ) assert mock_request_status.call_count == 1 + + +async def test_sensor_unknown(hass: HomeAssistant) -> None: + """Test if our integration can properly certain sensors as unknown when it becomes so.""" + await async_init_integration(hass, status=MOCK_MINIMAL_STATUS) + + assert hass.states.get("sensor.mode").state == MOCK_MINIMAL_STATUS["UPSMODE"] + # Last self test sensor should be added even if our status does not report it initially (it is + # a sensor that appears only after a periodical or manual self test is performed). 
+ assert hass.states.get("sensor.last_self_test") is not None + assert hass.states.get("sensor.last_self_test").state == STATE_UNKNOWN + + # Simulate an event (a self test) such that "LASTSTEST" field is being reported, the state of + # the sensor should be properly updated with the corresponding value. + with patch("aioapcaccess.request_status") as mock_request_status: + mock_request_status.return_value = MOCK_MINIMAL_STATUS | { + "LASTSTEST": "1970-01-01 00:00:00 0000" + } + future = utcnow() + timedelta(minutes=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + assert hass.states.get("sensor.last_self_test").state == "1970-01-01 00:00:00 0000" + + # Simulate another event (e.g., daemon restart) such that "LASTSTEST" is no longer reported. + with patch("aioapcaccess.request_status") as mock_request_status: + mock_request_status.return_value = MOCK_MINIMAL_STATUS + future = utcnow() + timedelta(minutes=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + # The state should become unknown again. + assert hass.states.get("sensor.last_self_test").state == STATE_UNKNOWN diff --git a/tests/components/api/test_init.py b/tests/components/api/test_init.py index a1453315dbf..abce262fd12 100644 --- a/tests/components/api/test_init.py +++ b/tests/components/api/test_init.py @@ -3,6 +3,7 @@ import asyncio from http import HTTPStatus import json +from typing import Any from unittest.mock import patch from aiohttp import ServerDisconnectedError, web @@ -355,6 +356,67 @@ async def test_api_call_service_with_data( assert state["attributes"] == {"data": 1} +SERVICE_DICT = {"changed_states": [], "service_response": {"foo": "bar"}} +RESP_REQUIRED = { + "message": ( + "Service call requires responses but caller did not ask for " + "responses. Add ?return_response to query parameters." + ) +} +RESP_UNSUPPORTED = { + "message": "Service does not support responses. Remove return_response from request." 
+} + + +@pytest.mark.parametrize( + ( + "supports_response", + "requested_response", + "expected_number_of_service_calls", + "expected_status", + "expected_response", + ), + [ + (ha.SupportsResponse.ONLY, True, 1, HTTPStatus.OK, SERVICE_DICT), + (ha.SupportsResponse.ONLY, False, 0, HTTPStatus.BAD_REQUEST, RESP_REQUIRED), + (ha.SupportsResponse.OPTIONAL, True, 1, HTTPStatus.OK, SERVICE_DICT), + (ha.SupportsResponse.OPTIONAL, False, 1, HTTPStatus.OK, []), + (ha.SupportsResponse.NONE, True, 0, HTTPStatus.BAD_REQUEST, RESP_UNSUPPORTED), + (ha.SupportsResponse.NONE, False, 1, HTTPStatus.OK, []), + ], +) +async def test_api_call_service_returns_response_requested_response( + hass: HomeAssistant, + mock_api_client: TestClient, + supports_response: ha.SupportsResponse, + requested_response: bool, + expected_number_of_service_calls: int, + expected_status: int, + expected_response: Any, +) -> None: + """Test if the API allows us to call a service.""" + test_value = [] + + @ha.callback + def listener(service_call): + """Record that our service got called.""" + test_value.append(1) + return {"foo": "bar"} + + hass.services.async_register( + "test_domain", "test_service", listener, supports_response=supports_response + ) + + resp = await mock_api_client.post( + "/api/services/test_domain/test_service" + + ("?return_response" if requested_response else "") + ) + assert resp.status == expected_status + await hass.async_block_till_done() + assert len(test_value) == expected_number_of_service_calls + assert await resp.json() == expected_response + + async def test_api_call_service_client_closed( hass: HomeAssistant, mock_api_client: TestClient ) -> None: @@ -770,4 +832,43 @@ async def test_api_core_state(hass: HomeAssistant, mock_api_client: TestClient) resp = await mock_api_client.get("/api/core/state") assert resp.status == HTTPStatus.OK json = await resp.json() - assert json["state"] == "RUNNING" + assert json == { + "state": "RUNNING", + "recorder_state": 
{"migration_in_progress": False, "migration_is_live": False}, + } + + +@pytest.mark.parametrize( + ("migration_in_progress", "migration_is_live"), + [ + (False, False), + (False, True), + (True, False), + (True, True), + ], +) +async def test_api_core_state_recorder_migrating( + hass: HomeAssistant, + mock_api_client: TestClient, + migration_in_progress: bool, + migration_is_live: bool, +) -> None: + """Test getting core status.""" + with ( + patch( + "homeassistant.helpers.recorder.async_migration_in_progress", + return_value=migration_in_progress, + ), + patch( + "homeassistant.helpers.recorder.async_migration_is_live", + return_value=migration_is_live, + ), + ): + resp = await mock_api_client.get("/api/core/state") + assert resp.status == HTTPStatus.OK + json = await resp.json() + expected_recorder_state = { + "migration_in_progress": migration_in_progress, + "migration_is_live": migration_is_live, + } + assert json == {"state": "RUNNING", "recorder_state": expected_recorder_state} diff --git a/tests/components/apple_tv/common.py b/tests/components/apple_tv/common.py index ddb8c1348d9..8a81536c792 100644 --- a/tests/components/apple_tv/common.py +++ b/tests/components/apple_tv/common.py @@ -1,5 +1,7 @@ """Test code shared between test files.""" +from typing import Any + from pyatv import conf, const, interface from pyatv.const import Protocol @@ -7,7 +9,7 @@ from pyatv.const import Protocol class MockPairingHandler(interface.PairingHandler): """Mock for PairingHandler in pyatv.""" - def __init__(self, *args): + def __init__(self, *args: Any) -> None: """Initialize a new MockPairingHandler.""" super().__init__(*args) self.pin_code = None diff --git a/tests/components/apple_tv/conftest.py b/tests/components/apple_tv/conftest.py index 36061924db5..78982a8d51c 100644 --- a/tests/components/apple_tv/conftest.py +++ b/tests/components/apple_tv/conftest.py @@ -1,12 +1,12 @@ """Fixtures for component.""" +from collections.abc import Generator from unittest.mock import 
AsyncMock, MagicMock, patch from pyatv import conf from pyatv.const import PairingRequirement, Protocol from pyatv.support import http import pytest -from typing_extensions import Generator from .common import MockPairingHandler, airplay_service, create_conf, mrp_service diff --git a/tests/components/apple_tv/test_config_flow.py b/tests/components/apple_tv/test_config_flow.py index b8f49e7c8f5..f37042a6f50 100644 --- a/tests/components/apple_tv/test_config_flow.py +++ b/tests/components/apple_tv/test_config_flow.py @@ -1,12 +1,12 @@ """Test config flow.""" +from collections.abc import Generator from ipaddress import IPv4Address, ip_address from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from pyatv import exceptions from pyatv.const import PairingRequirement, Protocol import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components import zeroconf diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index c427b1d07e0..d90084fa7c9 100644 --- a/tests/components/application_credentials/test_init.py +++ b/tests/components/application_credentials/test_init.py @@ -2,13 +2,12 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Generator import logging from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow from homeassistant.components.application_credentials import ( @@ -125,7 +124,12 @@ def config_flow_handler( class OAuthFixture: """Fixture to facilitate testing an OAuth flow.""" - def __init__(self, hass, hass_client, aioclient_mock): + def __init__( + self, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + ) -> None: """Initialize OAuthFixture.""" self.hass = hass self.hass_client = 
hass_client @@ -185,7 +189,7 @@ async def oauth_fixture( class Client: """Test client with helper methods for application credentials websocket.""" - def __init__(self, client): + def __init__(self, client) -> None: """Initialize Client.""" self.client = client self.id = 0 diff --git a/tests/components/apprise/test_notify.py b/tests/components/apprise/test_notify.py index 7d37d7a5d99..d73fa72d6c7 100644 --- a/tests/components/apprise/test_notify.py +++ b/tests/components/apprise/test_notify.py @@ -1,14 +1,27 @@ """The tests for the apprise notification platform.""" +import logging from pathlib import Path from unittest.mock import MagicMock, patch +import pytest + from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component BASE_COMPONENT = "notify" +@pytest.fixture(autouse=True) +def reset_log_level(): + """Set and reset log level after each test case.""" + logger = logging.getLogger("apprise") + orig_level = logger.level + logger.setLevel(logging.DEBUG) + yield + logger.setLevel(orig_level) + + async def test_apprise_config_load_fail01(hass: HomeAssistant) -> None: """Test apprise configuration failures 1.""" diff --git a/tests/components/aprilaire/test_config_flow.py b/tests/components/aprilaire/test_config_flow.py index c9cba2b3fd6..e4b7c167256 100644 --- a/tests/components/aprilaire/test_config_flow.py +++ b/tests/components/aprilaire/test_config_flow.py @@ -104,7 +104,7 @@ async def test_config_flow_data(client: AprilaireClient, hass: HomeAssistant) -> abort_if_unique_id_configured_mock.assert_called_once() create_entry_mock.assert_called_once_with( - title="Aprilaire", + title="AprilAire", data={ "host": "localhost", "port": 7000, diff --git a/tests/components/aprs/test_device_tracker.py b/tests/components/aprs/test_device_tracker.py index 4cdff41598f..4142195b0b9 100644 --- a/tests/components/aprs/test_device_tracker.py +++ b/tests/components/aprs/test_device_tracker.py @@ -1,11 +1,11 @@ """Test APRS device tracker.""" 
+from collections.abc import Generator from unittest.mock import MagicMock, Mock, patch import aprslib from aprslib import IS import pytest -from typing_extensions import Generator from homeassistant.components.aprs import device_tracker from homeassistant.core import HomeAssistant diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index cd04346c070..7e6140e8279 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the APsystems Local API tests.""" -from unittest.mock import AsyncMock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch -from APsystemsEZ1 import ReturnDeviceInfo, ReturnOutputData +from APsystemsEZ1 import ReturnAlarmInfo, ReturnDeviceInfo, ReturnOutputData, Status import pytest -from typing_extensions import Generator from homeassistant.components.apsystems.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS @@ -23,7 +23,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_apsystems() -> Generator[AsyncMock, None, None]: +def mock_apsystems() -> Generator[MagicMock]: """Mock APSystems lib.""" with ( patch( @@ -52,6 +52,13 @@ def mock_apsystems() -> Generator[AsyncMock, None, None]: e2=6.0, te2=7.0, ) + mock_api.get_alarm_info.return_value = ReturnAlarmInfo( + og=Status.normal, + isce1=Status.alarm, + isce2=Status.normal, + oe=Status.alarm, + ) + mock_api.get_device_power_status.return_value = Status.normal yield mock_api diff --git a/tests/components/apsystems/snapshots/test_binary_sensor.ambr b/tests/components/apsystems/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..0875c88976b --- /dev/null +++ b/tests/components/apsystems/snapshots/test_binary_sensor.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.mock_title_dc_1_short_circuit_error_status-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_dc_1_short_circuit_error_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC 1 short circuit error status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_1_short_circuit_error_status', + 'unique_id': 'MY_SERIAL_NUMBER_dc_1_short_circuit_error_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_1_short_circuit_error_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title DC 1 short circuit error status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_dc_1_short_circuit_error_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_2_short_circuit_error_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_dc_2_short_circuit_error_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC 2 short circuit error status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'dc_2_short_circuit_error_status', + 'unique_id': 'MY_SERIAL_NUMBER_dc_2_short_circuit_error_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_2_short_circuit_error_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title DC 2 short circuit error status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_dc_2_short_circuit_error_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_off_grid_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_off_grid_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off grid status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_grid_status', + 'unique_id': 'MY_SERIAL_NUMBER_off_grid_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_off_grid_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Off grid status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_off_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_output_fault_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_output_fault_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Output fault status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'output_fault_status', + 'unique_id': 'MY_SERIAL_NUMBER_output_fault_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_output_fault_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Output fault status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_output_fault_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/apsystems/snapshots/test_switch.ambr b/tests/components/apsystems/snapshots/test_switch.ambr new file mode 100644 index 00000000000..6daa9fd6e14 --- /dev/null +++ b/tests/components/apsystems/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_all_entities[switch.mock_title_inverter_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_inverter_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inverter status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'inverter_status', + 'unique_id': 'MY_SERIAL_NUMBER_inverter_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.mock_title_inverter_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title Inverter status', + }), + 'context': , + 'entity_id': 'switch.mock_title_inverter_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/apsystems/test_binary_sensor.py b/tests/components/apsystems/test_binary_sensor.py new file mode 100644 index 00000000000..0c6fbffc93c --- /dev/null +++ b/tests/components/apsystems/test_binary_sensor.py @@ -0,0 +1,31 @@ +"""Test the APSystem binary sensor module.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.apsystems.PLATFORMS", + [Platform.BINARY_SENSOR], + ): + await setup_integration(hass, mock_config_entry) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) diff --git a/tests/components/apsystems/test_config_flow.py b/tests/components/apsystems/test_config_flow.py index e3fcdf67dcc..3d78524a529 100644 --- a/tests/components/apsystems/test_config_flow.py +++ b/tests/components/apsystems/test_config_flow.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock from homeassistant.components.apsystems.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_IP_ADDRESS +from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -27,6 +27,24 @@ async def test_form_create_success( assert result["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" +async def test_form_create_success_custom_port( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_apsystems: AsyncMock +) -> None: + """Test we handle creating with custom port with success.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_IP_ADDRESS: "127.0.0.1", + CONF_PORT: 8042, + }, + ) + assert result["result"].unique_id == "MY_SERIAL_NUMBER" + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" + assert result["data"].get(CONF_PORT) == 8042 + + async def test_form_cannot_connect_and_recover( hass: HomeAssistant, mock_apsystems: AsyncMock, 
mock_setup_entry: AsyncMock ) -> None: @@ -57,6 +75,33 @@ async def test_form_cannot_connect_and_recover( assert result2["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" +async def test_form_cannot_connect_and_recover_custom_port( + hass: HomeAssistant, mock_apsystems: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle cannot connect error but recovering with custom port.""" + + mock_apsystems.get_device_info.side_effect = TimeoutError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_IP_ADDRESS: "127.0.0.2", CONF_PORT: 8042}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_apsystems.get_device_info.side_effect = None + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_IP_ADDRESS: "127.0.0.1", CONF_PORT: 8042}, + ) + assert result2["result"].unique_id == "MY_SERIAL_NUMBER" + assert result2.get("type") is FlowResultType.CREATE_ENTRY + assert result2["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" + assert result2["data"].get(CONF_PORT) == 8042 + + async def test_form_unique_id_already_configured( hass: HomeAssistant, mock_setup_entry: AsyncMock, diff --git a/tests/components/apsystems/test_switch.py b/tests/components/apsystems/test_switch.py new file mode 100644 index 00000000000..afd889fe958 --- /dev/null +++ b/tests/components/apsystems/test_switch.py @@ -0,0 +1,31 @@ +"""Test the APSystem switch module.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.apsystems.PLATFORMS", + [Platform.SWITCH], + ): + await setup_integration(hass, mock_config_entry) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) diff --git a/tests/components/aquacell/conftest.py b/tests/components/aquacell/conftest.py index db27f51dc03..f5a741ceed8 100644 --- a/tests/components/aquacell/conftest.py +++ b/tests/components/aquacell/conftest.py @@ -2,7 +2,7 @@ from collections.abc import Generator from datetime import datetime -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from aioaquacell import AquacellApi, Softener import pytest @@ -19,7 +19,7 @@ from tests.common import MockConfigEntry, load_json_array_fixture @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.aquacell.async_setup_entry", return_value=True @@ -28,7 +28,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_aquacell_api() -> Generator[AsyncMock, None, None]: +def mock_aquacell_api() -> Generator[MagicMock]: """Build a fixture for the Aquacell API that authenticates successfully and returns a single softener.""" with ( patch( diff --git a/tests/components/arcam_fmj/conftest.py b/tests/components/arcam_fmj/conftest.py index 66850933cc7..ca4af1b00a3 100644 --- a/tests/components/arcam_fmj/conftest.py +++ b/tests/components/arcam_fmj/conftest.py @@ -1,11 +1,11 @@ """Tests for the arcam_fmj component.""" +from collections.abc import AsyncGenerator from 
unittest.mock import Mock, patch from arcam.fmj.client import Client from arcam.fmj.state import State import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.arcam_fmj.const import DEFAULT_NAME from homeassistant.components.arcam_fmj.media_player import ArcamFmj @@ -99,6 +99,7 @@ async def player_setup_fixture( return state_1 if zone == 2: return state_2 + raise ValueError(f"Unknown player zone: {zone}") await async_setup_component(hass, "homeassistant", {}) diff --git a/tests/components/arcam_fmj/test_config_flow.py b/tests/components/arcam_fmj/test_config_flow.py index 26e93354900..60c68c5e102 100644 --- a/tests/components/arcam_fmj/test_config_flow.py +++ b/tests/components/arcam_fmj/test_config_flow.py @@ -1,15 +1,14 @@ """Tests for the Arcam FMJ config flow module.""" +from collections.abc import Generator from dataclasses import replace from unittest.mock import AsyncMock, MagicMock, patch from arcam.fmj.client import ConnectionFailed import pytest -from typing_extensions import Generator from homeassistant.components import ssdp -from homeassistant.components.arcam_fmj.config_flow import get_entry_client -from homeassistant.components.arcam_fmj.const import DOMAIN, DOMAIN_DATA_ENTRIES +from homeassistant.components.arcam_fmj.const import DOMAIN from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE from homeassistant.core import HomeAssistant @@ -215,12 +214,3 @@ async def test_user_wrong( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == f"Arcam FMJ ({MOCK_HOST})" assert result["result"].unique_id is None - - -async def test_get_entry_client(hass: HomeAssistant) -> None: - """Test helper for configuration.""" - entry = MockConfigEntry( - domain=DOMAIN, data=MOCK_CONFIG_ENTRY, title=MOCK_NAME, unique_id=MOCK_UUID - ) - hass.data[DOMAIN_DATA_ENTRIES] = {entry.entry_id: "dummy"} - assert get_entry_client(hass, entry) == 
"dummy" diff --git a/tests/components/arve/conftest.py b/tests/components/arve/conftest.py index 40a5f98291b..8fc35e37000 100644 --- a/tests/components/arve/conftest.py +++ b/tests/components/arve/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Arve tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from asyncarve import ArveCustomer, ArveDevices, ArveSensPro, ArveSensProData import pytest -from typing_extensions import Generator from homeassistant.components.arve.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index f19e70a8ec1..b7bf83a7ed0 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -2,16 +2,21 @@ from __future__ import annotations -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Generator from pathlib import Path from typing import Any from unittest.mock import AsyncMock import pytest -from typing_extensions import Generator from homeassistant.components import stt, tts, wake_word from homeassistant.components.assist_pipeline import DOMAIN, select as assist_select +from homeassistant.components.assist_pipeline.const import ( + BYTES_PER_CHUNK, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, +) from homeassistant.components.assist_pipeline.pipeline import ( PipelineData, PipelineStorageCollection, @@ -34,11 +39,12 @@ from tests.common import ( _TRANSCRIPT = "test transcript" +BYTES_ONE_SECOND = SAMPLE_RATE * SAMPLE_WIDTH * SAMPLE_CHANNELS + @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir class BaseProvider: @@ -147,7 +153,7 @@ class MockTTSPlatform(MockPlatform): PLATFORM_SCHEMA = 
tts.PLATFORM_SCHEMA - def __init__(self, *, async_get_engine, **kwargs): + def __init__(self, *, async_get_engine, **kwargs: Any) -> None: """Initialize the tts platform.""" super().__init__(**kwargs) self.async_get_engine = async_get_engine @@ -174,7 +180,7 @@ def mock_stt_provider_entity() -> MockSttProviderEntity: class MockSttPlatform(MockPlatform): """Provide a fake STT platform.""" - def __init__(self, *, async_get_engine, **kwargs): + def __init__(self, *, async_get_engine, **kwargs: Any) -> None: """Initialize the stt platform.""" super().__init__(**kwargs) self.async_get_engine = async_get_engine @@ -463,3 +469,8 @@ def pipeline_data(hass: HomeAssistant, init_components) -> PipelineData: def pipeline_storage(pipeline_data) -> PipelineStorageCollection: """Return pipeline storage collection.""" return pipeline_data.pipeline_store + + +def make_10ms_chunk(header: bytes) -> bytes: + """Return 10ms of zeros with the given header.""" + return header + bytes(BYTES_PER_CHUNK - len(header)) diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index 2c506215c68..fb1ca6db121 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -440,7 +440,7 @@ # --- # name: test_device_capture_override.2 dict({ - 'audio': 'Y2h1bmsx', + 'audio': 'Y2h1bmsxAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=', 'channels': 1, 'rate': 16000, 'type': 'audio', @@ -663,7 +663,7 @@ # name: test_stt_stream_failed.2 None # --- -# name: 
test_text_only_pipeline +# name: test_text_only_pipeline[extra_msg0] dict({ 'language': 'en', 'pipeline': , @@ -673,7 +673,7 @@ }), }) # --- -# name: test_text_only_pipeline.1 +# name: test_text_only_pipeline[extra_msg0].1 dict({ 'conversation_id': 'mock-conversation-id', 'device_id': 'mock-device-id', @@ -682,7 +682,7 @@ 'language': 'en', }) # --- -# name: test_text_only_pipeline.2 +# name: test_text_only_pipeline[extra_msg0].2 dict({ 'intent_output': dict({ 'conversation_id': None, @@ -704,7 +704,51 @@ }), }) # --- -# name: test_text_only_pipeline.3 +# name: test_text_only_pipeline[extra_msg0].3 + None +# --- +# name: test_text_only_pipeline[extra_msg1] + dict({ + 'language': 'en', + 'pipeline': , + 'runner_data': dict({ + 'stt_binary_handler_id': None, + 'timeout': 300, + }), + }) +# --- +# name: test_text_only_pipeline[extra_msg1].1 + dict({ + 'conversation_id': 'mock-conversation-id', + 'device_id': 'mock-device-id', + 'engine': 'conversation.home_assistant', + 'intent_input': 'Are the lights on?', + 'language': 'en', + }) +# --- +# name: test_text_only_pipeline[extra_msg1].2 + dict({ + 'intent_output': dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any area called are', + }), + }), + }), + }), + }) +# --- +# name: test_text_only_pipeline[extra_msg1].3 None # --- # name: test_text_pipeline_timeout diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index f9b91af3bf1..4206a288331 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -13,6 +13,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import assist_pipeline, media_source, stt, tts from homeassistant.components.assist_pipeline.const 
import ( + BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DOMAIN, ) @@ -20,16 +21,16 @@ from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component from .conftest import ( + BYTES_ONE_SECOND, MockSttProvider, MockSttProviderEntity, MockTTSProvider, MockWakeWordEntity, + make_10ms_chunk, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -BYTES_ONE_SECOND = 16000 * 2 - def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: """Process events to remove dynamic values.""" @@ -58,8 +59,8 @@ async def test_pipeline_from_audio_stream_auto( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -75,13 +76,13 @@ async def test_pipeline_from_audio_stream_auto( channel=stt.AudioChannels.CHANNEL_MONO, ), stt_stream=audio_data(), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot - assert mock_stt_provider.received == [b"part1", b"part2"] + assert len(mock_stt_provider.received) == 2 + assert mock_stt_provider.received[0].startswith(b"part1") + assert mock_stt_provider.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_legacy( @@ -100,8 +101,8 @@ async def test_pipeline_from_audio_stream_legacy( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline using an stt entity @@ -140,13 +141,13 @@ async def test_pipeline_from_audio_stream_legacy( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, 
is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot - assert mock_stt_provider.received == [b"part1", b"part2"] + assert len(mock_stt_provider.received) == 2 + assert mock_stt_provider.received[0].startswith(b"part1") + assert mock_stt_provider.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_entity( @@ -165,8 +166,8 @@ async def test_pipeline_from_audio_stream_entity( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline using an stt entity @@ -205,13 +206,13 @@ async def test_pipeline_from_audio_stream_entity( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot - assert mock_stt_provider_entity.received == [b"part1", b"part2"] + assert len(mock_stt_provider_entity.received) == 2 + assert mock_stt_provider_entity.received[0].startswith(b"part1") + assert mock_stt_provider_entity.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_no_stt( @@ -230,8 +231,8 @@ async def test_pipeline_from_audio_stream_no_stt( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline without stt support @@ -271,9 +272,7 @@ async def test_pipeline_from_audio_stream_no_stt( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert not events @@ -293,8 +292,8 @@ async 
def test_pipeline_from_audio_stream_unknown_pipeline( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Try to use the created pipeline @@ -335,24 +334,25 @@ async def test_pipeline_from_audio_stream_wake_word( # [0, 2, ...] wake_chunk_2 = bytes(it.islice(it.cycle(range(0, 256, 2)), BYTES_ONE_SECOND)) - bytes_per_chunk = int(0.01 * BYTES_ONE_SECOND) + samples_per_chunk = 160 # 10ms @ 16Khz + bytes_per_chunk = samples_per_chunk * 2 # 16-bit async def audio_data(): - # 1 second in 10 ms chunks + # 1 second in chunks i = 0 while i < len(wake_chunk_1): yield wake_chunk_1[i : i + bytes_per_chunk] i += bytes_per_chunk - # 1 second in 30 ms chunks + # 1 second in chunks i = 0 while i < len(wake_chunk_2): yield wake_chunk_2[i : i + bytes_per_chunk] i += bytes_per_chunk - yield b"wake word!" - yield b"part1" - yield b"part2" + for header in (b"wake word!", b"part1", b"part2"): + yield make_10ms_chunk(header) + yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -372,9 +372,7 @@ async def test_pipeline_from_audio_stream_wake_word( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot @@ -390,7 +388,9 @@ async def test_pipeline_from_audio_stream_wake_word( ) assert first_chunk == wake_chunk_1[len(wake_chunk_1) // 2 :] + wake_chunk_2 - assert mock_stt_provider.received[-3:] == [b"queued audio", b"part1", b"part2"] + assert mock_stt_provider.received[-3] == b"queued audio" + assert mock_stt_provider.received[-2].startswith(b"part1") + assert mock_stt_provider.received[-1].startswith(b"part2") async def test_pipeline_save_audio( @@ -413,13 +413,11 @@ async def test_pipeline_save_audio( 
pipeline = assist_pipeline.async_get_pipeline(hass) events: list[assist_pipeline.PipelineEvent] = [] - # Pad out to an even number of bytes since these "samples" will be saved - # as 16-bit values. async def audio_data(): - yield b"wake word_" + yield make_10ms_chunk(b"wake word") # queued audio - yield b"part1_" - yield b"part2_" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -438,9 +436,7 @@ async def test_pipeline_save_audio( pipeline_id=pipeline.id, start_stage=assist_pipeline.PipelineStage.WAKE_WORD, end_stage=assist_pipeline.PipelineStage.STT, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) pipeline_dirs = list(temp_dir.iterdir()) @@ -464,12 +460,16 @@ async def test_pipeline_save_audio( # Verify wake file with wave.open(str(wake_file), "rb") as wake_wav: wake_data = wake_wav.readframes(wake_wav.getnframes()) - assert wake_data == b"wake word_" + assert wake_data.startswith(b"wake word") # Verify stt file with wave.open(str(stt_file), "rb") as stt_wav: stt_data = stt_wav.readframes(stt_wav.getnframes()) - assert stt_data == b"queued audiopart1_part2_" + assert stt_data.startswith(b"queued audio") + stt_data = stt_data[len(b"queued audio") :] + assert stt_data.startswith(b"part1") + stt_data = stt_data[BYTES_PER_CHUNK:] + assert stt_data.startswith(b"part2") async def test_pipeline_saved_audio_with_device_id( @@ -652,10 +652,10 @@ async def test_wake_word_detection_aborted( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"silence!" - yield b"wake word!" 
- yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"silence!") + yield make_10ms_chunk(b"wake word!") + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" pipeline_store = pipeline_data.pipeline_store @@ -685,9 +685,7 @@ async def test_wake_word_detection_aborted( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ), ) await pipeline_input.validate() diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index 3e1e99412d8..45a661c0f07 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -1,10 +1,10 @@ """Websocket tests for Voice Assistant integration.""" +from collections.abc import AsyncGenerator from typing import Any from unittest.mock import ANY, patch import pytest -from typing_extensions import AsyncGenerator from homeassistant.components import conversation from homeassistant.components.assist_pipeline.const import DOMAIN diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index 139ae915263..db039ab3140 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -1,11 +1,9 @@ """Tests for voice command segmenter.""" import itertools as it -from unittest.mock import patch from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, - VoiceActivityDetector, VoiceCommandSegmenter, chunk_samples, ) @@ -19,15 +17,12 @@ def test_silence() -> None: # True return value indicates voice command has not finished assert segmenter.process(_ONE_SECOND * 3, False) + assert not segmenter.in_command def test_speech() -> None: """Test that silence + speech + silence triggers a voice command.""" - def 
is_speech(chunk): - """Anything non-zero is speech.""" - return sum(chunk) > 0 - segmenter = VoiceCommandSegmenter() # silence @@ -35,68 +30,52 @@ def test_speech() -> None: # "speech" assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command # silence # False return value indicates voice command is finished assert not segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command def test_audio_buffer() -> None: """Test audio buffer wrapping.""" - class DisabledVad(VoiceActivityDetector): - def is_speech(self, chunk): - return False + samples_per_chunk = 160 # 10 ms + bytes_per_chunk = samples_per_chunk * 2 + leftover_buffer = AudioBuffer(bytes_per_chunk) - @property - def samples_per_chunk(self): - return 160 # 10 ms + # Partially fill audio buffer + half_chunk = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk // 2)) + chunks = list(chunk_samples(half_chunk, bytes_per_chunk, leftover_buffer)) - vad = DisabledVad() - bytes_per_chunk = vad.samples_per_chunk * 2 - vad_buffer = AudioBuffer(bytes_per_chunk) - segmenter = VoiceCommandSegmenter() + assert not chunks + assert leftover_buffer.bytes() == half_chunk - with patch.object(vad, "is_speech", return_value=False) as mock_process: - # Partially fill audio buffer - half_chunk = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk // 2)) - segmenter.process_with_vad(half_chunk, vad, vad_buffer) + # Fill and wrap with 1/4 chunk left over + three_quarters_chunk = bytes( + it.islice(it.cycle(range(256)), int(0.75 * bytes_per_chunk)) + ) + chunks = list(chunk_samples(three_quarters_chunk, bytes_per_chunk, leftover_buffer)) - assert not mock_process.called - assert vad_buffer is not None - assert vad_buffer.bytes() == half_chunk + assert len(chunks) == 1 + assert ( + leftover_buffer.bytes() + == three_quarters_chunk[len(three_quarters_chunk) - (bytes_per_chunk // 4) :] + ) + assert chunks[0] == half_chunk + three_quarters_chunk[: bytes_per_chunk // 2] - # Fill and wrap with 1/4 chunk left 
over - three_quarters_chunk = bytes( - it.islice(it.cycle(range(256)), int(0.75 * bytes_per_chunk)) - ) - segmenter.process_with_vad(three_quarters_chunk, vad, vad_buffer) + # Run 2 chunks through + leftover_buffer.clear() + assert len(leftover_buffer) == 0 - assert mock_process.call_count == 1 - assert ( - vad_buffer.bytes() - == three_quarters_chunk[ - len(three_quarters_chunk) - (bytes_per_chunk // 4) : - ] - ) - assert ( - mock_process.call_args[0][0] - == half_chunk + three_quarters_chunk[: bytes_per_chunk // 2] - ) + two_chunks = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk * 2)) + chunks = list(chunk_samples(two_chunks, bytes_per_chunk, leftover_buffer)) - # Run 2 chunks through - segmenter.reset() - vad_buffer.clear() - assert len(vad_buffer) == 0 - - mock_process.reset_mock() - two_chunks = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk * 2)) - segmenter.process_with_vad(two_chunks, vad, vad_buffer) - - assert mock_process.call_count == 2 - assert len(vad_buffer) == 0 - assert mock_process.call_args_list[0][0][0] == two_chunks[:bytes_per_chunk] - assert mock_process.call_args_list[1][0][0] == two_chunks[bytes_per_chunk:] + assert len(chunks) == 2 + assert len(leftover_buffer) == 0 + assert chunks[0] == two_chunks[:bytes_per_chunk] + assert chunks[1] == two_chunks[bytes_per_chunk:] def test_partial_chunk() -> None: @@ -127,41 +106,103 @@ def test_chunk_samples_leftover() -> None: assert leftover_chunk_buffer.bytes() == bytes([5, 6]) -def test_vad_no_chunking() -> None: - """Test VAD that doesn't require chunking.""" +def test_silence_seconds() -> None: + """Test end of voice command silence seconds.""" - class VadNoChunk(VoiceActivityDetector): - def is_speech(self, chunk: bytes) -> bool: - return sum(chunk) > 0 + segmenter = VoiceCommandSegmenter(silence_seconds=1.0) - @property - def samples_per_chunk(self) -> int | None: - return None + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # 
"speech" + assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.in_command + + # exactly enough silence now + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.in_command + + +def test_silence_reset() -> None: + """Test that speech resets end of voice command detection.""" + + segmenter = VoiceCommandSegmenter(silence_seconds=1.0, reset_seconds=0.5) + + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # "speech" + assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.in_command + + # speech should reset silence detection + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.in_command + + # exactly enough silence now + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.in_command + + +def test_speech_reset() -> None: + """Test that silence resets start of voice command detection.""" - vad = VadNoChunk() segmenter = VoiceCommandSegmenter( - speech_seconds=1.0, silence_seconds=1.0, reset_seconds=0.5 + silence_seconds=1.0, reset_seconds=0.5, speech_seconds=1.0 ) - silence = bytes([0] * 16000) - speech = bytes([255] * (16000 // 2)) - # Test with differently-sized chunks - assert vad.is_speech(speech) - assert not vad.is_speech(silence) + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command - # Simulate voice command - assert segmenter.process_with_vad(silence, vad, None) - # begin - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - # reset with 
silence - assert segmenter.process_with_vad(silence, vad, None) - # resume - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - # end - assert segmenter.process_with_vad(silence, vad, None) - assert not segmenter.process_with_vad(silence, vad, None) + # not enough speech to start voice command + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.in_command + + # silence should reset speech detection + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # not enough speech to start voice command + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.in_command + + # exactly enough speech now + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.in_command + + +def test_timeout() -> None: + """Test that voice command detection times out.""" + + segmenter = VoiceCommandSegmenter(timeout_seconds=1.0) + + # not enough to time out + assert not segmenter.timed_out + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.timed_out + + # enough to time out + assert not segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.timed_out + + # flag resets with more audio + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.timed_out + + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.timed_out diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index e08dd9685ea..2da914f4252 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -5,9 +5,15 @@ import base64 from typing import Any from unittest.mock import ANY, patch +import pytest from syrupy.assertion import SnapshotAssertion -from 
homeassistant.components.assist_pipeline.const import DOMAIN +from homeassistant.components.assist_pipeline.const import ( + DOMAIN, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, +) from homeassistant.components.assist_pipeline.pipeline import ( DeviceAudioQueue, Pipeline, @@ -17,17 +23,31 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr -from .conftest import MockWakeWordEntity, MockWakeWordEntity2 +from .conftest import ( + BYTES_ONE_SECOND, + BYTES_PER_CHUNK, + MockWakeWordEntity, + MockWakeWordEntity2, + make_10ms_chunk, +) from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator +@pytest.mark.parametrize( + "extra_msg", + [ + {}, + {"pipeline": "conversation.home_assistant"}, + ], +) async def test_text_only_pipeline( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, snapshot: SnapshotAssertion, + extra_msg: dict[str, Any], ) -> None: """Test events from a pipeline run with text input (no STT/TTS).""" events = [] @@ -42,6 +62,7 @@ async def test_text_only_pipeline( "conversation_id": "mock-conversation-id", "device_id": "mock-device-id", } + | extra_msg ) # result @@ -195,7 +216,7 @@ async def test_audio_pipeline_with_wake_word_timeout( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "timeout": 1, }, } @@ -219,7 +240,7 @@ async def test_audio_pipeline_with_wake_word_timeout( events.append(msg["event"]) # 2 seconds of silence - await client.send_bytes(bytes([1]) + bytes(16000 * 2 * 2)) + await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) # Time out error msg = await client.receive_json() @@ -249,12 +270,7 @@ async def test_audio_pipeline_with_wake_word_no_timeout( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "timeout": 0, - "no_vad": True, - 
"no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, } ) @@ -277,9 +293,10 @@ async def test_audio_pipeline_with_wake_word_no_timeout( events.append(msg["event"]) # "audio" - await client.send_bytes(bytes([handler_id]) + b"wake word") + await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake word")) - msg = await client.receive_json() + async with asyncio.timeout(1): + msg = await client.receive_json() assert msg["event"]["type"] == "wake_word-end" assert msg["event"]["data"] == snapshot events.append(msg["event"]) @@ -360,7 +377,7 @@ async def test_audio_pipeline_no_wake_word_engine( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, }, } ) @@ -397,7 +414,7 @@ async def test_audio_pipeline_no_wake_word_entity( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, }, } ) @@ -1180,6 +1197,31 @@ async def test_get_pipeline( "wake_word_id": None, } + # Get conversation agent as pipeline + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline/get", + "pipeline_id": "conversation.home_assistant", + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == { + "conversation_engine": "conversation.home_assistant", + "conversation_language": "en", + "id": ANY, + "language": "en", + "name": "Home Assistant", + # It found these defaults + "stt_engine": "test", + "stt_language": "en-US", + "tts_engine": "test", + "tts_language": "en-US", + "tts_voice": "james_earl_jones", + "wake_word_entity": None, + "wake_word_id": None, + } + await client.send_json_auto_id( { "type": "assist_pipeline/pipeline/get", @@ -1741,7 +1783,7 @@ async def test_audio_pipeline_with_enhancements( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, # Enhancements "noise_suppression_level": 2, "auto_gain_dbfs": 15, @@ 
-1771,7 +1813,7 @@ async def test_audio_pipeline_with_enhancements( # One second of silence. # This will pass through the audio enhancement pipeline, but we don't test # the actual output. - await client.send_bytes(bytes([handler_id]) + bytes(16000 * 2)) + await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) # End of audio stream (handler id + empty payload) await client.send_bytes(bytes([handler_id])) @@ -1841,11 +1883,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1854,11 +1892,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1892,8 +1926,8 @@ async def test_wake_word_cooldown_same_id( assert msg["event"]["data"] == snapshot # Wake both up at the same time - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events error_data: dict[str, Any] | None = None @@ -1932,11 +1966,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1945,11 +1975,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": 
True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1983,8 +2009,8 @@ async def test_wake_word_cooldown_different_ids( assert msg["event"]["data"] == snapshot # Wake both up at the same time, but they will have different wake word ids - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events msg = await client_1.receive_json() @@ -2059,11 +2085,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_1, "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -2074,11 +2096,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_2, "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -2113,8 +2131,8 @@ async def test_wake_word_cooldown_different_entities( # Wake both up at the same time. # They will have the same wake word id, but different entities. 
- await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events error_data: dict[str, Any] | None = None @@ -2152,7 +2170,11 @@ async def test_device_capture( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + audio_chunks = [ + make_10ms_chunk(b"chunk1"), + make_10ms_chunk(b"chunk2"), + make_10ms_chunk(b"chunk3"), + ] # Start capture client_capture = await hass_ws_client(hass) @@ -2175,11 +2197,7 @@ async def test_device_capture( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2230,9 +2248,9 @@ async def test_device_capture( # Verify audio chunks for i, audio_chunk in enumerate(audio_chunks): assert events[i]["type"] == "audio" - assert events[i]["rate"] == 16000 - assert events[i]["width"] == 2 - assert events[i]["channels"] == 1 + assert events[i]["rate"] == SAMPLE_RATE + assert events[i]["width"] == SAMPLE_WIDTH + assert events[i]["channels"] == SAMPLE_CHANNELS # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2257,7 +2275,11 @@ async def test_device_capture_override( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + audio_chunks = [ + make_10ms_chunk(b"chunk1"), + make_10ms_chunk(b"chunk2"), + make_10ms_chunk(b"chunk3"), + ] # Start first capture client_capture_1 = await hass_ws_client(hass) @@ -2280,11 +2302,7 @@ async def test_device_capture_override( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, 
- "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2367,9 +2385,9 @@ async def test_device_capture_override( # Verify all but first audio chunk for i, audio_chunk in enumerate(audio_chunks[1:]): assert events[i]["type"] == "audio" - assert events[i]["rate"] == 16000 - assert events[i]["width"] == 2 - assert events[i]["channels"] == 1 + assert events[i]["rate"] == SAMPLE_RATE + assert events[i]["width"] == SAMPLE_WIDTH + assert events[i]["channels"] == SAMPLE_CHANNELS # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2429,11 +2447,7 @@ async def test_device_capture_queue_full( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2454,8 +2468,8 @@ async def test_device_capture_queue_full( assert msg["event"]["type"] == "stt-start" assert msg["event"]["data"] == snapshot - # Single sample will "overflow" the queue - await client_pipeline.send_bytes(bytes([handler_id, 0, 0])) + # Single chunk will "overflow" the queue + await client_pipeline.send_bytes(bytes([handler_id]) + bytes(BYTES_PER_CHUNK)) # End of audio stream await client_pipeline.send_bytes(bytes([handler_id])) @@ -2563,7 +2577,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2575,7 +2589,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2634,7 +2648,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": 
SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2646,7 +2660,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "hey_jarvis", }, } diff --git a/tests/components/asterisk_mbox/__init__.py b/tests/components/asterisk_mbox/__init__.py deleted file mode 100644 index 79e3675ad07..00000000000 --- a/tests/components/asterisk_mbox/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the asterisk component.""" diff --git a/tests/components/asterisk_mbox/const.py b/tests/components/asterisk_mbox/const.py deleted file mode 100644 index 945c6b28d30..00000000000 --- a/tests/components/asterisk_mbox/const.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Asterisk tests constants.""" - -from homeassistant.components.asterisk_mbox import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT - -CONFIG = { - DOMAIN: { - CONF_HOST: "localhost", - CONF_PASSWORD: "password", - CONF_PORT: 1234, - } -} diff --git a/tests/components/asterisk_mbox/test_init.py b/tests/components/asterisk_mbox/test_init.py deleted file mode 100644 index 4800ada0ec4..00000000000 --- a/tests/components/asterisk_mbox/test_init.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Test mailbox.""" - -from unittest.mock import Mock, patch - -import pytest -from typing_extensions import Generator - -from homeassistant.components.asterisk_mbox import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from .const import CONFIG - - -@pytest.fixture -def client() -> Generator[Mock]: - """Mock client.""" - with patch( - "homeassistant.components.asterisk_mbox.asteriskClient", autospec=True - ) as client: - yield client - - -async def test_repair_issue_is_created( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - client: Mock, -) -> None: - """Test repair issue is 
created.""" - assert await async_setup_component(hass, DOMAIN, CONFIG) - await hass.async_block_till_done() - assert ( - DOMAIN, - "deprecated_integration", - ) in issue_registry.issues diff --git a/tests/components/atag/conftest.py b/tests/components/atag/conftest.py index 83ba3e37aad..63476c4846d 100644 --- a/tests/components/atag/conftest.py +++ b/tests/components/atag/conftest.py @@ -1,10 +1,10 @@ """Provide common Atag fixtures.""" import asyncio +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/august/fixtures/get_lock.low_keypad_battery.json b/tests/components/august/fixtures/get_lock.low_keypad_battery.json index 08bdfaa76ed..43b5513a527 100644 --- a/tests/components/august/fixtures/get_lock.low_keypad_battery.json +++ b/tests/components/august/fixtures/get_lock.low_keypad_battery.json @@ -36,7 +36,7 @@ "currentFirmwareVersion": "2.27.0", "battery": {}, "batteryLevel": "Low", - "batteryRaw": 170 + "batteryRaw": 128 }, "OfflineKeys": { "created": [], diff --git a/tests/components/august/mocks.py b/tests/components/august/mocks.py index 62c01d38d0c..a0f5b55a607 100644 --- a/tests/components/august/mocks.py +++ b/tests/components/august/mocks.py @@ -25,7 +25,7 @@ from yalexs.activity import ( DoorOperationActivity, LockOperationActivity, ) -from yalexs.authenticator import AuthenticationState +from yalexs.authenticator_common import AuthenticationState from yalexs.const import Brand from yalexs.doorbell import Doorbell, DoorbellDetail from yalexs.lock import Lock, LockDetail @@ -58,6 +58,10 @@ def _mock_authenticator(auth_state): return authenticator +def _timetoken(): + return str(time.time_ns())[:-2] + + @patch("yalexs.manager.gateway.ApiAsync") @patch("yalexs.manager.gateway.AuthenticatorAsync.async_authenticate") async def _mock_setup_august( diff --git a/tests/components/august/test_binary_sensor.py 
b/tests/components/august/test_binary_sensor.py index 377a5bf2897..33d582de8d8 100644 --- a/tests/components/august/test_binary_sensor.py +++ b/tests/components/august/test_binary_sensor.py @@ -1,7 +1,6 @@ """The binary_sensor tests for the august platform.""" import datetime -import time from unittest.mock import Mock, patch from yalexs.pubnub_async import AugustPubNub @@ -25,15 +24,12 @@ from .mocks import ( _mock_doorbell_from_fixture, _mock_doorsense_enabled_august_lock_detail, _mock_lock_from_fixture, + _timetoken, ) from tests.common import async_fire_time_changed -def _timetoken(): - return str(time.time_ns())[:-2] - - async def test_doorsense(hass: HomeAssistant) -> None: """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_lock_from_fixture( @@ -98,7 +94,7 @@ async def test_create_doorbell(hass: HomeAssistant) -> None: ) assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF binary_sensor_k98gidt45gul_name_motion = hass.states.get( @@ -125,7 +121,7 @@ async def test_create_doorbell_offline(hass: HomeAssistant) -> None: ) assert binary_sensor_tmt100_name_online.state == STATE_OFF binary_sensor_tmt100_name_ding = hass.states.get( - "binary_sensor.tmt100_name_occupancy" + "binary_sensor.tmt100_name_doorbell_ding" ) assert binary_sensor_tmt100_name_ding.state == STATE_UNAVAILABLE @@ -147,13 +143,13 @@ async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: ) assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) 
native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) with patch( - "homeassistant.components.august.binary_sensor._native_datetime", + "homeassistant.components.august.util._native_datetime", return_value=native_time, ): async_fire_time_changed(hass, new_time) @@ -177,7 +173,7 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: ) assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF @@ -245,14 +241,14 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) with patch( - "homeassistant.components.august.binary_sensor._native_datetime", + "homeassistant.components.august.util._native_datetime", return_value=native_time, ): async_fire_time_changed(hass, new_time) @@ -276,20 +272,20 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_ON new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) with patch( - "homeassistant.components.august.binary_sensor._native_datetime", + "homeassistant.components.august.util._native_datetime", 
return_value=native_time, ): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF @@ -407,6 +403,6 @@ async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: await _create_august_with_devices(hass, [lock_one]) ding_sensor = hass.states.get( - "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_occupancy" + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" ) assert ding_sensor.state == STATE_OFF diff --git a/tests/components/august/test_config_flow.py b/tests/components/august/test_config_flow.py index aec08864c65..fdebb8d5c46 100644 --- a/tests/components/august/test_config_flow.py +++ b/tests/components/august/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from yalexs.authenticator import ValidationResult +from yalexs.authenticator_common import ValidationResult from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from homeassistant import config_entries diff --git a/tests/components/august/test_event.py b/tests/components/august/test_event.py new file mode 100644 index 00000000000..61b7560f462 --- /dev/null +++ b/tests/components/august/test_event.py @@ -0,0 +1,182 @@ +"""The event tests for the august.""" + +import datetime +from unittest.mock import Mock, patch + +from yalexs.pubnub_async import AugustPubNub + +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_august_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_lock_from_fixture, + _timetoken, +) + +from tests.common import async_fire_time_changed + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + 
"""Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_august_with_devices(hass, [doorbell_one]) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + +async def test_create_doorbell_offline(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_august_with_devices(hass, [doorbell_one]) + motion_state = hass.states.get("event.tmt100_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNAVAILABLE + doorbell_state = hass.states.get("event.tmt100_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNAVAILABLE + + +async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + activities = await _mock_activities_from_fixture( + hass, "get_activity.doorbell_motion.json" + ) + await _create_august_with_devices(hass, [doorbell_one], activities=activities) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.util._native_datetime", + return_value=native_time, + ): + 
async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state.state == isotime + + +async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: + """Test creation of a doorbell that can be updated via pubnub.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + pubnub = AugustPubNub() + + await _create_august_with_devices(hass, [doorbell_one], pubnub=pubnub) + assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + pubnub.message( + pubnub, + Mock( + channel=doorbell_one.pubsub_channel, + timetoken=_timetoken(), + message={ + "status": "doorbell_motion_detected", + "data": { + "event": "doorbell_motion_detected", + "image": { + "height": 640, + "width": 480, + "format": "jpg", + "created_at": "2021-03-16T02:36:26.886Z", + "bytes": 14061, + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" + ), + "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", + "etag": "09e839331c4ea59eef28081f2caa0e90", + }, + "doorbellName": "Front Door", + "callID": None, + "origin": "mars-api", + "mutableContent": True, + }, + }, + ), + ) + + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.util._native_datetime", + return_value=native_time, + ): + 
async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + + pubnub.message( + pubnub, + Mock( + channel=doorbell_one.pubsub_channel, + timetoken=_timetoken(), + message={ + "status": "buttonpush", + }, + ), + ) + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN + isotime = motion_state.state + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN + assert motion_state.state == isotime + + +async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: + """Test creation of a lock with a doorbell.""" + lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") + await _create_august_with_devices(hass, [lock_one]) + + doorbell_state = hass.states.get( + "event.a6697750d607098bae8d6baa11ef8063_name_doorbell" + ) + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN diff --git a/tests/components/august/test_gateway.py b/tests/components/august/test_gateway.py index e605fd74f0a..74266397ed5 100644 --- a/tests/components/august/test_gateway.py +++ b/tests/components/august/test_gateway.py @@ -50,5 +50,5 @@ async def _patched_refresh_access_token( ) await august_gateway.async_refresh_access_token_if_needed() refresh_access_token_mock.assert_called() - assert august_gateway.access_token == 
new_token + assert await august_gateway.async_get_access_token() == new_token assert august_gateway.authentication.access_token_expires == new_token_expire_time diff --git a/tests/components/august/test_sensor.py b/tests/components/august/test_sensor.py index 0227ee64ef1..67223e9dff0 100644 --- a/tests/components/august/test_sensor.py +++ b/tests/components/august/test_sensor.py @@ -88,7 +88,7 @@ async def test_create_lock_with_linked_keypad( assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "60" + assert state.state == "62" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry diff --git a/tests/components/aurora/conftest.py b/tests/components/aurora/conftest.py index 916f0925c4a..462203193f2 100644 --- a/tests/components/aurora/conftest.py +++ b/tests/components/aurora/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Aurora tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.aurora.const import CONF_THRESHOLD, DOMAIN from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE diff --git a/tests/components/autarco/__init__.py b/tests/components/autarco/__init__.py new file mode 100644 index 00000000000..208e5999fc7 --- /dev/null +++ b/tests/components/autarco/__init__.py @@ -0,0 +1,12 @@ +"""Tests for the Autarco integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the integration.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/autarco/conftest.py 
b/tests/components/autarco/conftest.py new file mode 100644 index 00000000000..c7a95d7aa23 --- /dev/null +++ b/tests/components/autarco/conftest.py @@ -0,0 +1,82 @@ +"""Common fixtures for the Autarco tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from autarco import AccountSite, Inverter, Solar +import pytest + +from homeassistant.components.autarco.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.autarco.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_autarco_client() -> Generator[AsyncMock]: + """Mock a Autarco client.""" + with ( + patch( + "homeassistant.components.autarco.Autarco", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.autarco.config_flow.Autarco", + new=mock_client, + ), + ): + client = mock_client.return_value + client.get_account.return_value = [ + AccountSite( + site_id=1, + public_key="key-public", + system_name="test-system", + retailer="test-retailer", + health="OK", + ) + ] + client.get_solar.return_value = Solar( + power_production=200, + energy_production_today=4, + energy_production_month=58, + energy_production_total=10379, + ) + client.get_inverters.return_value = { + "test-serial-1": Inverter( + serial_number="test-serial-1", + out_ac_power=200, + out_ac_energy_total=10379, + grid_turned_off=False, + health="OK", + ), + "test-serial-2": Inverter( + serial_number="test-serial-2", + out_ac_power=500, + out_ac_energy_total=10379, + grid_turned_off=False, + health="OK", + ), + } + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Autarco", + data={ + CONF_EMAIL: 
"test@autarco.com", + CONF_PASSWORD: "test-password", + }, + ) diff --git a/tests/components/autarco/snapshots/test_diagnostics.ambr b/tests/components/autarco/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..53d9f96fb86 --- /dev/null +++ b/tests/components/autarco/snapshots/test_diagnostics.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'sites_data': list([ + dict({ + 'health': 'OK', + 'id': 1, + 'inverters': list([ + dict({ + 'grid_turned_off': False, + 'health': 'OK', + 'out_ac_energy_total': 10379, + 'out_ac_power': 200, + 'serial_number': 'test-serial-1', + }), + dict({ + 'grid_turned_off': False, + 'health': 'OK', + 'out_ac_energy_total': 10379, + 'out_ac_power': 500, + 'serial_number': 'test-serial-2', + }), + ]), + 'name': 'test-system', + 'solar': dict({ + 'energy_production_month': 58, + 'energy_production_today': 4, + 'energy_production_total': 10379, + 'power_production': 200, + }), + }), + ]), + }) +# --- diff --git a/tests/components/autarco/snapshots/test_sensor.ambr b/tests/components/autarco/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..0aa093d6a6d --- /dev/null +++ b/tests/components/autarco/snapshots/test_sensor.ambr @@ -0,0 +1,403 @@ +# serializer version: 1 +# name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-1_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-1 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-1_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-1 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-2_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-2 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'out_ac_power', + 'unique_id': 'test-serial-2_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-2 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production month', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_month', + 'unique_id': '1_solar_energy_production_month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production month', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '58', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production today', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_today', + 'unique_id': '1_solar_energy_production_today', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production today', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_total', + 'unique_id': '1_solar_energy_production_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar 
Energy production total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.solar_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power production', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_production', + 'unique_id': '1_solar_power_production', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Solar Power production', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- diff --git a/tests/components/autarco/test_config_flow.py b/tests/components/autarco/test_config_flow.py new file mode 100644 index 00000000000..621ad7f55c8 --- /dev/null +++ b/tests/components/autarco/test_config_flow.py @@ -0,0 +1,101 @@ +"""Test the Autarco config flow.""" + +from unittest.mock import AsyncMock + +from autarco import AutarcoAuthenticationError, AutarcoConnectionError +import pytest + +from homeassistant.components.autarco.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import 
CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_user_flow( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "test@autarco.com" + assert result.get("data") == { + CONF_EMAIL: "test@autarco.com", + CONF_PASSWORD: "test-password", + } + assert len(mock_autarco_client.get_account.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_autarco_client: AsyncMock, +) -> None: + """Test abort when setting up duplicate entry.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AutarcoConnectionError, "cannot_connect"), + (AutarcoAuthenticationError, "invalid_auth"), + ], +) +async def test_exceptions( + hass: HomeAssistant, + 
mock_autarco_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions.""" + mock_autarco_client.get_account.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + mock_autarco_client.get_account.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.CREATE_ENTRY diff --git a/tests/components/autarco/test_diagnostics.py b/tests/components/autarco/test_diagnostics.py new file mode 100644 index 00000000000..1d12a2c1894 --- /dev/null +++ b/tests/components/autarco/test_diagnostics.py @@ -0,0 +1,30 @@ +"""Test Autarco diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot diff --git a/tests/components/autarco/test_init.py b/tests/components/autarco/test_init.py new file mode 100644 index 00000000000..81c5f947251 --- /dev/null +++ b/tests/components/autarco/test_init.py @@ -0,0 +1,28 @@ +"""Test the Autarco init module.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/autarco/test_sensor.py b/tests/components/autarco/test_sensor.py new file mode 100644 index 00000000000..e5e823501b9 --- /dev/null +++ b/tests/components/autarco/test_sensor.py @@ -0,0 +1,27 @@ +"""Test the sensor provided by the Autarco integration.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_sensors( + hass: HomeAssistant, + mock_autarco_client: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Autarco sensors.""" + with patch("homeassistant.components.autarco.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/auth/test_init.py b/tests/components/auth/test_init.py index d0ca4699e0e..718bb369b53 100644 --- a/tests/components/auth/test_init.py +++ b/tests/components/auth/test_init.py @@ -13,6 +13,7 @@ from homeassistant.auth.models import ( TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN, TOKEN_TYPE_NORMAL, Credentials, + RefreshToken, ) from homeassistant.components import auth from homeassistant.core import HomeAssistant @@ -37,7 +38,7 @@ def mock_credential(): ) -async def async_setup_user_refresh_token(hass): +async def async_setup_user_refresh_token(hass: HomeAssistant) -> RefreshToken: """Create a testing user with a connected credential.""" user = await hass.auth.async_create_user("Test User") @@ -598,8 +599,8 @@ async def test_ws_delete_all_refresh_tokens( hass_admin_credential: Credentials, hass_ws_client: WebSocketGenerator, hass_access_token: str, - delete_token_type: dict[str:str], - delete_current_token: dict[str:bool], + delete_token_type: dict[str, str], + delete_current_token: dict[str, bool], expected_remaining_normal_tokens: int, expected_remaining_long_lived_tokens: int, ) -> None: diff --git a/tests/components/auth/test_init_link_user.py b/tests/components/auth/test_init_link_user.py index d1a5fa51af2..a8f04c2720d 100644 --- a/tests/components/auth/test_init_link_user.py +++ b/tests/components/auth/test_init_link_user.py @@ -1,6 +1,7 @@ """Tests for the link user flow.""" from http import 
HTTPStatus +from typing import Any from unittest.mock import patch from homeassistant.core import HomeAssistant @@ -11,7 +12,9 @@ from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI from tests.typing import ClientSessionGenerator -async def async_get_code(hass, aiohttp_client): +async def async_get_code( + hass: HomeAssistant, aiohttp_client: ClientSessionGenerator +) -> dict[str, Any]: """Return authorization code for link user tests.""" config = [ { diff --git a/tests/components/automation/test_blueprint.py b/tests/components/automation/test_blueprint.py index ee3fa631d00..2c92d7a5242 100644 --- a/tests/components/automation/test_blueprint.py +++ b/tests/components/automation/test_blueprint.py @@ -1,8 +1,10 @@ """Test built-in blueprints.""" import asyncio +from collections.abc import Iterator import contextlib from datetime import timedelta +from os import PathLike import pathlib from typing import Any from unittest.mock import patch @@ -23,7 +25,9 @@ BUILTIN_BLUEPRINT_FOLDER = pathlib.Path(automation.__file__).parent / "blueprint @contextlib.contextmanager -def patch_blueprint(blueprint_path: str, data_path): +def patch_blueprint( + blueprint_path: str, data_path: str | PathLike[str] +) -> Iterator[None]: """Patch blueprint loading from a different source.""" orig_load = models.DomainBlueprints._load_blueprint diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index 0c300540644..d8f04f10458 100644 --- a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -88,7 +88,7 @@ async def test_service_data_not_a_dict( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "data": 100}, + "action": {"action": "test.automation", "data": 100}, } }, ) @@ -111,7 +111,7 @@ async def test_service_data_single_template( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - 
"service": "test.automation", + "action": "test.automation", "data": "{{ { 'foo': 'bar' } }}", }, } @@ -136,7 +136,7 @@ async def test_service_specify_data( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": { "some": ( "{{ trigger.platform }} - {{ trigger.event.event_type }}" @@ -170,7 +170,7 @@ async def test_service_specify_entity_id( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -178,7 +178,7 @@ async def test_service_specify_entity_id( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] async def test_service_specify_entity_id_list( @@ -192,7 +192,7 @@ async def test_service_specify_entity_id_list( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } @@ -202,7 +202,7 @@ async def test_service_specify_entity_id_list( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world", "hello.world2"] async def test_two_triggers(hass: HomeAssistant, calls: list[ServiceCall]) -> None: @@ -216,7 +216,7 @@ async def test_two_triggers(hass: HomeAssistant, calls: list[ServiceCall]) -> No {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ 
-245,7 +245,7 @@ async def test_trigger_service_ignoring_condition( "entity_id": "non.existing", "above": "1", }, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -301,7 +301,7 @@ async def test_two_conditions_with_and( "below": 150, }, ], - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -333,7 +333,7 @@ async def test_shorthand_conditions_template( automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -360,11 +360,11 @@ async def test_automation_list_setting( automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] }, @@ -390,8 +390,8 @@ async def test_automation_calling_two_actions( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ - {"service": "test.automation", "data": {"position": 0}}, - {"service": "test.automation", "data": {"position": 1}}, + {"action": "test.automation", "data": {"position": 0}}, + {"action": "test.automation", "data": {"position": 1}}, ], } }, @@ -420,7 +420,7 @@ async def test_shared_context(hass: HomeAssistant, calls: list[ServiceCall]) -> { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] }, @@ -486,7 +486,7 @@ async def test_services(hass: HomeAssistant, calls: list[ServiceCall]) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": 
"test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -569,7 +569,7 @@ async def test_reload_config_service( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -597,7 +597,7 @@ async def test_reload_config_service( "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -650,7 +650,7 @@ async def test_reload_config_when_invalid_config( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -690,7 +690,7 @@ async def test_reload_config_handles_load_fails( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -735,7 +735,7 @@ async def test_automation_stops( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -811,7 +811,7 @@ async def test_reload_unchanged_does_not_stop( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -858,7 +858,7 @@ async def test_reload_single_unchanged_does_not_stop( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -905,7 +905,7 @@ async def test_reload_single_add_automation( "id": "sun", 
"alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], } } assert await async_setup_component(hass, automation.DOMAIN, config1) @@ -942,25 +942,25 @@ async def test_reload_single_parallel_calls( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_sun"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "moon", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_moon"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "mars", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_mars"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "venus", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_venus"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1055,7 +1055,7 @@ async def test_reload_single_remove_automation( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], } } config2 = {automation.DOMAIN: {}} @@ -1093,12 +1093,12 @@ async def test_reload_moved_automation_without_alias( automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "automation_with_alias", "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1149,17 +1149,17 @@ async def test_reload_identical_automations_without_id( { "alias": "dolly", "trigger": {"platform": "event", "event_type": 
"test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "dolly", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "dolly", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1246,12 +1246,12 @@ async def test_reload_identical_automations_without_id( [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, # An automation using templates { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "{{ 'test.automation' }}"}], + "action": [{"action": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1278,13 +1278,13 @@ async def test_reload_identical_automations_without_id( { "id": "sun", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, # An automation using templates { "id": "sun", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "{{ 'test.automation' }}"}], + "action": [{"action": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1424,12 +1424,12 @@ async def test_automation_restore_state(hass: HomeAssistant) -> None: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] } @@ -1474,7 +1474,7 @@ async def test_initial_value_off(hass: HomeAssistant) -> None: "alias": 
"hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1499,7 +1499,7 @@ async def test_initial_value_on(hass: HomeAssistant) -> None: "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } @@ -1528,7 +1528,7 @@ async def test_initial_value_off_but_restore_on(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1553,7 +1553,7 @@ async def test_initial_value_on_but_restore_off(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1576,7 +1576,7 @@ async def test_no_initial_value_and_restore_off(hass: HomeAssistant) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1600,7 +1600,7 @@ async def test_automation_is_on_if_no_initial_state_or_restore( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1623,7 +1623,7 @@ async def 
test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1641,7 +1641,7 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 - assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] @pytest.mark.parametrize( @@ -1714,7 +1714,7 @@ async def test_automation_bad_config_validation( "alias": "good_automation", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -1756,7 +1756,7 @@ async def test_automation_bad_config_validation( "alias": "bad_automation", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -1785,7 +1785,7 @@ async def test_automation_with_error_in_script( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1811,7 +1811,7 @@ async def test_automation_with_error_in_script_2( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": None, "entity_id": "hello.world"}, + "action": {"action": None, "entity_id": "hello.world"}, } }, ) @@ -1842,19 +1842,19 @@ async def test_automation_restore_last_triggered_with_initial_state( "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", 
"event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] } @@ -2013,11 +2013,11 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_first"}, }, { @@ -2027,15 +2027,15 @@ async def test_extraction_functions( "type": "turn_on", }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, ], @@ -2087,7 +2087,7 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -2140,7 +2140,7 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -2150,27 +2150,27 @@ async def test_extraction_functions( }, {"scene": "scene.hello"}, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-last"}, }, { - 
"service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-last"}, }, ], @@ -2289,7 +2289,7 @@ async def test_automation_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2308,7 +2308,7 @@ async def test_automation_variables( "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { - "service": "test.automation", + "action": "test.automation", }, }, { @@ -2317,7 +2317,7 @@ async def test_automation_variables( }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { - "service": "test.automation", + "action": "test.automation", }, }, ] @@ -2373,7 +2373,7 @@ async def test_automation_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2391,7 +2391,7 @@ async def test_automation_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2438,7 +2438,7 @@ async def test_automation_bad_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", }, }, ] @@ -2465,7 +2465,7 @@ async def test_automation_this_var_always( { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "this_template": "{{this.entity_id}}", }, @@ -2542,7 +2542,7 @@ async def test_blueprint_automation( "Blueprint 'Call service 
based on event' generated invalid automation", ( "value should be a string for dictionary value @" - " data['action'][0]['service']" + " data['action'][0]['action']" ), ), ], @@ -2640,7 +2640,7 @@ async def test_trigger_service(hass: HomeAssistant, calls: list[ServiceCall]) -> "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } @@ -2679,14 +2679,14 @@ async def test_trigger_condition_implicit_id( { "conditions": {"condition": "trigger", "id": [0, "2"]}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "one"}, }, }, { "conditions": {"condition": "trigger", "id": "1"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "two"}, }, }, @@ -2730,14 +2730,14 @@ async def test_trigger_condition_explicit_id( { "conditions": {"condition": "trigger", "id": "one"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "one"}, }, }, { "conditions": {"condition": "trigger", "id": "two"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "two"}, }, }, @@ -2822,8 +2822,8 @@ async def test_recursive_automation_starting_script( f" {automation_runs} }}}}" ) }, - {"service": "script.script1"}, - {"service": "test.script_done"}, + {"action": "script.script1"}, + {"action": "test.script_done"}, ], }, } @@ -2840,9 +2840,9 @@ async def test_recursive_automation_starting_script( {"platform": "event", "event_type": "trigger_automation"}, ], "action": [ - {"service": "test.automation_started"}, + {"action": "test.automation_started"}, {"delay": 0.001}, - {"service": "script.script1"}, + {"action": "script.script1"}, ], } }, @@ -2923,7 +2923,7 @@ async def test_recursive_automation( ], "action": [ {"event": "trigger_automation"}, - {"service": "test.automation_done"}, 
+ {"action": "test.automation_done"}, ], } }, @@ -2985,7 +2985,7 @@ async def test_recursive_automation_restart_mode( ], "action": [ {"event": "trigger_automation"}, - {"service": "test.automation_done"}, + {"action": "test.automation_done"}, ], } }, @@ -3021,7 +3021,7 @@ async def test_websocket_config( config = { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "data": 100}, + "action": {"action": "test.automation", "data": 100}, } assert await async_setup_component( hass, automation.DOMAIN, {automation.DOMAIN: config} @@ -3095,7 +3095,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non "from": "on", }, "action": { - "service": "automation.turn_off", + "action": "automation.turn_off", "target": { "entity_id": "automation.automation_1", }, @@ -3118,7 +3118,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non }, }, "action": { - "service": "persistent_notification.create", + "action": "persistent_notification.create", "metadata": {}, "data": { "message": "Test race", @@ -3185,7 +3185,7 @@ async def test_two_automations_call_restart_script_same_time( "fire_toggle": { "sequence": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test_1"}, } ] @@ -3206,7 +3206,7 @@ async def test_two_automations_call_restart_script_same_time( "to": "on", }, "action": { - "service": "script.fire_toggle", + "action": "script.fire_toggle", }, "id": "automation_0", "mode": "single", @@ -3218,7 +3218,7 @@ async def test_two_automations_call_restart_script_same_time( "to": "on", }, "action": { - "service": "script.fire_toggle", + "action": "script.fire_toggle", }, "id": "automation_1", "mode": "single", @@ -3229,6 +3229,7 @@ async def test_two_automations_call_restart_script_same_time( hass.states.async_set("binary_sensor.presence", "on") await hass.async_block_till_done() + await 
hass.async_block_till_done() assert len(events) == 2 cancel() @@ -3300,3 +3301,29 @@ async def test_two_automation_call_restart_script_right_after_each_other( hass.states.async_set("input_boolean.test_2", "on") await hass.async_block_till_done() assert len(events) == 1 + + +async def test_action_service_backward_compatibility( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: + """Test we can still use the service call method.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": { + "service": "test.automation", + "entity_id": "hello.world", + "data": {"event": "{{ trigger.event.event_type }}"}, + }, + } + }, + ) + + hass.bus.async_fire("test_event") + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] + assert calls[0].data.get("event") == "test_event" diff --git a/tests/components/automation/test_recorder.py b/tests/components/automation/test_recorder.py index fc45e6aee5b..be354abe9d2 100644 --- a/tests/components/automation/test_recorder.py +++ b/tests/components/automation/test_recorder.py @@ -40,7 +40,7 @@ async def test_exclude_attributes( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -48,7 +48,7 @@ async def test_exclude_attributes( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] await async_wait_recording_done(hass) states = await hass.async_add_executor_job( diff --git a/tests/components/aws/test_init.py b/tests/components/aws/test_init.py index 9589ad6c037..820b08e51b4 100644 --- 
a/tests/components/aws/test_init.py +++ b/tests/components/aws/test_init.py @@ -1,6 +1,7 @@ """Tests for the aws component config and setup.""" import json +from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch as async_patch from homeassistant.core import HomeAssistant @@ -10,7 +11,7 @@ from homeassistant.setup import async_setup_component class MockAioSession: """Mock AioSession.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init a mock session.""" self.get_user = AsyncMock() self.invoke = AsyncMock() diff --git a/tests/components/axis/conftest.py b/tests/components/axis/conftest.py index b306e25c434..c3377c15955 100644 --- a/tests/components/axis/conftest.py +++ b/tests/components/axis/conftest.py @@ -2,19 +2,17 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Coroutine, Generator from copy import deepcopy from types import MappingProxyType -from typing import Any +from typing import Any, Protocol from unittest.mock import AsyncMock, patch from axis.rtsp import Signal, State import pytest import respx -from typing_extensions import Generator from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MODEL, @@ -48,9 +46,33 @@ from .const import ( from tests.common import MockConfigEntry +type ConfigEntryFactoryType = Callable[[], Coroutine[Any, Any, MockConfigEntry]] +type RtspStateType = Callable[[bool], None] -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: + +class RtspEventMock(Protocol): + """Fixture to allow mocking received RTSP events.""" + + def __call__( + self, + topic: str, + data_type: str, + data_value: str, + operation: str = "Initialized", + source_name: str = "", + source_idx: str = "", + ) -> None: + """Send RTSP event.""" + + +class _RtspClientMock(Protocol): + async 
def __call__( + self, data: dict[str, Any] | None = None, state: str = "" + ) -> None: ... + + +@pytest.fixture(name="mock_setup_entry") +def fixture_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.axis.async_setup_entry", return_value=True @@ -62,14 +84,13 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, +def fixture_config_entry( config_entry_data: MappingProxyType[str, Any], config_entry_options: MappingProxyType[str, Any], config_entry_version: int, -) -> ConfigEntry: +) -> MockConfigEntry: """Define a config entry fixture.""" - config_entry = MockConfigEntry( + return MockConfigEntry( domain=AXIS_DOMAIN, entry_id="676abe5b73621446e6550a2e86ffe3dd", unique_id=FORMATTED_MAC, @@ -77,18 +98,16 @@ def config_entry_fixture( options=config_entry_options, version=config_entry_version, ) - config_entry.add_to_hass(hass) - return config_entry @pytest.fixture(name="config_entry_version") -def config_entry_version_fixture() -> int: +def fixture_config_entry_version() -> int: """Define a config entry version fixture.""" return 3 @pytest.fixture(name="config_entry_data") -def config_entry_data_fixture() -> MappingProxyType[str, Any]: +def fixture_config_entry_data() -> MappingProxyType[str, Any]: """Define a config entry data fixture.""" return { CONF_HOST: DEFAULT_HOST, @@ -101,7 +120,7 @@ def config_entry_data_fixture() -> MappingProxyType[str, Any]: @pytest.fixture(name="config_entry_options") -def config_entry_options_fixture() -> MappingProxyType[str, Any]: +def fixture_config_entry_options() -> MappingProxyType[str, Any]: """Define a config entry options fixture.""" return {} @@ -109,8 +128,15 @@ def config_entry_options_fixture() -> MappingProxyType[str, Any]: # Axis API fixtures -@pytest.fixture(name="mock_vapix_requests") -def default_request_fixture( +@pytest.fixture(autouse=True) +def reset_mock_requests() -> 
Generator[None]: + """Reset respx mock routes after the test.""" + yield + respx.mock.clear() + + +@pytest.fixture(name="mock_requests") +def fixture_request( respx_mock: respx.MockRouter, port_management_payload: dict[str, Any], param_properties_payload: str, @@ -215,7 +241,7 @@ def api_discovery_items() -> dict[str, Any]: @pytest.fixture(autouse=True) -def api_discovery_fixture(api_discovery_items: dict[str, Any]) -> None: +def fixture_api_discovery(api_discovery_items: dict[str, Any]) -> None: """Apidiscovery mock response.""" data = deepcopy(API_DISCOVERY_RESPONSE) if api_discovery_items: @@ -224,64 +250,66 @@ def api_discovery_fixture(api_discovery_items: dict[str, Any]) -> None: @pytest.fixture(name="port_management_payload") -def io_port_management_data_fixture() -> dict[str, Any]: +def fixture_io_port_management_data() -> dict[str, Any]: """Property parameter data.""" return PORT_MANAGEMENT_RESPONSE @pytest.fixture(name="param_properties_payload") -def param_properties_data_fixture() -> str: +def fixture_param_properties_data() -> str: """Property parameter data.""" return PROPERTIES_RESPONSE @pytest.fixture(name="param_ports_payload") -def param_ports_data_fixture() -> str: +def fixture_param_ports_data() -> str: """Property parameter data.""" return PORTS_RESPONSE @pytest.fixture(name="mqtt_status_code") -def mqtt_status_code_fixture() -> int: +def fixture_mqtt_status_code() -> int: """Property parameter data.""" return 200 -@pytest.fixture(name="setup_default_vapix_requests") -def default_vapix_requests_fixture(mock_vapix_requests: Callable[[str], None]) -> None: +@pytest.fixture(name="mock_default_requests") +def fixture_default_requests(mock_requests: Callable[[str], None]) -> None: """Mock default Vapix requests responses.""" - mock_vapix_requests(DEFAULT_HOST) + mock_requests(DEFAULT_HOST) -@pytest.fixture(name="prepare_config_entry") -async def prep_config_entry_fixture( - hass: HomeAssistant, config_entry: ConfigEntry, setup_default_vapix_requests: 
None -) -> Callable[[], ConfigEntry]: +@pytest.fixture(name="config_entry_factory") +async def fixture_config_entry_factory( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_requests: Callable[[str], None], +) -> ConfigEntryFactoryType: """Fixture factory to set up Axis network device.""" - async def __mock_setup_config_entry() -> ConfigEntry: - assert await hass.config_entries.async_setup(config_entry.entry_id) + async def __mock_setup_config_entry() -> MockConfigEntry: + config_entry.add_to_hass(hass) + mock_requests(config_entry.data[CONF_HOST]) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() return config_entry return __mock_setup_config_entry -@pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture( - hass: HomeAssistant, config_entry: ConfigEntry, setup_default_vapix_requests: None -) -> ConfigEntry: +@pytest.fixture(name="config_entry_setup") +async def fixture_config_entry_setup( + config_entry_factory: ConfigEntryFactoryType, +) -> MockConfigEntry: """Define a fixture to set up Axis network device.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - return config_entry + return await config_entry_factory() # RTSP fixtures -@pytest.fixture(autouse=True) -def mock_axis_rtspclient() -> Generator[Callable[[dict | None, str], None]]: +@pytest.fixture(autouse=True, name="_mock_rtsp_client") +def fixture_axis_rtsp_client() -> Generator[_RtspClientMock]: """No real RTSP communication allowed.""" with patch("axis.stream_manager.RTSPClient") as rtsp_client_mock: rtsp_client_mock.return_value.session.state = State.STOPPED @@ -298,7 +326,7 @@ def mock_axis_rtspclient() -> Generator[Callable[[dict | None, str], None]]: rtsp_client_mock.return_value.stop = stop_stream - def make_rtsp_call(data: dict | None = None, state: str = "") -> None: + def make_rtsp_call(data: dict[str, Any] | None = None, state: str = "") -> None: 
"""Generate a RTSP call.""" axis_streammanager_session_callback = rtsp_client_mock.call_args[0][4] @@ -313,10 +341,8 @@ def mock_axis_rtspclient() -> Generator[Callable[[dict | None, str], None]]: yield make_rtsp_call -@pytest.fixture(autouse=True) -def mock_rtsp_event( - mock_axis_rtspclient: Callable[[dict | None, str], None], -) -> Callable[[str, str, str, str, str, str], None]: +@pytest.fixture(autouse=True, name="mock_rtsp_event") +def fixture_rtsp_event(_mock_rtsp_client: _RtspClientMock) -> RtspEventMock: """Fixture to allow mocking received RTSP events.""" def send_event( @@ -361,20 +387,18 @@ def mock_rtsp_event( """ - mock_axis_rtspclient(data=event.encode("utf-8")) + _mock_rtsp_client(data=event.encode("utf-8")) return send_event -@pytest.fixture(autouse=True) -def mock_rtsp_signal_state( - mock_axis_rtspclient: Callable[[dict | None, str], None], -) -> Callable[[bool], None]: +@pytest.fixture(autouse=True, name="mock_rtsp_signal_state") +def fixture_rtsp_signal_state(_mock_rtsp_client: _RtspClientMock) -> RtspStateType: """Fixture to allow mocking RTSP state signalling.""" def send_signal(connected: bool) -> None: """Signal state change of RTSP connection.""" signal = Signal.PLAYING if connected else Signal.FAILED - mock_axis_rtspclient(state=signal) + _mock_rtsp_client(state=signal) return send_signal diff --git a/tests/components/axis/const.py b/tests/components/axis/const.py index 16b9d17f99e..2efb464efd7 100644 --- a/tests/components/axis/const.py +++ b/tests/components/axis/const.py @@ -4,8 +4,8 @@ from axis.models.api import CONTEXT MAC = "00408C123456" FORMATTED_MAC = "00:40:8c:12:34:56" -MODEL = "model" -NAME = "name" +MODEL = "A1234" +NAME = "home" DEFAULT_HOST = "1.2.3.4" diff --git a/tests/components/axis/snapshots/test_binary_sensor.ambr b/tests/components/axis/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..ab860489d55 --- /dev/null +++ b/tests/components/axis/snapshots/test_binary_sensor.ambr @@ -0,0 +1,518 @@ +# 
serializer version: 1 +# name: test_binary_sensors[event0][binary_sensor.home_daynight_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_daynight_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DayNight 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:VideoSource/tnsaxis:DayNightVision-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event0][binary_sensor.home_daynight_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'light', + 'friendly_name': 'home DayNight 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_daynight_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event10][binary_sensor.home_object_analytics_device1scenario8-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Object Analytics Device1Scenario8', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8-Device1Scenario8', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event10][binary_sensor.home_object_analytics_device1scenario8-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Object Analytics Device1Scenario8', + }), + 'context': , + 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event1][binary_sensor.home_sound_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_sound_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sound 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:AudioSource/tnsaxis:TriggerLevel-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event1][binary_sensor.home_sound_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'sound', + 'friendly_name': 'home Sound 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_sound_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event2][binary_sensor.home_pir_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_pir_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PIR sensor', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:IO/Port-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event2][binary_sensor.home_pir_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'home PIR sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.home_pir_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event3][binary_sensor.home_pir_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_pir_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PIR 0', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Sensor/PIR-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event3][binary_sensor.home_pir_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home PIR 0', + }), + 'context': , + 'entity_id': 'binary_sensor.home_pir_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'off', + }) +# --- +# name: test_binary_sensors[event4][binary_sensor.home_fence_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_fence_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fence Guard Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event4][binary_sensor.home_fence_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Fence Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_fence_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event5][binary_sensor.home_motion_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_motion_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion Guard Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event5][binary_sensor.home_motion_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Motion Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_motion_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event6][binary_sensor.home_loitering_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Loitering Guard Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event6][binary_sensor.home_loitering_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Loitering Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event7][binary_sensor.home_vmd4_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_vmd4_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VMD4 Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event7][binary_sensor.home_vmd4_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home VMD4 Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_vmd4_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event8][binary_sensor.home_object_analytics_scenario_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Object Analytics Scenario 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1-Device1Scenario1', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_binary_sensors[event8][binary_sensor.home_object_analytics_scenario_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Object Analytics Scenario 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event9][binary_sensor.home_vmd4_camera1profile9-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VMD4 Camera1Profile9', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9-Camera1Profile9', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event9][binary_sensor.home_vmd4_camera1profile9-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home VMD4 Camera1Profile9', + }), + 'context': , + 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/axis/snapshots/test_diagnostics.ambr b/tests/components/axis/snapshots/test_diagnostics.ambr index 8ea316d00cf..3a643f55d3e 100644 --- a/tests/components/axis/snapshots/test_diagnostics.ambr +++ b/tests/components/axis/snapshots/test_diagnostics.ambr @@ -30,8 +30,8 @@ 'config': dict({ 'data': dict({ 'host': 
'1.2.3.4', - 'model': 'model', - 'name': 'name', + 'model': 'A1234', + 'name': 'home', 'password': '**REDACTED**', 'port': 80, 'username': '**REDACTED**', diff --git a/tests/components/axis/snapshots/test_hub.ambr b/tests/components/axis/snapshots/test_hub.ambr new file mode 100644 index 00000000000..16579287f09 --- /dev/null +++ b/tests/components/axis/snapshots/test_hub.ambr @@ -0,0 +1,73 @@ +# serializer version: 1 +# name: test_device_registry_entry[api_discovery_items0] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://1.2.3.4:80', + 'connections': set({ + tuple( + 'mac', + '00:40:8c:12:34:56', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'axis', + '00:40:8c:12:34:56', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Axis Communications AB', + 'model': 'A1234 Network Camera', + 'model_id': None, + 'name': 'home', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '00:40:8c:12:34:56', + 'suggested_area': None, + 'sw_version': '9.10.1', + 'via_device_id': None, + }) +# --- +# name: test_device_registry_entry[api_discovery_items1] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://1.2.3.4:80', + 'connections': set({ + tuple( + 'mac', + '00:40:8c:12:34:56', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'axis', + '00:40:8c:12:34:56', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Axis Communications AB', + 'model': 'A1234 Network Camera', + 'model_id': None, + 'name': 'home', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '00:40:8c:12:34:56', + 'suggested_area': None, + 'sw_version': '9.80.1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/axis/snapshots/test_light.ambr 
b/tests/components/axis/snapshots/test_light.ambr new file mode 100644 index 00000000000..b37da39fe27 --- /dev/null +++ b/tests/components/axis/snapshots/test_light.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_lights[api_discovery_items0][light.home_ir_light_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.home_ir_light_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IR Light 0', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Light/Status-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[api_discovery_items0][light.home_ir_light_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 170, + 'color_mode': , + 'friendly_name': 'home IR Light 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.home_ir_light_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/axis/snapshots/test_switch.ambr b/tests/components/axis/snapshots/test_switch.ambr new file mode 100644 index 00000000000..dc4c75371cf --- /dev/null +++ b/tests/components/axis/snapshots/test_switch.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: 
test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_doorbell-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_doorbell', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Doorbell', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_doorbell-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Doorbell', + }), + 'context': , + 'entity_id': 'switch.home_doorbell', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_relay_1-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_relay_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_relay_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Relay 1', + }), + 'context': , + 'entity_id': 'switch.home_relay_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_doorbell-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_doorbell', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Doorbell', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_doorbell-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Doorbell', + }), + 'context': , + 'entity_id': 'switch.home_doorbell', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_relay_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_relay_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_relay_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Relay 1', + }), + 'context': , + 'entity_id': 'switch.home_relay_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/axis/test_binary_sensor.py b/tests/components/axis/test_binary_sensor.py index 99a530724e3..a1cf1e129d5 100644 --- a/tests/components/axis/test_binary_sensor.py +++ b/tests/components/axis/test_binary_sensor.py @@ -1,22 +1,22 @@ """Axis binary sensor 
platform tests.""" -from collections.abc import Callable -from typing import Any +from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.binary_sensor import ( - DOMAIN as BINARY_SENSOR_DOMAIN, - BinarySensorDeviceClass, -) -from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .const import NAME +from .conftest import ConfigEntryFactoryType, RtspEventMock + +from tests.common import snapshot_platform @pytest.mark.parametrize( - ("event", "entity"), + "event", [ ( { @@ -25,13 +25,7 @@ from .const import NAME "source_idx": "1", "data_type": "DayNight", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_daynight_1", - "state": STATE_ON, - "name": f"{NAME} DayNight 1", - "device_class": BinarySensorDeviceClass.LIGHT, - }, + } ), ( { @@ -40,13 +34,7 @@ from .const import NAME "source_idx": "1", "data_type": "Sound", "data_value": "0", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_sound_1", - "state": STATE_OFF, - "name": f"{NAME} Sound 1", - "device_class": BinarySensorDeviceClass.SOUND, - }, + } ), ( { @@ -56,13 +44,7 @@ from .const import NAME "operation": "Initialized", "source_name": "port", "source_idx": "0", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_pir_sensor", - "state": STATE_OFF, - "name": f"{NAME} PIR sensor", - "device_class": BinarySensorDeviceClass.CONNECTIVITY, - }, + } ), ( { @@ -71,78 +53,42 @@ from .const import NAME "data_value": "0", "source_name": "sensor", "source_idx": "0", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_pir_0", - "state": STATE_OFF, - "name": f"{NAME} PIR 0", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1", "data_type": 
"active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_fence_guard_profile_1", - "state": STATE_ON, - "name": f"{NAME} Fence Guard Profile 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_motion_guard_profile_1", - "state": STATE_ON, - "name": f"{NAME} Motion Guard Profile 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_loitering_guard_profile_1", - "state": STATE_ON, - "name": f"{NAME} Loitering Guard Profile 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_vmd4_profile_1", - "state": STATE_ON, - "name": f"{NAME} VMD4 Profile 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_object_analytics_scenario_1", - "state": STATE_ON, - "name": f"{NAME} Object Analytics Scenario 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), # Events with names generated from event ID and topic ( @@ -150,50 +96,35 @@ from .const import NAME "topic": "tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_vmd4_camera1profile9", - "state": STATE_ON, - "name": f"{NAME} VMD4 Camera1Profile9", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": 
"tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_object_analytics_device1scenario8", - "state": STATE_ON, - "name": f"{NAME} Object Analytics Device1Scenario8", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ], ) -@pytest.mark.usefixtures("setup_config_entry") async def test_binary_sensors( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, event: dict[str, str], - entity: dict[str, Any], ) -> None: """Test that sensors are loaded properly.""" + with patch("homeassistant.components.axis.PLATFORMS", [Platform.BINARY_SENSOR]): + config_entry = await config_entry_factory() mock_rtsp_event(**event) - await hass.async_block_till_done() - assert len(hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)) == 1 - - state = hass.states.get(entity["id"]) - assert state.state == entity["state"] - assert state.name == entity["name"] - assert state.attributes["device_class"] == entity["device_class"] + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) @pytest.mark.parametrize( - ("event"), + "event", [ # Event with unsupported topic { @@ -225,13 +156,12 @@ async def test_binary_sensors( }, ], ) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_unsupported_events( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + mock_rtsp_event: RtspEventMock, event: dict[str, str], ) -> None: """Validate nothing breaks with unsupported events.""" mock_rtsp_event(**event) - await hass.async_block_till_done() assert len(hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)) == 0 diff --git a/tests/components/axis/test_camera.py 
b/tests/components/axis/test_camera.py index 7d26cc7a3bc..00fe4391b0c 100644 --- a/tests/components/axis/test_camera.py +++ b/tests/components/axis/test_camera.py @@ -1,36 +1,17 @@ """Axis camera platform tests.""" -from collections.abc import Callable - import pytest from homeassistant.components import camera -from homeassistant.components.axis.const import ( - CONF_STREAM_PROFILE, - DOMAIN as AXIS_DOMAIN, -) +from homeassistant.components.axis.const import CONF_STREAM_PROFILE from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .const import MAC, NAME -async def test_platform_manually_configured(hass: HomeAssistant) -> None: - """Test that nothing happens when platform is manually configured.""" - assert ( - await async_setup_component( - hass, CAMERA_DOMAIN, {CAMERA_DOMAIN: {"platform": AXIS_DOMAIN}} - ) - is True - ) - - assert AXIS_DOMAIN not in hass.data - - -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_camera(hass: HomeAssistant) -> None: """Test that Axis camera platform is loaded properly.""" assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 @@ -51,7 +32,7 @@ async def test_camera(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_entry_options", [{CONF_STREAM_PROFILE: "profile_1"}]) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_camera_with_stream_profile(hass: HomeAssistant) -> None: """Test that Axis camera entity is using the correct path with stream profike.""" assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 @@ -82,13 +63,11 @@ root.Properties.Firmware.BuildDate=Feb 15 2019 09:42 root.Properties.Firmware.BuildNumber=26 root.Properties.Firmware.Version=9.10.1 
root.Properties.System.SerialNumber={MAC} -""" +""" # No image format data to signal camera support @pytest.mark.parametrize("param_properties_payload", [PROPERTY_DATA]) -async def test_camera_disabled( - hass: HomeAssistant, prepare_config_entry: Callable[[], ConfigEntry] -) -> None: +@pytest.mark.usefixtures("config_entry_setup") +async def test_camera_disabled(hass: HomeAssistant) -> None: """Test that Axis camera platform is loaded properly but does not create camera entity.""" - await prepare_config_entry() assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 0 diff --git a/tests/components/axis/test_config_flow.py b/tests/components/axis/test_config_flow.py index 055c74cc9a5..5ceb6588fbd 100644 --- a/tests/components/axis/test_config_flow.py +++ b/tests/components/axis/test_config_flow.py @@ -2,7 +2,7 @@ from collections.abc import Callable from ipaddress import ip_address -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest @@ -17,13 +17,11 @@ from homeassistant.components.axis.const import ( ) from homeassistant.config_entries import ( SOURCE_DHCP, - SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF, - ConfigEntry, ) from homeassistant.const import ( CONF_HOST, @@ -45,21 +43,9 @@ from tests.common import MockConfigEntry DHCP_FORMATTED_MAC = dr.format_mac(MAC).replace(":", "") -@pytest.fixture(name="mock_config_entry") -async def mock_config_entry_fixture( - hass: HomeAssistant, config_entry: MockConfigEntry, mock_setup_entry: AsyncMock -) -> MockConfigEntry: - """Mock config entry and setup entry.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - return config_entry - - -@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") +@pytest.mark.usefixtures("mock_default_requests") async def test_flow_manual_configuration(hass: HomeAssistant) -> None: """Test that config flow works.""" - 
MockConfigEntry(domain=AXIS_DOMAIN, source=SOURCE_IGNORE).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} ) @@ -93,11 +79,11 @@ async def test_flow_manual_configuration(hass: HomeAssistant) -> None: async def test_manual_configuration_update_configuration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], ) -> None: """Test that config flow fails on already configured device.""" - assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} @@ -106,7 +92,7 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -121,10 +107,19 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" -async def test_flow_fails_faulty_credentials(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("exc", "error"), + [ + (config_flow.AuthenticationRequired, "invalid_auth"), + (config_flow.CannotConnect, "cannot_connect"), + ], +) +async def test_flow_fails_on_api( + hass: HomeAssistant, exc: Exception, error: str +) -> None: """Test that config flow fails on faulty credentials.""" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} @@ -135,7 +130,7 @@ async def test_flow_fails_faulty_credentials(hass: HomeAssistant) -> None: with patch( 
"homeassistant.components.axis.config_flow.get_axis_api", - side_effect=config_flow.AuthenticationRequired, + side_effect=exc, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -148,37 +143,10 @@ async def test_flow_fails_faulty_credentials(hass: HomeAssistant) -> None: }, ) - assert result["errors"] == {"base": "invalid_auth"} + assert result["errors"] == {"base": error} -async def test_flow_fails_cannot_connect(hass: HomeAssistant) -> None: - """Test that config flow fails on cannot connect.""" - result = await hass.config_entries.flow.async_init( - AXIS_DOMAIN, context={"source": SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - with patch( - "homeassistant.components.axis.config_flow.get_axis_api", - side_effect=config_flow.CannotConnect, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PROTOCOL: "http", - CONF_HOST: "1.2.3.4", - CONF_USERNAME: "user", - CONF_PASSWORD: "pass", - CONF_PORT: 80, - }, - ) - - assert result["errors"] == {"base": "cannot_connect"} - - -@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") +@pytest.mark.usefixtures("mock_default_requests") async def test_flow_create_entry_multiple_existing_entries_of_same_model( hass: HomeAssistant, ) -> None: @@ -229,24 +197,24 @@ async def test_flow_create_entry_multiple_existing_entries_of_same_model( async def test_reauth_flow_update_configuration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], ) -> None: """Test that config flow fails on already configured device.""" - assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" - assert mock_config_entry.data[CONF_USERNAME] == "root" - assert mock_config_entry.data[CONF_PASSWORD] == "pass" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" + 
assert config_entry_setup.data[CONF_USERNAME] == "root" + assert config_entry_setup.data[CONF_PASSWORD] == "pass" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_REAUTH}, - data=mock_config_entry.data, + data=config_entry_setup.data, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -261,35 +229,35 @@ async def test_reauth_flow_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_PROTOCOL] == "https" - assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" - assert mock_config_entry.data[CONF_PORT] == 443 - assert mock_config_entry.data[CONF_USERNAME] == "user2" - assert mock_config_entry.data[CONF_PASSWORD] == "pass2" + assert config_entry_setup.data[CONF_PROTOCOL] == "https" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_PORT] == 443 + assert config_entry_setup.data[CONF_USERNAME] == "user2" + assert config_entry_setup.data[CONF_PASSWORD] == "pass2" async def test_reconfiguration_flow_update_configuration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], ) -> None: """Test that config flow reconfiguration updates configured device.""" - assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" - assert mock_config_entry.data[CONF_USERNAME] == "root" - assert mock_config_entry.data[CONF_PASSWORD] == "pass" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" + assert config_entry_setup.data[CONF_USERNAME] == "root" + assert config_entry_setup.data[CONF_PASSWORD] == "pass" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={ "source": 
SOURCE_RECONFIGURE, - "entry_id": mock_config_entry.entry_id, + "entry_id": config_entry_setup.entry_id, }, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -301,11 +269,11 @@ async def test_reconfiguration_flow_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_PROTOCOL] == "http" - assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" - assert mock_config_entry.data[CONF_PORT] == 80 - assert mock_config_entry.data[CONF_USERNAME] == "user" - assert mock_config_entry.data[CONF_PASSWORD] == "pass" + assert config_entry_setup.data[CONF_PROTOCOL] == "http" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_PORT] == 80 + assert config_entry_setup.data[CONF_USERNAME] == "user" + assert config_entry_setup.data[CONF_PASSWORD] == "pass" @pytest.mark.parametrize( @@ -372,7 +340,7 @@ async def test_reconfiguration_flow_update_configuration( ), ], ) -@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") +@pytest.mark.usefixtures("mock_default_requests") async def test_discovery_flow( hass: HomeAssistant, source: str, @@ -455,12 +423,12 @@ async def test_discovery_flow( ) async def test_discovered_device_already_configured( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + config_entry_setup: MockConfigEntry, source: str, discovery_info: BaseServiceInfo, ) -> None: """Test that discovery doesn't setup already configured devices.""" - assert mock_config_entry.data[CONF_HOST] == DEFAULT_HOST + assert config_entry_setup.data[CONF_HOST] == DEFAULT_HOST result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=discovery_info, context={"source": source} @@ -468,7 +436,7 @@ async def 
test_discovered_device_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == DEFAULT_HOST + assert config_entry_setup.data[CONF_HOST] == DEFAULT_HOST @pytest.mark.parametrize( @@ -513,14 +481,14 @@ async def test_discovered_device_already_configured( ) async def test_discovery_flow_updated_configuration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], source: str, discovery_info: BaseServiceInfo, expected_port: int, ) -> None: """Test that discovery flow update configuration with new parameters.""" - assert mock_config_entry.data == { + assert config_entry_setup.data == { CONF_HOST: DEFAULT_HOST, CONF_PORT: 80, CONF_USERNAME: "root", @@ -529,7 +497,7 @@ async def test_discovery_flow_updated_configuration( CONF_NAME: NAME, } - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=discovery_info, context={"source": source} ) @@ -537,7 +505,7 @@ async def test_discovery_flow_updated_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data == { + assert config_entry_setup.data == { CONF_HOST: "2.3.4.5", CONF_PORT: expected_port, CONF_USERNAME: "root", @@ -646,13 +614,13 @@ async def test_discovery_flow_ignore_link_local_address( async def test_option_flow( - hass: HomeAssistant, setup_config_entry: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test config flow options.""" - assert CONF_STREAM_PROFILE not in setup_config_entry.options - assert CONF_VIDEO_SOURCE not in setup_config_entry.options + assert CONF_STREAM_PROFILE not in config_entry_setup.options + assert CONF_VIDEO_SOURCE not in config_entry_setup.options - result = await 
hass.config_entries.options.async_init(setup_config_entry.entry_id) + result = await hass.config_entries.options.async_init(config_entry_setup.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_stream" @@ -676,5 +644,5 @@ async def test_option_flow( CONF_STREAM_PROFILE: "profile_1", CONF_VIDEO_SOURCE: 1, } - assert setup_config_entry.options[CONF_STREAM_PROFILE] == "profile_1" - assert setup_config_entry.options[CONF_VIDEO_SOURCE] == 1 + assert config_entry_setup.options[CONF_STREAM_PROFILE] == "profile_1" + assert config_entry_setup.options[CONF_VIDEO_SOURCE] == 1 diff --git a/tests/components/axis/test_diagnostics.py b/tests/components/axis/test_diagnostics.py index c3e1faf4277..e96ba88c2cd 100644 --- a/tests/components/axis/test_diagnostics.py +++ b/tests/components/axis/test_diagnostics.py @@ -2,12 +2,13 @@ import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from .const import API_DISCOVERY_BASIC_DEVICE_INFO +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -16,11 +17,10 @@ from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup_config_entry: ConfigEntry, + config_entry_setup: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, setup_config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry_setup + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/axis/test_hub.py b/tests/components/axis/test_hub.py index fb0a28bb262..74cdb0164cd 100644 --- a/tests/components/axis/test_hub.py +++ 
b/tests/components/axis/test_hub.py @@ -5,27 +5,21 @@ from ipaddress import ip_address from types import MappingProxyType from typing import Any from unittest import mock -from unittest.mock import ANY, AsyncMock, Mock, call, patch +from unittest.mock import ANY, Mock, call, patch import axis as axislib import pytest -from typing_extensions import Generator +from syrupy import SnapshotAssertion from homeassistant.components import axis, zeroconf from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigEntry -from homeassistant.const import ( - CONF_HOST, - CONF_MODEL, - CONF_NAME, - STATE_OFF, - STATE_ON, - STATE_UNAVAILABLE, -) +from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from .conftest import RtspEventMock, RtspStateType from .const import ( API_DISCOVERY_BASIC_DEVICE_INFO, API_DISCOVERY_MQTT, @@ -34,62 +28,27 @@ from .const import ( NAME, ) -from tests.common import async_fire_mqtt_message +from tests.common import MockConfigEntry, async_fire_mqtt_message from tests.typing import MqttMockHAClient -@pytest.fixture(name="forward_entry_setups") -def hass_mock_forward_entry_setup(hass: HomeAssistant) -> Generator[AsyncMock]: - """Mock async_forward_entry_setups.""" - with patch.object( - hass.config_entries, "async_forward_entry_setups" - ) as forward_mock: - yield forward_mock - - -async def test_device_setup( - forward_entry_setups: AsyncMock, - config_entry_data: MappingProxyType[str, Any], - setup_config_entry: ConfigEntry, +@pytest.mark.parametrize( + "api_discovery_items", [({}), (API_DISCOVERY_BASIC_DEVICE_INFO)] +) +async def test_device_registry_entry( + config_entry_setup: MockConfigEntry, 
device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, ) -> None: """Successful setup.""" - hub = setup_config_entry.runtime_data - - assert hub.api.vapix.firmware_version == "9.10.1" - assert hub.api.vapix.product_number == "M1065-LW" - assert hub.api.vapix.product_type == "Network Camera" - assert hub.api.vapix.serial_number == "00408C123456" - - assert len(forward_entry_setups.mock_calls) == 1 - platforms = set(forward_entry_setups.mock_calls[0][1][1]) - assert platforms == {"binary_sensor", "camera", "light", "switch"} - - assert hub.config.host == config_entry_data[CONF_HOST] - assert hub.config.model == config_entry_data[CONF_MODEL] - assert hub.config.name == config_entry_data[CONF_NAME] - assert hub.unique_id == FORMATTED_MAC - device_entry = device_registry.async_get_device( - identifiers={(AXIS_DOMAIN, hub.unique_id)} + identifiers={(AXIS_DOMAIN, config_entry_setup.unique_id)} ) - - assert device_entry.configuration_url == hub.api.config.url - - -@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_BASIC_DEVICE_INFO]) -async def test_device_info(setup_config_entry: ConfigEntry) -> None: - """Verify other path of device information works.""" - hub = setup_config_entry.runtime_data - - assert hub.api.vapix.firmware_version == "9.80.1" - assert hub.api.vapix.product_number == "M1065-LW" - assert hub.api.vapix.product_type == "Network Camera" - assert hub.api.vapix.serial_number == "00408C123456" + assert device_entry == snapshot @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_MQTT]) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_device_support_mqtt( hass: HomeAssistant, mqtt_mock: MqttMockHAClient ) -> None: @@ -115,7 +74,7 @@ async def test_device_support_mqtt( @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_MQTT]) @pytest.mark.parametrize("mqtt_status_code", [401]) -@pytest.mark.usefixtures("setup_config_entry") 
+@pytest.mark.usefixtures("config_entry_setup") async def test_device_support_mqtt_low_privilege(mqtt_mock: MqttMockHAClient) -> None: """Successful setup.""" mqtt_call = call(f"{MAC}/#", mock.ANY, 0, "utf-8") @@ -124,14 +83,14 @@ async def test_device_support_mqtt_low_privilege(mqtt_mock: MqttMockHAClient) -> async def test_update_address( hass: HomeAssistant, - setup_config_entry: ConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], ) -> None: """Test update address works.""" - hub = setup_config_entry.runtime_data + hub = config_entry_setup.runtime_data assert hub.api.config.host == "1.2.3.4" - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=zeroconf.ZeroconfServiceInfo( @@ -150,11 +109,11 @@ async def test_update_address( assert hub.api.config.host == "2.3.4.5" -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_device_unavailable( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], - mock_rtsp_signal_state: Callable[[bool], None], + mock_rtsp_event: RtspEventMock, + mock_rtsp_signal_state: RtspStateType, ) -> None: """Successful setup.""" # Provide an entity that can be used to verify connection state on @@ -187,22 +146,12 @@ async def test_device_unavailable( assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.{NAME}_sound_1").state == STATE_OFF -@pytest.mark.usefixtures("setup_default_vapix_requests") -async def test_device_not_accessible( - hass: HomeAssistant, config_entry: ConfigEntry -) -> None: - """Failed setup schedules a retry of setup.""" - with patch.object(axis, "get_axis_api", side_effect=axis.errors.CannotConnect): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert hass.data[AXIS_DOMAIN] == {} - - 
-@pytest.mark.usefixtures("setup_default_vapix_requests") +@pytest.mark.usefixtures("mock_default_requests") async def test_device_trigger_reauth_flow( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Failed authentication trigger a reauthentication flow.""" + config_entry.add_to_hass(hass) with ( patch.object( axis, "get_axis_api", side_effect=axis.errors.AuthenticationRequired @@ -212,18 +161,7 @@ async def test_device_trigger_reauth_flow( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() mock_flow_init.assert_called_once() - assert hass.data[AXIS_DOMAIN] == {} - - -@pytest.mark.usefixtures("setup_default_vapix_requests") -async def test_device_unknown_error( - hass: HomeAssistant, config_entry: ConfigEntry -) -> None: - """Unknown errors are handled.""" - with patch.object(axis, "get_axis_api", side_effect=Exception): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert hass.data[AXIS_DOMAIN] == {} + assert config_entry.state == ConfigEntryState.SETUP_ERROR async def test_shutdown(config_entry_data: MappingProxyType[str, Any]) -> None: @@ -241,36 +179,31 @@ async def test_shutdown(config_entry_data: MappingProxyType[str, Any]) -> None: assert len(axis_device.api.stream.stop.mock_calls) == 1 -async def test_get_device_fails( - hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] +@pytest.mark.parametrize( + ("side_effect", "state"), + [ + # Device unauthorized yields authentication required error + (axislib.Unauthorized, ConfigEntryState.SETUP_ERROR), + # Device unavailable yields cannot connect error + (TimeoutError, ConfigEntryState.SETUP_RETRY), + (axislib.RequestError, ConfigEntryState.SETUP_RETRY), + # Device yield unknown error + (axislib.AxisException, ConfigEntryState.SETUP_ERROR), + ], +) +@pytest.mark.usefixtures("mock_default_requests") +async def test_get_axis_api_errors( 
+ hass: HomeAssistant, + config_entry: MockConfigEntry, + side_effect: Exception, + state: ConfigEntryState, ) -> None: - """Device unauthorized yields authentication required error.""" - with ( - patch( - "axis.interfaces.vapix.Vapix.initialize", side_effect=axislib.Unauthorized - ), - pytest.raises(axis.errors.AuthenticationRequired), + """Failed setup schedules a retry of setup.""" + config_entry.add_to_hass(hass) + with patch( + "homeassistant.components.axis.hub.api.axis.interfaces.vapix.Vapix.initialize", + side_effect=side_effect, ): - await axis.hub.get_axis_api(hass, config_entry_data) - - -async def test_get_device_device_unavailable( - hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] -) -> None: - """Device unavailable yields cannot connect error.""" - with ( - patch("axis.interfaces.vapix.Vapix.request", side_effect=axislib.RequestError), - pytest.raises(axis.errors.CannotConnect), - ): - await axis.hub.get_axis_api(hass, config_entry_data) - - -async def test_get_device_unknown_error( - hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] -) -> None: - """Device yield unknown error.""" - with ( - patch("axis.interfaces.vapix.Vapix.request", side_effect=axislib.AxisException), - pytest.raises(axis.errors.AuthenticationRequired), - ): - await axis.hub.get_axis_api(hass, config_entry_data) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state == state diff --git a/tests/components/axis/test_init.py b/tests/components/axis/test_init.py index e4dc7cd1eef..89737325440 100644 --- a/tests/components/axis/test_init.py +++ b/tests/components/axis/test_init.py @@ -5,19 +5,23 @@ from unittest.mock import AsyncMock, Mock, patch import pytest from homeassistant.components import axis -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from 
tests.common import MockConfigEntry -async def test_setup_entry(setup_config_entry: ConfigEntry) -> None: + +async def test_setup_entry(config_entry_setup: MockConfigEntry) -> None: """Test successful setup of entry.""" - assert setup_config_entry.state is ConfigEntryState.LOADED + assert config_entry_setup.state is ConfigEntryState.LOADED async def test_setup_entry_fails( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test successful setup of entry.""" + config_entry.add_to_hass(hass) + mock_device = Mock() mock_device.async_setup = AsyncMock(return_value=False) @@ -30,18 +34,21 @@ async def test_setup_entry_fails( async def test_unload_entry( - hass: HomeAssistant, setup_config_entry: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test successful unload of entry.""" - assert setup_config_entry.state is ConfigEntryState.LOADED + assert config_entry_setup.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(setup_config_entry.entry_id) - assert setup_config_entry.state is ConfigEntryState.NOT_LOADED + assert await hass.config_entries.async_unload(config_entry_setup.entry_id) + assert config_entry_setup.state is ConfigEntryState.NOT_LOADED @pytest.mark.parametrize("config_entry_version", [1]) -async def test_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def test_migrate_entry( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test successful migration of entry data.""" + config_entry.add_to_hass(hass) assert config_entry.version == 1 mock_device = Mock() diff --git a/tests/components/axis/test_light.py b/tests/components/axis/test_light.py index a5ae66afee0..c33af5ec3a4 100644 --- a/tests/components/axis/test_light.py +++ b/tests/components/axis/test_light.py @@ -1,12 +1,12 @@ """Axis light platform tests.""" -from collections.abc import Callable from typing import Any from 
unittest.mock import patch from axis.models.api import CONTEXT import pytest import respx +from syrupy import SnapshotAssertion from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN from homeassistant.const import ( @@ -14,12 +14,16 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, - STATE_ON, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from .conftest import ConfigEntryFactoryType, RtspEventMock from .const import DEFAULT_HOST, NAME +from tests.common import snapshot_platform + API_DISCOVERY_LIGHT_CONTROL = { "id": "light-control", "version": "1.1", @@ -69,10 +73,10 @@ def light_control_fixture(light_control_items: list[dict[str, Any]]) -> None: @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_LIGHT_CONTROL]) @pytest.mark.parametrize("light_control_items", [[]]) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_no_light_entity_without_light_control_representation( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + mock_rtsp_event: RtspEventMock, ) -> None: """Verify no lights entities get created without light control representation.""" mock_rtsp_event( @@ -88,10 +92,12 @@ async def test_no_light_entity_without_light_control_representation( @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_LIGHT_CONTROL]) -@pytest.mark.usefixtures("setup_config_entry") async def test_lights( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, + snapshot: SnapshotAssertion, ) -> None: """Test that lights are loaded properly.""" # Add light @@ -128,6 +134,9 @@ async def test_lights( }, ) + with patch("homeassistant.components.axis.PLATFORMS", [Platform.LIGHT]): + 
config_entry = await config_entry_factory() + mock_rtsp_event( topic="tns1:Device/tnsaxis:Light/Status", data_type="state", @@ -136,15 +145,10 @@ async def test_lights( source_idx="0", ) await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(LIGHT_DOMAIN)) == 1 + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) entity_id = f"{LIGHT_DOMAIN}.{NAME}_ir_light_0" - light_0 = hass.states.get(entity_id) - assert light_0.state == STATE_ON - assert light_0.name == f"{NAME} IR Light 0" - # Turn on, set brightness, light already on with ( patch("axis.interfaces.vapix.LightHandler.activate_light") as mock_activate, diff --git a/tests/components/axis/test_switch.py b/tests/components/axis/test_switch.py index 479830783b1..964cfdae64c 100644 --- a/tests/components/axis/test_switch.py +++ b/tests/components/axis/test_switch.py @@ -1,23 +1,27 @@ """Axis switch platform tests.""" -from collections.abc import Callable from unittest.mock import patch from axis.models.api import CONTEXT import pytest +from syrupy import SnapshotAssertion from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_OFF, STATE_ON, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from .conftest import ConfigEntryFactoryType, RtspEventMock from .const import API_DISCOVERY_PORT_MANAGEMENT, NAME +from tests.common import snapshot_platform + PORT_DATA = """root.IOPort.I0.Configurable=yes root.IOPort.I0.Direction=output root.IOPort.I0.Output.Name=Doorbell @@ -28,61 +32,6 @@ root.IOPort.I1.Output.Name= root.IOPort.I1.Output.Active=open """ - -@pytest.mark.parametrize("param_ports_payload", [PORT_DATA]) -@pytest.mark.usefixtures("setup_config_entry") -async def test_switches_with_port_cgi( - hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], -) -> None: - 
"""Test that switches are loaded properly using port.cgi.""" - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="inactive", - source_name="RelayToken", - source_idx="0", - ) - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="active", - source_name="RelayToken", - source_idx="1", - ) - await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 - - relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1") - assert relay_1.state == STATE_ON - assert relay_1.name == f"{NAME} Relay 1" - - entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" - - relay_0 = hass.states.get(entity_id) - assert relay_0.state == STATE_OFF - assert relay_0.name == f"{NAME} Doorbell" - - with patch("axis.interfaces.vapix.Ports.close") as mock_turn_on: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_turn_on.assert_called_once_with("0") - - with patch("axis.interfaces.vapix.Ports.open") as mock_turn_off: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_turn_off.assert_called_once_with("0") - - PORT_MANAGEMENT_RESPONSE = { "apiVersion": "1.0", "method": "getPorts", @@ -113,14 +62,18 @@ PORT_MANAGEMENT_RESPONSE = { } -@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_PORT_MANAGEMENT]) -@pytest.mark.parametrize("port_management_payload", [PORT_MANAGEMENT_RESPONSE]) -@pytest.mark.usefixtures("setup_config_entry") -async def test_switches_with_port_management( +@pytest.mark.parametrize("param_ports_payload", [PORT_DATA]) +async def test_switches_with_port_cgi( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, + snapshot: SnapshotAssertion, ) -> 
None: - """Test that switches are loaded properly using port management.""" + """Test that switches are loaded properly using port.cgi.""" + with patch("homeassistant.components.axis.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + mock_rtsp_event( topic="tns1:Device/Trigger/Relay", data_type="LogicalState", @@ -137,30 +90,61 @@ async def test_switches_with_port_management( ) await hass.async_block_till_done() - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 - - relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1") - assert relay_1.state == STATE_ON - assert relay_1.name == f"{NAME} Relay 1" + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" - relay_0 = hass.states.get(entity_id) - assert relay_0.state == STATE_OFF - assert relay_0.name == f"{NAME} Doorbell" + with patch("axis.interfaces.vapix.Ports.close") as mock_turn_on: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_turn_on.assert_called_once_with("0") - # State update + with patch("axis.interfaces.vapix.Ports.open") as mock_turn_off: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_turn_off.assert_called_once_with("0") + +@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_PORT_MANAGEMENT]) +@pytest.mark.parametrize("port_management_payload", [PORT_MANAGEMENT_RESPONSE]) +async def test_switches_with_port_management( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, + snapshot: SnapshotAssertion, +) -> None: + """Test that switches are loaded properly using port management.""" + with patch("homeassistant.components.axis.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + + mock_rtsp_event( + 
topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="inactive", + source_name="RelayToken", + source_idx="0", + ) mock_rtsp_event( topic="tns1:Device/Trigger/Relay", data_type="LogicalState", data_value="active", source_name="RelayToken", - source_idx="0", + source_idx="1", ) await hass.async_block_till_done() - assert hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1").state == STATE_ON + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" with patch("axis.interfaces.vapix.IoPortManagement.close") as mock_turn_on: await hass.services.async_call( @@ -179,3 +163,16 @@ async def test_switches_with_port_management( blocking=True, ) mock_turn_off.assert_called_once_with("0") + + # State update + + mock_rtsp_event( + topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="active", + source_name="RelayToken", + source_idx="0", + ) + await hass.async_block_till_done() + + assert hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1").state == STATE_ON diff --git a/tests/components/azure_data_explorer/conftest.py b/tests/components/azure_data_explorer/conftest.py index 4168021b333..f8915a12ce1 100644 --- a/tests/components/azure_data_explorer/conftest.py +++ b/tests/components/azure_data_explorer/conftest.py @@ -1,12 +1,12 @@ """Test fixtures for Azure Data Explorer.""" +from collections.abc import Generator from datetime import timedelta import logging from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.azure_data_explorer.const import ( CONF_FILTER, diff --git a/tests/components/azure_devops/snapshots/test_sensor.ambr b/tests/components/azure_devops/snapshots/test_sensor.ambr index 0ce82cae1e8..aa8d1d9e7e0 100644 --- a/tests/components/azure_devops/snapshots/test_sensor.ambr +++ b/tests/components/azure_devops/snapshots/test_sensor.ambr @@ -1,467 
+1,4 @@ # serializer version: 1 -# name: test_sensors[sensor.testproject_ci_build_finish_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_finish_time', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build finish time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'finish_time', - 'unique_id': 'testorg_1234_9876_finish_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_finish_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build finish time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_finish_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_id-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_id', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build id', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'build_id', - 'unique_id': 'testorg_1234_9876_build_id', - 'unit_of_measurement': 
None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_id-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5678', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_queue_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build queue time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'queue_time', - 'unique_id': 'testorg_1234_9876_queue_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_queue_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build queue time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_reason-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_reason', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 
'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build reason', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reason', - 'unique_id': 'testorg_1234_9876_reason', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_reason-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build reason', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_reason', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'manual', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_result-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_result', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build result', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'result', - 'unique_id': 'testorg_1234_9876_result', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_result-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build result', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_result', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'succeeded', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_branch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build source branch', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'source_branch', - 'unique_id': 'testorg_1234_9876_source_branch', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_branch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source branch', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'main', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_version-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_source_version', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build source version', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'source_version', - 'unique_id': 'testorg_1234_9876_source_version', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_version-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source version', - }), - 'context': , - 'entity_id': 
'sensor.testproject_ci_build_source_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '123', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_start_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build start time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'start_time', - 'unique_id': 'testorg_1234_9876_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_start_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build start time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_status', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build status', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 
0, - 'translation_key': 'status', - 'unique_id': 'testorg_1234_9876_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'completed', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_url-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_url', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build url', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'url', - 'unique_id': 'testorg_1234_9876_url', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_url-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build url', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_url', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors[sensor.testproject_ci_latest_build-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -891,52 +428,6 @@ 'state': '2021-01-01T00:00:00+00:00', }) # --- -# name: test_sensors[sensor.testproject_ci_latest_build_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI latest build status', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'status', - 'unique_id': 'testorg_1234_9876_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_latest_build_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'completed', - }) -# --- # name: test_sensors[sensor.testproject_ci_latest_build_url-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -983,243 +474,6 @@ 'state': 'unknown', }) # --- -# name: test_sensors[sensor.testproject_test_build_build_id-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_test_build_build_id', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Test Build build id', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'build_id', - 'unique_id': 'testorg_1234_9876_build_id', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_test_build_build_id-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'friendly_name': 'testproject Test Build build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_test_build_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5678', - }) -# --- -# name: test_sensors[sensor.testproject_test_build_latest_build-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_test_build_latest_build', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Test Build latest build', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'latest_build', - 'unique_id': 'testorg_1234_9876_latest_build', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_test_build_latest_build-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'definition_id': 9876, - 'definition_name': 'Test Build', - 'finish_time': '2021-01-01T00:00:00Z', - 'friendly_name': 'testproject Test Build latest build', - 'id': 5678, - 'queue_time': '2021-01-01T00:00:00Z', - 'reason': 'manual', - 'result': 'succeeded', - 'source_branch': 'main', - 'source_version': '123', - 'start_time': '2021-01-01T00:00:00Z', - 'status': 'completed', - 'url': None, - }), - 'context': , - 'entity_id': 'sensor.testproject_test_build_latest_build', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_finish_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build finish time', - }), - 'context': , - 'entity_id': 
'sensor.testproject_ci_build_finish_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_id-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6789', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_queue_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build queue time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_reason-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build reason', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_reason', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_result-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build result', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_result', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_source_branch-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source branch', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: 
test_sensors_missing_data[sensor.testproject_ci_build_source_version-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source version', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_start_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build start time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_status-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_url-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build url', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_url', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors_missing_data[sensor.testproject_ci_latest_build-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -1352,19 +606,6 @@ 'state': 'unknown', }) # --- -# name: test_sensors_missing_data[sensor.testproject_ci_latest_build_status-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors_missing_data[sensor.testproject_ci_latest_build_url-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/azure_event_hub/conftest.py b/tests/components/azure_event_hub/conftest.py index a34f2e646f2..b814a845c86 100644 --- a/tests/components/azure_event_hub/conftest.py +++ b/tests/components/azure_event_hub/conftest.py @@ -1,5 +1,6 @@ """Test fixtures for AEH.""" +from collections.abc import AsyncGenerator, Generator from dataclasses import dataclass from datetime import timedelta import logging @@ -8,7 +9,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from azure.eventhub.aio import EventHubProducerClient import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.azure_event_hub.const import ( CONF_FILTER, diff --git a/tests/components/baf/__init__.py b/tests/components/baf/__init__.py index f1074a87cee..a047029f9a0 100644 --- a/tests/components/baf/__init__.py +++ b/tests/components/baf/__init__.py @@ -12,7 +12,7 @@ class MockBAFDevice(Device): """A simple mock for a BAF Device.""" # pylint: disable-next=super-init-not-called - def __init__(self, async_wait_available_side_effect=None): + def __init__(self, async_wait_available_side_effect=None) -> None: """Init simple mock.""" self._async_wait_available_side_effect = async_wait_available_side_effect diff --git a/tests/components/balboa/conftest.py b/tests/components/balboa/conftest.py index fbdc2f8a759..0bb8b2cd468 100644 --- a/tests/components/balboa/conftest.py +++ b/tests/components/balboa/conftest.py @@ -2,12 +2,11 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Generator from unittest.mock import AsyncMock, MagicMock, patch from pybalboa.enums import HeatMode, LowHighRange import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff 
--git a/tests/components/balboa/snapshots/test_fan.ambr b/tests/components/balboa/snapshots/test_fan.ambr index 2b87a961906..8d35ab6de7c 100644 --- a/tests/components/balboa/snapshots/test_fan.ambr +++ b/tests/components/balboa/snapshots/test_fan.ambr @@ -28,7 +28,7 @@ 'original_name': 'Pump 1', 'platform': 'balboa', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': 'pump', 'unique_id': 'FakeSpa-Pump 1-c0ffee', 'unit_of_measurement': None, @@ -42,7 +42,7 @@ 'percentage_step': 50.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.fakespa_pump_1', diff --git a/tests/components/balboa/test_climate.py b/tests/components/balboa/test_climate.py index c877f2858cd..850184a7d71 100644 --- a/tests/components/balboa/test_climate.py +++ b/tests/components/balboa/test_climate.py @@ -85,6 +85,8 @@ async def test_spa_temperature( hass: HomeAssistant, client: MagicMock, integration: MockConfigEntry ) -> None: """Test spa temperature settings.""" + client.temperature_minimum = 110 + client.temperature_maximum = 250 # flip the spa into F # set temp to a valid number state = await _patch_spa_settemp(hass, client, 0, 100) diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index 1fbcbe0fe69..4764798f34d 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -3,10 +3,26 @@ from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch -from mozart_api.models import BeolinkPeer +from mozart_api.models import ( + Action, + BeolinkPeer, + ContentItem, + PlaybackContentMetadata, + PlaybackProgress, + PlaybackState, + ProductState, + RemoteMenuItem, + RenderingState, + SoftwareUpdateStatus, + Source, + SourceArray, + SourceTypeEnum, + VolumeState, +) import pytest from homeassistant.components.bang_olufsen.const import DOMAIN +from 
homeassistant.core import HomeAssistant from .const import ( TEST_DATA_CREATE_ENTRY, @@ -30,10 +46,17 @@ def mock_config_entry(): ) +@pytest.fixture +async def mock_media_player(hass: HomeAssistant, mock_config_entry, mock_mozart_client): + """Mock media_player entity.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + @pytest.fixture def mock_mozart_client() -> Generator[AsyncMock]: """Mock MozartClient.""" - with ( patch( "homeassistant.components.bang_olufsen.MozartClient", autospec=True @@ -50,6 +73,170 @@ def mock_mozart_client() -> Generator[AsyncMock]: client.get_beolink_self.return_value = BeolinkPeer( friendly_name=TEST_FRIENDLY_NAME, jid=TEST_JID_1 ) + client.get_softwareupdate_status = AsyncMock() + client.get_softwareupdate_status.return_value = SoftwareUpdateStatus( + software_version="1.0.0", state="" + ) + client.get_product_state = AsyncMock() + client.get_product_state.return_value = ProductState( + volume=VolumeState(), + playback=PlaybackState( + metadata=PlaybackContentMetadata(), + progress=PlaybackProgress(), + source=Source(), + state=RenderingState(value="started"), + ), + ) + client.get_available_sources = AsyncMock() + client.get_available_sources.return_value = SourceArray( + items=[ + # Is in the HIDDEN_SOURCE_IDS constant, so should not be user selectable + Source( + name="AirPlay", + id="airPlay", + is_enabled=True, + is_multiroom_available=False, + ), + # The only available source + Source( + name="Tidal", + id="tidal", + is_enabled=True, + is_multiroom_available=True, + ), + # Is disabled, so should not be user selectable + Source( + name="Powerlink", + id="pl", + is_enabled=False, + ), + ] + ) + client.get_remote_menu = AsyncMock() + client.get_remote_menu.return_value = { + # Music category, so shouldn't be included in video sources + "b355888b-2cde-5f94-8592-d47b71d52a27": RemoteMenuItem( + action_list=[ + Action( + button_name=None, + 
content_id="netRadio://6629967157728971", + deezer_user_id=None, + gain_db=None, + listening_mode_id=None, + preset_key=None, + queue_item=None, + queue_settings=None, + radio_station_id=None, + source=None, + speaker_group_id=None, + stand_position=None, + stop_duration=None, + tone_name=None, + type="triggerContent", + volume_level=None, + ) + ], + scene_list=None, + disabled=None, + dynamic_list=None, + first_child_menu_item_id=None, + label="Yle Radio Suomi Helsinki", + next_sibling_menu_item_id="0b4552f8-7ac6-5046-9d44-5410a815b8d6", + parent_menu_item_id="eee0c2d0-2b3a-4899-a708-658475c38926", + available=None, + content=ContentItem( + categories=["music"], + content_uri="netRadio://6629967157728971", + label="Yle Radio Suomi Helsinki", + source=SourceTypeEnum(value="netRadio"), + ), + fixed=True, + id="b355888b-2cde-5f94-8592-d47b71d52a27", + ), + # Has "hdmi" as category, so should be included in video sources + "b6591565-80f4-4356-bcd9-c92ca247f0a9": RemoteMenuItem( + action_list=[ + Action( + button_name=None, + content_id="tv://hdmi_1", + deezer_user_id=None, + gain_db=None, + listening_mode_id=None, + preset_key=None, + queue_item=None, + queue_settings=None, + radio_station_id=None, + source=None, + speaker_group_id=None, + stand_position=None, + stop_duration=None, + tone_name=None, + type="triggerContent", + volume_level=None, + ) + ], + scene_list=None, + disabled=False, + dynamic_list="none", + first_child_menu_item_id=None, + label="HDMI A", + next_sibling_menu_item_id="0ba98974-7b1f-40dc-bc48-fbacbb0f1793", + parent_menu_item_id="b66c835b-6b98-4400-8f84-6348043792c7", + available=True, + content=ContentItem( + categories=["hdmi"], + content_uri="tv://hdmi_1", + label="HDMI A", + source=SourceTypeEnum(value="tv"), + ), + fixed=False, + id="b6591565-80f4-4356-bcd9-c92ca247f0a9", + ), + # The parent remote menu item. 
Has the TV label and should therefore not be included in video sources + "b66c835b-6b98-4400-8f84-6348043792c7": RemoteMenuItem( + action_list=[], + scene_list=None, + disabled=False, + dynamic_list="none", + first_child_menu_item_id="b6591565-80f4-4356-bcd9-c92ca247f0a9", + label="TV", + next_sibling_menu_item_id="0c4547fe-d3cc-4348-a425-473595b8c9fb", + parent_menu_item_id=None, + available=True, + content=None, + fixed=True, + id="b66c835b-6b98-4400-8f84-6348043792c7", + ), + # Has an empty content, so should not be included + "64c9da45-3682-44a4-8030-09ed3ef44160": RemoteMenuItem( + action_list=[], + scene_list=None, + disabled=False, + dynamic_list="none", + first_child_menu_item_id=None, + label="ListeningPosition", + next_sibling_menu_item_id=None, + parent_menu_item_id="0c4547fe-d3cc-4348-a425-473595b8c9fb", + available=True, + content=None, + fixed=True, + id="64c9da45-3682-44a4-8030-09ed3ef44160", + ), + } + client.post_standby = AsyncMock() + client.set_current_volume_level = AsyncMock() + client.set_volume_mute = AsyncMock() + client.post_playback_command = AsyncMock() + client.seek_to_position = AsyncMock() + client.post_clear_queue = AsyncMock() + client.post_overlay_play = AsyncMock() + client.post_uri_source = AsyncMock() + client.run_provided_scene = AsyncMock() + client.activate_preset = AsyncMock() + client.start_deezer_flow = AsyncMock() + client.add_to_queue = AsyncMock() + client.post_remote_trigger = AsyncMock() + client.set_active_source = AsyncMock() # Non-REST API client methods client.check_device_connection = AsyncMock() diff --git a/tests/components/bang_olufsen/const.py b/tests/components/bang_olufsen/const.py index 187f93108a1..d5e2221675a 100644 --- a/tests/components/bang_olufsen/const.py +++ b/tests/components/bang_olufsen/const.py @@ -1,6 +1,25 @@ """Constants used for testing the bang_olufsen integration.""" from ipaddress import IPv4Address, IPv6Address +from unittest.mock import Mock + +from mozart_api.exceptions import 
ApiException +from mozart_api.models import ( + Action, + OverlayPlayRequest, + OverlayPlayRequestTextToSpeechTextToSpeech, + PlaybackContentMetadata, + PlaybackError, + PlaybackProgress, + PlayQueueItem, + PlayQueueItemType, + RenderingState, + SceneProperties, + UserFlow, + VolumeLevel, + VolumeMute, + VolumeState, +) from homeassistant.components.bang_olufsen.const import ( ATTR_FRIENDLY_NAME, @@ -8,6 +27,7 @@ from homeassistant.components.bang_olufsen.const import ( ATTR_SERIAL_NUMBER, ATTR_TYPE_NUMBER, CONF_BEOLINK_JID, + BangOlufsenSource, ) from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_NAME @@ -24,7 +44,7 @@ TEST_FRIENDLY_NAME = "Living room Balance" TEST_TYPE_NUMBER = "1111" TEST_ITEM_NUMBER = "1111111" TEST_JID_1 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.{TEST_SERIAL_NUMBER}@products.bang-olufsen.com" - +TEST_MEDIA_PLAYER_ENTITY_ID = "media_player.beosound_balance_11111111" TEST_HOSTNAME_ZEROCONF = TEST_NAME.replace(" ", "-") + ".local." TEST_TYPE_ZEROCONF = "_bangolufsen._tcp.local." 
@@ -80,3 +100,80 @@ TEST_DATA_ZEROCONF_IPV6 = ZeroconfServiceInfo( ATTR_ITEM_NUMBER: TEST_ITEM_NUMBER, }, ) + +TEST_AUDIO_SOURCES = [BangOlufsenSource.TIDAL.name] +TEST_VIDEO_SOURCES = ["HDMI A"] +TEST_SOURCES = TEST_AUDIO_SOURCES + TEST_VIDEO_SOURCES +TEST_FALLBACK_SOURCES = [ + "Audio Streamer", + "Spotify Connect", + "Line-In", + "Optical", + "B&O Radio", + "Deezer", + "Tidal Connect", +] +TEST_PLAYBACK_METADATA = PlaybackContentMetadata( + album_name="Test album", + artist_name="Test artist", + organization="Test organization", + title="Test title", + total_duration_seconds=123, + track=1, +) +TEST_PLAYBACK_ERROR = PlaybackError(error="Test error") +TEST_PLAYBACK_PROGRESS = PlaybackProgress(progress=123) +TEST_PLAYBACK_STATE_PAUSED = RenderingState(value="paused") +TEST_PLAYBACK_STATE_PLAYING = RenderingState(value="started") +TEST_VOLUME = VolumeState(level=VolumeLevel(level=40)) +TEST_VOLUME_HOME_ASSISTANT_FORMAT = 0.4 +TEST_PLAYBACK_STATE_TURN_OFF = RenderingState(value="stopped") +TEST_VOLUME_MUTED = VolumeState( + muted=VolumeMute(muted=True), level=VolumeLevel(level=40) +) +TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT = True +TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT = 10.0 +TEST_SEEK_POSITION = 10000 +TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS = OverlayPlayRequest( + text_to_speech=OverlayPlayRequestTextToSpeechTextToSpeech( + lang="da-dk", text="Dette er en test" + ) +) +TEST_OVERLAY_OFFSET_VOLUME_TTS = OverlayPlayRequest( + text_to_speech=OverlayPlayRequestTextToSpeechTextToSpeech( + lang="en-us", text="This is a test" + ), + volume_absolute=60, +) +TEST_RADIO_STATION = SceneProperties( + action_list=[ + Action( + type="radio", + radio_station_id="1234567890123456", + ) + ] +) +TEST_DEEZER_FLOW = UserFlow(user_id="123") +TEST_DEEZER_PLAYLIST = PlayQueueItem( + provider=PlayQueueItemType(value="deezer"), + start_now_from_position=123, + type="playlist", + uri="playlist:1234567890", +) +TEST_DEEZER_TRACK = PlayQueueItem( + 
provider=PlayQueueItemType(value="deezer"), + start_now_from_position=0, + type="track", + uri="1234567890", +) + +# codespell can't see the escaped ', so it thinks the word is misspelled +TEST_DEEZER_INVALID_FLOW = ApiException( + status=400, + reason="Bad Request", + http_resp=Mock( + status=400, + reason="Bad Request", + data='{"message": "Couldn\'t start user flow for me"}', # codespell:ignore + ), +) diff --git a/tests/components/bang_olufsen/test_config_flow.py b/tests/components/bang_olufsen/test_config_flow.py index ad513905f16..e637120a6ae 100644 --- a/tests/components/bang_olufsen/test_config_flow.py +++ b/tests/components/bang_olufsen/test_config_flow.py @@ -132,7 +132,7 @@ async def test_config_flow_zeroconf(hass: HomeAssistant, mock_mozart_client) -> assert result_confirm["type"] is FlowResultType.CREATE_ENTRY assert result_confirm["data"] == TEST_DATA_CREATE_ENTRY - assert mock_mozart_client.get_beolink_self.call_count == 0 + assert mock_mozart_client.get_beolink_self.call_count == 1 async def test_config_flow_zeroconf_not_mozart_device(hass: HomeAssistant) -> None: @@ -159,3 +159,21 @@ async def test_config_flow_zeroconf_ipv6(hass: HomeAssistant) -> None: assert result_user["type"] is FlowResultType.ABORT assert result_user["reason"] == "ipv6_address" + + +async def test_config_flow_zeroconf_invalid_ip( + hass: HomeAssistant, mock_mozart_client +) -> None: + """Test zeroconf discovery with invalid IP address.""" + mock_mozart_client.get_beolink_self.side_effect = ClientConnectorError( + Mock(), Mock() + ) + + result_user = await hass.config_entries.flow.async_init( + handler=DOMAIN, + context={CONF_SOURCE: SOURCE_ZEROCONF}, + data=TEST_DATA_ZEROCONF, + ) + + assert result_user["type"] is FlowResultType.ABORT + assert result_user["reason"] == "invalid_address" diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py new file mode 100644 index 00000000000..74867a8eedf --- /dev/null +++ 
b/tests/components/bang_olufsen/test_media_player.py @@ -0,0 +1,1067 @@ +"""Test the Bang & Olufsen media_player entity.""" + +from contextlib import nullcontext as does_not_raise +from unittest.mock import ANY, patch + +from mozart_api.models import PlaybackContentMetadata +import pytest + +from homeassistant.components.bang_olufsen.const import ( + BANG_OLUFSEN_STATES, + DOMAIN, + BangOlufsenSource, + WebsocketNotification, +) +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_INPUT_SOURCE_LIST, + ATTR_MEDIA_ALBUM_ARTIST, + ATTR_MEDIA_ALBUM_NAME, + ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_CHANNEL, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_DURATION, + ATTR_MEDIA_EXTRA, + ATTR_MEDIA_POSITION, + ATTR_MEDIA_POSITION_UPDATED_AT, + ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_TITLE, + ATTR_MEDIA_TRACK, + ATTR_MEDIA_VOLUME_LEVEL, + ATTR_MEDIA_VOLUME_MUTED, + MediaPlayerState, + MediaType, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.setup import async_setup_component + +from .const import ( + TEST_AUDIO_SOURCES, + TEST_DEEZER_FLOW, + TEST_DEEZER_INVALID_FLOW, + TEST_DEEZER_PLAYLIST, + TEST_DEEZER_TRACK, + TEST_FALLBACK_SOURCES, + TEST_MEDIA_PLAYER_ENTITY_ID, + TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS, + TEST_OVERLAY_OFFSET_VOLUME_TTS, + TEST_PLAYBACK_ERROR, + TEST_PLAYBACK_METADATA, + TEST_PLAYBACK_PROGRESS, + TEST_PLAYBACK_STATE_PAUSED, + TEST_PLAYBACK_STATE_PLAYING, + TEST_PLAYBACK_STATE_TURN_OFF, + TEST_RADIO_STATION, + TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, + TEST_SERIAL_NUMBER, + TEST_SOURCES, + TEST_VIDEO_SOURCES, + TEST_VOLUME, + TEST_VOLUME_HOME_ASSISTANT_FORMAT, + TEST_VOLUME_MUTED, + TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT, +) + +from tests.common import MockConfigEntry +from tests.typing import 
WebSocketGenerator + + +async def test_initialization( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_mozart_client +) -> None: + """Test the integration is initialized properly in _initialize, async_added_to_hass and __init__.""" + + # Setup entity + with patch( + "homeassistant.components.bang_olufsen.media_player._LOGGER.debug" + ) as mock_logger: + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Ensure that the logger has been called with the debug message + mock_logger.assert_called_once_with( + "Connected to: %s %s running SW %s", "Beosound Balance", "11111111", "1.0.0" + ) + + # Check state (The initial state in this test does not contain all that much. + # States are tested using simulated WebSocket events.) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_SOURCES + assert states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] + + # Check API calls + mock_mozart_client.get_softwareupdate_status.assert_called_once() + mock_mozart_client.get_product_state.assert_called_once() + mock_mozart_client.get_available_sources.assert_called_once() + mock_mozart_client.get_remote_menu.assert_called_once() + + +async def test_async_update_sources_audio_only( + hass: HomeAssistant, mock_config_entry, mock_mozart_client +) -> None: + """Test sources are correctly handled in _async_update_sources.""" + mock_mozart_client.get_remote_menu.return_value = {} + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_AUDIO_SOURCES + + +async def test_async_update_sources_outdated_api( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test fallback sources are correctly handled in _async_update_sources.""" + 
mock_mozart_client.get_available_sources.side_effect = ValueError() + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ( + states.attributes[ATTR_INPUT_SOURCE_LIST] + == TEST_FALLBACK_SOURCES + TEST_VIDEO_SOURCES + ) + + +async def test_async_update_playback_metadata( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test _async_update_playback_metadata.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_MEDIA_DURATION not in states.attributes + assert ATTR_MEDIA_TITLE not in states.attributes + assert ATTR_MEDIA_ALBUM_NAME not in states.attributes + assert ATTR_MEDIA_ALBUM_ARTIST not in states.attributes + assert ATTR_MEDIA_TRACK not in states.attributes + assert ATTR_MEDIA_CHANNEL not in states.attributes + + # Send the WebSocket event dispatch + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_METADATA}", + TEST_PLAYBACK_METADATA, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ( + states.attributes[ATTR_MEDIA_DURATION] + == TEST_PLAYBACK_METADATA.total_duration_seconds + ) + assert states.attributes[ATTR_MEDIA_TITLE] == TEST_PLAYBACK_METADATA.title + assert states.attributes[ATTR_MEDIA_ALBUM_NAME] == TEST_PLAYBACK_METADATA.album_name + assert ( + states.attributes[ATTR_MEDIA_ALBUM_ARTIST] == TEST_PLAYBACK_METADATA.artist_name + ) + assert states.attributes[ATTR_MEDIA_TRACK] == TEST_PLAYBACK_METADATA.track + assert states.attributes[ATTR_MEDIA_CHANNEL] == TEST_PLAYBACK_METADATA.organization + + +async def test_async_update_playback_error( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test _async_update_playback_error.""" + + mock_config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + + # The async_dispatcher_send function seems to swallow exceptions, making pytest.raises unusable + with patch("homeassistant.helpers.dispatcher._LOGGER.error") as mock_logger: + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_ERROR}", + TEST_PLAYBACK_ERROR, + ) + + # The traceback can't be tested, so it is replaced with "ANY" + mock_logger.assert_called_once_with( + "%s\n%s", + "Exception in _async_update_playback_error when dispatching '11111111_playback_error': (PlaybackError(error='Test error', item=None),)", + ANY, + ) + + +async def test_async_update_playback_progress( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test _async_update_playback_progress.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_MEDIA_POSITION not in states.attributes + old_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] + assert old_updated_at + + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_PROGRESS}", + TEST_PLAYBACK_PROGRESS, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.attributes[ATTR_MEDIA_POSITION] == TEST_PLAYBACK_PROGRESS.progress + new_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] + assert new_updated_at + assert old_updated_at != new_updated_at + + +async def test_async_update_playback_state( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test _async_update_playback_state.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == MediaPlayerState.PLAYING + + async_dispatcher_send( + hass, + 
f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", + TEST_PLAYBACK_STATE_PAUSED, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == TEST_PLAYBACK_STATE_PAUSED.value + + +@pytest.mark.parametrize( + ("reported_source", "real_source", "content_type", "progress", "metadata"), + [ + # Normal source, music mediatype expected, no progress expected + ( + BangOlufsenSource.TIDAL, + BangOlufsenSource.TIDAL, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(), + ), + # URI source, url media type expected, no progress expected + ( + BangOlufsenSource.URI_STREAMER, + BangOlufsenSource.URI_STREAMER, + MediaType.URL, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(), + ), + # Line-In source,media type expected, progress 0 expected + ( + BangOlufsenSource.LINE_IN, + BangOlufsenSource.CHROMECAST, + MediaType.MUSIC, + 0, + PlaybackContentMetadata(), + ), + # Chromecast as source, but metadata says Line-In. + # Progress is not set to 0 as the source is Chromecast first + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.LINE_IN, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(title=BangOlufsenSource.LINE_IN.name), + ), + # Chromecast as source, but metadata says Bluetooth + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.BLUETOOTH, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(title=BangOlufsenSource.BLUETOOTH.name), + ), + # Chromecast as source, but metadata says Bluetooth in another way + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.BLUETOOTH, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(art=[]), + ), + ], +) +async def test_async_update_source_change( + reported_source, + real_source, + content_type, + progress, + metadata, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, +) -> None: + """Test _async_update_source_change.""" + + 
mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_INPUT_SOURCE not in states.attributes + assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC + + # Simulate progress attribute being available + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_PROGRESS}", + TEST_PLAYBACK_PROGRESS, + ) + + # Simulate metadata + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_METADATA}", + metadata, + ) + + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.SOURCE_CHANGE}", + reported_source, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name + assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type + assert states.attributes[ATTR_MEDIA_POSITION] == progress + + +async def test_async_turn_off( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_turn_off.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "turn_off", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", + TEST_PLAYBACK_STATE_TURN_OFF, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_TURN_OFF.value] + + # Check API call + mock_mozart_client.post_standby.assert_called_once() + + +async def test_async_set_volume_level( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_set_volume_level and _async_update_volume by proxy.""" + + mock_config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_MEDIA_VOLUME_LEVEL not in states.attributes + + await hass.services.async_call( + "media_player", + "volume_set", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_VOLUME_LEVEL: TEST_VOLUME_HOME_ASSISTANT_FORMAT, + }, + blocking=True, + ) + + # The service call will trigger a WebSocket notification + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", + TEST_VOLUME, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ( + states.attributes[ATTR_MEDIA_VOLUME_LEVEL] == TEST_VOLUME_HOME_ASSISTANT_FORMAT + ) + + mock_mozart_client.set_current_volume_level.assert_called_once_with( + volume_level=TEST_VOLUME.level + ) + + +async def test_async_mute_volume( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_mute_volume.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_MEDIA_VOLUME_MUTED not in states.attributes + + await hass.services.async_call( + "media_player", + "volume_mute", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_VOLUME_MUTED: TEST_VOLUME_HOME_ASSISTANT_FORMAT, + }, + blocking=True, + ) + + # The service call will trigger a WebSocket notification + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", + TEST_VOLUME_MUTED, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ( + states.attributes[ATTR_MEDIA_VOLUME_MUTED] + == TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT + ) + + mock_mozart_client.set_volume_mute.assert_called_once_with( + volume_mute=TEST_VOLUME_MUTED.muted + ) + + +@pytest.mark.parametrize( + ("initial_state", "command"), + [ + # Current state is playing, "pause" command expected + (TEST_PLAYBACK_STATE_PLAYING, 
"pause"), + # Current state is paused, "play" command expected + (TEST_PLAYBACK_STATE_PAUSED, "play"), + ], +) +async def test_async_media_play_pause( + initial_state, + command, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, +) -> None: + """Test async_media_play_pause.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set the initial state + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", + initial_state, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == BANG_OLUFSEN_STATES[initial_state.value] + + await hass.services.async_call( + "media_player", + "media_play_pause", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_playback_command.assert_called_once_with(command=command) + + +async def test_async_media_stop( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_media_stop.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set the state to playing + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", + TEST_PLAYBACK_STATE_PLAYING, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_PLAYING.value] + + await hass.services.async_call( + "media_player", + "media_stop", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + # Check API call + mock_mozart_client.post_playback_command.assert_called_once_with(command="stop") + + +async def test_async_media_next_track( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_media_next_track.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await 
hass.services.async_call( + "media_player", + "media_next_track", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_playback_command.assert_called_once_with(command="skip") + + +@pytest.mark.parametrize( + ("source", "expected_result", "seek_called_times"), + [ + # Deezer source, seek expected + (BangOlufsenSource.DEEZER, does_not_raise(), 1), + # Non deezer source, seek shouldn't work + (BangOlufsenSource.TIDAL, pytest.raises(HomeAssistantError), 0), + ], +) +async def test_async_media_seek( + source, + expected_result, + seek_called_times, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, +) -> None: + """Test async_media_seek.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set the source + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.SOURCE_CHANGE}", + source, + ) + + # Check results + with expected_result: + await hass.services.async_call( + "media_player", + "media_seek", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_SEEK_POSITION: TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, + }, + blocking=True, + ) + + assert mock_mozart_client.seek_to_position.call_count == seek_called_times + + +async def test_async_media_previous_track( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_media_previous_track.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "media_previous_track", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_playback_command.assert_called_once_with(command="prev") + + +async def test_async_clear_playlist( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_clear_playlist.""" + + mock_config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "clear_playlist", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_clear_queue.assert_called_once() + + +@pytest.mark.parametrize( + ("source", "expected_result", "audio_source_call", "video_source_call"), + [ + # Invalid source + ("Test source", pytest.raises(ServiceValidationError), 0, 0), + # Valid audio source + (BangOlufsenSource.TIDAL.name, does_not_raise(), 1, 0), + # Valid video source + (TEST_VIDEO_SOURCES[0], does_not_raise(), 0, 1), + ], +) +async def test_async_select_source( + source, + expected_result, + audio_source_call, + video_source_call, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, +) -> None: + """Test async_select_source with an invalid source.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with expected_result: + await hass.services.async_call( + "media_player", + "select_source", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_INPUT_SOURCE: source, + }, + blocking=True, + ) + + assert mock_mozart_client.set_active_source.call_count == audio_source_call + assert mock_mozart_client.post_remote_trigger.call_count == video_source_call + + +async def test_async_play_media_invalid_type( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media only accepts valid media types.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with pytest.raises(ServiceValidationError) as exc_info: + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "test", + ATTR_MEDIA_CONTENT_TYPE: "invalid type", + }, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert 
exc_info.value.translation_key == "invalid_media_type" + assert exc_info.errisinstance(HomeAssistantError) + + +async def test_async_play_media_url( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media URL.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Setup media source + await async_setup_component(hass, "media_source", {"media_source": {}}) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", + ATTR_MEDIA_CONTENT_TYPE: "audio/mpeg", + }, + blocking=True, + ) + + mock_mozart_client.post_uri_source.assert_called_once() + + +async def test_async_play_media_overlay_absolute_volume_uri( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media overlay with Home Assistant local URI and absolute volume.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await async_setup_component(hass, "media_source", {"media_source": {}}) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: {"overlay_absolute_volume": 60}, + }, + blocking=True, + ) + + mock_mozart_client.post_overlay_play.assert_called_once() + + # Check that the API call was as expected + args, _ = mock_mozart_client.post_overlay_play.call_args + assert args[0].volume_absolute == 60 + assert "/local/doorbell.mp3" in args[0].uri.location + + +async def test_async_play_media_overlay_invalid_offset_volume_tts( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Home Assistant invalid 
offset volume and B&O tts.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with patch( + "homeassistant.components.bang_olufsen.media_player._LOGGER.warning" + ) as mock_logger: + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "Dette er en test", + ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: { + "overlay_offset_volume": 20, + "overlay_tts_language": "da-dk", + }, + }, + blocking=True, + ) + mock_logger.assert_called_once_with("Error setting volume") + + mock_mozart_client.post_overlay_play.assert_called_once_with( + TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS + ) + + +async def test_async_play_media_overlay_offset_volume_tts( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set the volume to enable offset + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", + TEST_VOLUME, + ) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "This is a test", + ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: {"overlay_offset_volume": 20}, + }, + blocking=True, + ) + + mock_mozart_client.post_overlay_play.assert_called_once_with( + TEST_OVERLAY_OFFSET_VOLUME_TTS + ) + + +async def test_async_play_media_tts( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Home Assistant tts.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await async_setup_component(hass, "media_source", 
{"media_source": {}}) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", + ATTR_MEDIA_CONTENT_TYPE: "provider", + }, + blocking=True, + ) + + mock_mozart_client.post_overlay_play.assert_called_once() + + +async def test_async_play_media_radio( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with B&O radio.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "1234567890123456", + ATTR_MEDIA_CONTENT_TYPE: "radio", + }, + blocking=True, + ) + + mock_mozart_client.run_provided_scene.assert_called_once_with( + scene_properties=TEST_RADIO_STATION + ) + + +async def test_async_play_media_favourite( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with B&O favourite.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "1", + ATTR_MEDIA_CONTENT_TYPE: "favourite", + }, + blocking=True, + ) + + mock_mozart_client.activate_preset.assert_called_once_with(id=int("1")) + + +async def test_async_play_media_deezer_flow( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Deezer flow.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Send a service call + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "flow", + 
ATTR_MEDIA_CONTENT_TYPE: "deezer", + ATTR_MEDIA_EXTRA: {"id": "123"}, + }, + blocking=True, + ) + + mock_mozart_client.start_deezer_flow.assert_called_once_with( + user_flow=TEST_DEEZER_FLOW + ) + + +async def test_async_play_media_deezer_playlist( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Deezer playlist.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "playlist:1234567890", + ATTR_MEDIA_CONTENT_TYPE: "deezer", + ATTR_MEDIA_EXTRA: {"start_from": 123}, + }, + blocking=True, + ) + + mock_mozart_client.add_to_queue.assert_called_once_with( + play_queue_item=TEST_DEEZER_PLAYLIST + ) + + +async def test_async_play_media_deezer_track( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Deezer track.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "1234567890", + ATTR_MEDIA_CONTENT_TYPE: "deezer", + }, + blocking=True, + ) + + mock_mozart_client.add_to_queue.assert_called_once_with( + play_queue_item=TEST_DEEZER_TRACK + ) + + +async def test_async_play_media_invalid_deezer( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with an invalid/no Deezer login.""" + + mock_mozart_client.start_deezer_flow.side_effect = TEST_DEEZER_INVALID_FLOW + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: 
TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "flow", + ATTR_MEDIA_CONTENT_TYPE: "deezer", + }, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == "play_media_error" + assert exc_info.errisinstance(HomeAssistantError) + + mock_mozart_client.start_deezer_flow.assert_called_once() + + +async def test_async_play_media_url_m3u( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media URL with the m3u extension.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await async_setup_component(hass, "media_source", {"media_source": {}}) + + with ( + pytest.raises(HomeAssistantError) as exc_info, + patch( + "homeassistant.components.bang_olufsen.media_player.async_process_play_media_url", + return_value="https://test.com/test.m3u", + ), + ): + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", + ATTR_MEDIA_CONTENT_TYPE: "audio/mpeg", + }, + blocking=True, + ) + + # Check exception + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == "m3u_invalid_format" + assert exc_info.errisinstance(HomeAssistantError) + + mock_mozart_client.post_uri_source.assert_not_called() + + +@pytest.mark.parametrize( + ("child", "present"), + [ + # Audio source expected + ( + { + "title": "test.mp3", + "media_class": "music", + "media_content_type": "audio/mpeg", + "media_content_id": "media-source://media_source/local/test.mp3", + "can_play": True, + "can_expand": False, + "thumbnail": None, + "children_media_class": None, + }, + True, + ), + # Video source not expected + ( + { + "title": "test.mp4", + "media_class": "video", + "media_content_type": "video/mp4", + "media_content_id": ("media-source://media_source/local/test.mp4"), + 
"can_play": True, + "can_expand": False, + "thumbnail": None, + "children_media_class": None, + }, + False, + ), + ], +) +async def test_async_browse_media( + child, + present, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test async_browse_media with audio and video source.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await async_setup_component(hass, "media_source", {"media_source": {}}) + + client = await hass_ws_client() + await client.send_json_auto_id( + { + "type": "media_player/browse_media", + "entity_id": TEST_MEDIA_PLAYER_ENTITY_ID, + } + ) + response = await client.receive_json() + assert response["success"] + + assert (child in response["result"]["children"]) is present diff --git a/tests/components/bayesian/test_binary_sensor.py b/tests/components/bayesian/test_binary_sensor.py index e4f646572cb..818e9bed909 100644 --- a/tests/components/bayesian/test_binary_sensor.py +++ b/tests/components/bayesian/test_binary_sensor.py @@ -718,17 +718,18 @@ async def test_observed_entities(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert ["sensor.test_monitored"] == state.attributes.get( - "occurred_observation_entities" - ) + assert state.attributes.get("occurred_observation_entities") == [ + "sensor.test_monitored" + ] hass.states.async_set("sensor.test_monitored1", "on") await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert ["sensor.test_monitored", "sensor.test_monitored1"] == sorted( - state.attributes.get("occurred_observation_entities") - ) + assert sorted(state.attributes.get("occurred_observation_entities")) == [ + "sensor.test_monitored", + "sensor.test_monitored1", + ] async def test_state_attributes_are_serializable(hass: HomeAssistant) -> None: @@ -785,9 +786,10 @@ async def 
test_state_attributes_are_serializable(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert ["sensor.test_monitored", "sensor.test_monitored1"] == sorted( - state.attributes.get("occurred_observation_entities") - ) + assert sorted(state.attributes.get("occurred_observation_entities")) == [ + "sensor.test_monitored", + "sensor.test_monitored1", + ] for attrs in state.attributes.values(): json.dumps(attrs) diff --git a/tests/components/binary_sensor/test_device_condition.py b/tests/components/binary_sensor/test_device_condition.py index c2bd29fad36..8a0132ff2af 100644 --- a/tests/components/binary_sensor/test_device_condition.py +++ b/tests/components/binary_sensor/test_device_condition.py @@ -22,7 +22,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -32,12 +31,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -239,7 +232,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for turn_on and turn_off conditions.""" @@ -308,26 +301,26 @@ async def test_if_state( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert 
calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for turn_on and turn_off conditions.""" @@ -375,19 +368,19 @@ async def test_if_state_legacy( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for firing if condition is on with delay.""" @@ -439,26 +432,26 @@ async def test_if_fires_on_for_condition( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future 
time_freeze.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 20 secs into the future time_freeze.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_off event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/binary_sensor/test_device_trigger.py b/tests/components/binary_sensor/test_device_trigger.py index f91a336061d..78e382f77bf 100644 --- a/tests/components/binary_sensor/test_device_trigger.py +++ b/tests/components/binary_sensor/test_device_trigger.py @@ -22,7 +22,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -32,12 +31,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -240,7 +233,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for on and off triggers firing.""" @@ -313,21 +306,22 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() assert 
hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"not_bat_low device - {entry.entity_id} - on - off - None" ) hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] == f"bat_low device - {entry.entity_id} - off - on - None" + service_calls[1].data["some"] + == f"bat_low device - {entry.entity_id} - off - on - None" ) @@ -335,7 +329,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for triggers firing with delay.""" @@ -388,17 +382,17 @@ async def test_if_fires_on_state_change_with_for( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) @@ -407,7 +401,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], 
mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for triggers firing.""" @@ -459,12 +453,12 @@ async def test_if_fires_on_state_change_legacy( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) diff --git a/tests/components/binary_sensor/test_init.py b/tests/components/binary_sensor/test_init.py index 8f14063e011..ea0ad05a0db 100644 --- a/tests/components/binary_sensor/test_init.py +++ b/tests/components/binary_sensor/test_init.py @@ -1,9 +1,9 @@ """The tests for the Binary sensor component.""" +from collections.abc import Generator from unittest import mock import pytest -from typing_extensions import Generator from homeassistant.components import binary_sensor from homeassistant.config_entries import ConfigEntry, ConfigFlow diff --git a/tests/components/blackbird/test_media_player.py b/tests/components/blackbird/test_media_player.py index ec5a37f72ad..db92dddcc77 100644 --- a/tests/components/blackbird/test_media_player.py +++ b/tests/components/blackbird/test_media_player.py @@ -35,7 +35,7 @@ class AttrDict(dict): class MockBlackbird: """Mock for pyblackbird object.""" - def __init__(self): + def __init__(self) -> None: """Init mock object.""" self.zones = defaultdict(lambda: AttrDict(power=True, av=1)) diff --git a/tests/components/blink/test_diagnostics.py b/tests/components/blink/test_diagnostics.py index 3b120d23038..d527633d4c9 100644 --- a/tests/components/blink/test_diagnostics.py +++ b/tests/components/blink/test_diagnostics.py @@ -31,4 +31,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == 
snapshot(exclude=props("entry_id")) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/blueprint/common.py b/tests/components/blueprint/common.py index dd59b6df082..037aa38f6cb 100644 --- a/tests/components/blueprint/common.py +++ b/tests/components/blueprint/common.py @@ -1,9 +1,8 @@ """Blueprints test helpers.""" +from collections.abc import Generator from unittest.mock import patch -from typing_extensions import Generator - def stub_blueprint_populate_fixture_helper() -> Generator[None]: """Stub copying the blueprints to the config folder.""" diff --git a/tests/components/blueprint/test_importer.py b/tests/components/blueprint/test_importer.py index f135bbf23b8..94036d208ab 100644 --- a/tests/components/blueprint/test_importer.py +++ b/tests/components/blueprint/test_importer.py @@ -192,9 +192,28 @@ async def test_fetch_blueprint_from_website_url( assert imported_blueprint.blueprint.metadata["source_url"] == url -async def test_fetch_blueprint_from_unsupported_url(hass: HomeAssistant) -> None: - """Test fetching blueprint from an unsupported URL.""" - url = "https://example.com/unsupported.yaml" +async def test_fetch_blueprint_from_generic_url( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test fetching blueprint from url.""" + aioclient_mock.get( + "https://example.org/path/someblueprint.yaml", + text=Path( + hass.config.path("blueprints/automation/test_event_service.yaml") + ).read_text(encoding="utf8"), + ) - with pytest.raises(HomeAssistantError, match=r"^Unsupported URL$"): - await importer.fetch_blueprint_from_url(hass, url) + url = "https://example.org/path/someblueprint.yaml" + imported_blueprint = await importer.fetch_blueprint_from_url(hass, url) + assert isinstance(imported_blueprint, importer.ImportedBlueprint) + assert imported_blueprint.blueprint.domain == "automation" + assert imported_blueprint.suggested_filename == "example.org/someblueprint" + assert 
imported_blueprint.blueprint.metadata["source_url"] == url + + +def test_generic_importer_last() -> None: + """Test that generic importer is always the last one.""" + assert ( + importer.FETCH_FUNCTIONS.count(importer.fetch_blueprint_from_generic_url) == 1 + ) + assert importer.FETCH_FUNCTIONS[-1] == importer.fetch_blueprint_from_generic_url diff --git a/tests/components/blueprint/test_websocket_api.py b/tests/components/blueprint/test_websocket_api.py index 1f684b451ed..13615803569 100644 --- a/tests/components/blueprint/test_websocket_api.py +++ b/tests/components/blueprint/test_websocket_api.py @@ -9,7 +9,7 @@ import yaml from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from homeassistant.util.yaml import parse_yaml +from homeassistant.util.yaml import UndefinedSubstitution, parse_yaml from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator @@ -454,9 +454,124 @@ async def test_delete_blueprint_in_use_by_script( msg = await client.receive_json() assert not unlink_mock.mock_calls - assert msg["id"] == 9 assert not msg["success"] assert msg["error"] == { "code": "home_assistant_error", "message": "Blueprint in use", } + + +async def test_substituting_blueprint_inputs( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test substituting blueprint inputs.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "blueprint/substitute", + "domain": "automation", + "path": "test_event_service.yaml", + "input": { + "trigger_event": "test_event", + "service_to_call": "test.automation", + "a_number": 5, + }, + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"]["substituted_config"] == { + "action": { + "entity_id": "light.kitchen", + "service": "test.automation", + }, + "trigger": { + "event_type": "test_event", + "platform": "event", + }, + } + + +async def 
test_substituting_blueprint_inputs_unknown_domain( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test substituting blueprint inputs.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "blueprint/substitute", + "domain": "donald_duck", + "path": "test_event_service.yaml", + "input": { + "trigger_event": "test_event", + "service_to_call": "test.automation", + "a_number": 5, + }, + } + ) + + msg = await client.receive_json() + + assert not msg["success"] + assert msg["error"] == { + "code": "invalid_format", + "message": "Unsupported domain", + } + + +async def test_substituting_blueprint_inputs_incomplete_input( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test substituting blueprint inputs.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "blueprint/substitute", + "domain": "automation", + "path": "test_event_service.yaml", + "input": { + "service_to_call": "test.automation", + "a_number": 5, + }, + } + ) + + msg = await client.receive_json() + + assert not msg["success"] + assert msg["error"] == { + "code": "unknown_error", + "message": "Missing input trigger_event", + } + + +async def test_substituting_blueprint_inputs_incomplete_input_2( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test substituting blueprint inputs.""" + client = await hass_ws_client(hass) + with patch( + "homeassistant.components.blueprint.models.BlueprintInputs.async_substitute", + side_effect=UndefinedSubstitution("blah"), + ): + await client.send_json_auto_id( + { + "type": "blueprint/substitute", + "domain": "automation", + "path": "test_event_service.yaml", + "input": { + "trigger_event": "test_event", + "service_to_call": "test.automation", + "a_number": 5, + }, + } + ) + msg = await client.receive_json() + + assert not msg["success"] + assert msg["error"] == { + "code": "unknown_error", + "message": "No substitution found 
for input blah", + } diff --git a/tests/components/bluesound/__init__.py b/tests/components/bluesound/__init__.py new file mode 100644 index 00000000000..f8a3701422e --- /dev/null +++ b/tests/components/bluesound/__init__.py @@ -0,0 +1 @@ +"""Tests for the Bluesound integration.""" diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py new file mode 100644 index 00000000000..155d6b66e4e --- /dev/null +++ b/tests/components/bluesound/conftest.py @@ -0,0 +1,132 @@ +"""Common fixtures for the Bluesound tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from pyblu import Status, SyncStatus +import pytest + +from homeassistant.components.bluesound.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def sync_status() -> SyncStatus: + """Return a sync status object.""" + return SyncStatus( + etag="etag", + id="1.1.1.1:11000", + mac="00:11:22:33:44:55", + name="player-name", + image="invalid_url", + initialized=True, + brand="brand", + model="model", + model_name="model-name", + volume_db=0.5, + volume=50, + group=None, + master=None, + slaves=None, + zone=None, + zone_master=None, + zone_slave=None, + mute_volume_db=None, + mute_volume=None, + ) + + +@pytest.fixture +def status() -> Status: + """Return a status object.""" + return Status( + etag="etag", + input_id=None, + service=None, + state="playing", + shuffle=False, + album=None, + artist=None, + name=None, + image=None, + volume=10, + volume_db=22.3, + mute=False, + mute_volume=None, + mute_volume_db=None, + seconds=2, + total_seconds=123.1, + can_seek=False, + sleep=0, + group_name=None, + group_volume=None, + indexing=False, + stream_url=None, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + 
"homeassistant.components.bluesound.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return a mocked config entry.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.2", + CONF_PORT: 11000, + }, + unique_id="00:11:22:33:44:55-11000", + ) + mock_entry.add_to_hass(hass) + + return mock_entry + + +@pytest.fixture +def mock_player(status: Status) -> Generator[AsyncMock]: + """Mock the player.""" + with ( + patch( + "homeassistant.components.bluesound.Player", autospec=True + ) as mock_player, + patch( + "homeassistant.components.bluesound.config_flow.Player", + new=mock_player, + ), + ): + player = mock_player.return_value + player.__aenter__.return_value = player + player.status.return_value = status + player.sync_status.return_value = SyncStatus( + etag="etag", + id="1.1.1.1:11000", + mac="00:11:22:33:44:55", + name="player-name", + image="invalid_url", + initialized=True, + brand="brand", + model="model", + model_name="model-name", + volume_db=0.5, + volume=50, + group=None, + master=None, + slaves=None, + zone=None, + zone_master=None, + zone_slave=None, + mute_volume_db=None, + mute_volume=None, + ) + yield player diff --git a/tests/components/bluesound/test_config_flow.py b/tests/components/bluesound/test_config_flow.py new file mode 100644 index 00000000000..8fecba7017d --- /dev/null +++ b/tests/components/bluesound/test_config_flow.py @@ -0,0 +1,249 @@ +"""Test the Bluesound config flow.""" + +from unittest.mock import AsyncMock + +from aiohttp import ClientConnectionError + +from homeassistant.components.bluesound.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant +from 
homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_user_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.1.1.1", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "player-name" + assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} + assert result["result"].unique_id == "00:11:22:33:44:55-11000" + + mock_setup_entry.assert_called_once() + + +async def test_user_flow_cannot_connect( + hass: HomeAssistant, mock_player: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_player.sync_status.side_effect = ClientConnectionError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.1.1.1", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert result["step_id"] == "user" + + mock_player.sync_status.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.1.1.1", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "player-name" + assert result["data"] == { + CONF_HOST: "1.1.1.1", + CONF_PORT: 11000, + } + + mock_setup_entry.assert_called_once() + + +async def test_user_flow_aleady_configured( + hass: HomeAssistant, + mock_player: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + 
"""Test we handle already configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.1.1.1", + CONF_PORT: 11000, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_HOST] == "1.1.1.1" + + mock_player.sync_status.assert_called_once() + + +async def test_import_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "player-name" + assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} + assert result["result"].unique_id == "00:11:22:33:44:55-11000" + + mock_setup_entry.assert_called_once() + mock_player.sync_status.assert_called_once() + + +async def test_import_flow_cannot_connect( + hass: HomeAssistant, mock_player: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + mock_player.sync_status.side_effect = ClientConnectionError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + mock_player.sync_status.assert_called_once() + + +async def test_import_flow_already_configured( + hass: HomeAssistant, + mock_player: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test we handle already configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, + ) + + assert 
result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + mock_player.sync_status.assert_called_once() + + +async def test_zeroconf_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address="1.1.1.1", + ip_addresses=["1.1.1.1"], + port=11000, + hostname="player-name", + type="_musc._tcp.local.", + name="player-name._musc._tcp.local.", + properties={}, + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + mock_setup_entry.assert_not_called() + mock_player.sync_status.assert_called_once() + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "player-name" + assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} + assert result["result"].unique_id == "00:11:22:33:44:55-11000" + + mock_setup_entry.assert_called_once() + + +async def test_zeroconf_flow_cannot_connect( + hass: HomeAssistant, mock_player: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + mock_player.sync_status.side_effect = ClientConnectionError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address="1.1.1.1", + ip_addresses=["1.1.1.1"], + port=11000, + hostname="player-name", + type="_musc._tcp.local.", + name="player-name._musc._tcp.local.", + properties={}, + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + mock_player.sync_status.assert_called_once() + + +async def test_zeroconf_flow_already_configured( + hass: HomeAssistant, + mock_player: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + 
"""Test we handle already configured and update the host.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address="1.1.1.1", + ip_addresses=["1.1.1.1"], + port=11000, + hostname="player-name", + type="_musc._tcp.local.", + name="player-name._musc._tcp.local.", + properties={}, + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_HOST] == "1.1.1.1" + + mock_player.sync_status.assert_called_once() diff --git a/tests/components/bluetooth/__init__.py b/tests/components/bluetooth/__init__.py index eae867b96d5..8794d808718 100644 --- a/tests/components/bluetooth/__init__.py +++ b/tests/components/bluetooth/__init__.py @@ -271,7 +271,7 @@ async def _async_setup_with_adapter( class MockBleakClient(BleakClient): """Mock bleak client.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock init.""" super().__init__(*args, **kwargs) self._device_path = "/dev/test" diff --git a/tests/components/bluetooth/conftest.py b/tests/components/bluetooth/conftest.py index 4373ec3f915..93a1c59cba1 100644 --- a/tests/components/bluetooth/conftest.py +++ b/tests/components/bluetooth/conftest.py @@ -1,12 +1,12 @@ """Tests for the bluetooth component.""" +from collections.abc import Generator from unittest.mock import patch from bleak_retry_connector import bleak_manager from dbus_fast.aio import message_bus import habluetooth.util as habluetooth_utils import pytest -from typing_extensions import Generator @pytest.fixture(name="disable_bluez_manager_socket", autouse=True, scope="package") diff --git a/tests/components/bluetooth/test_init.py b/tests/components/bluetooth/test_init.py index bd38c9cfbae..8e7d604f794 100644 --- a/tests/components/bluetooth/test_init.py +++ b/tests/components/bluetooth/test_init.py @@ -3,6 +3,7 @@ import asyncio from datetime 
import timedelta import time +from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from bleak import BleakError @@ -100,7 +101,7 @@ async def test_setup_and_stop_passive( init_kwargs = None class MockPassiveBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs @@ -151,7 +152,7 @@ async def test_setup_and_stop_old_bluez( init_kwargs = None class MockBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs diff --git a/tests/components/bluetooth/test_manager.py b/tests/components/bluetooth/test_manager.py index 4bff7cbe94d..0ac49aa72cd 100644 --- a/tests/components/bluetooth/test_manager.py +++ b/tests/components/bluetooth/test_manager.py @@ -1,5 +1,6 @@ """Tests for the Bluetooth integration manager.""" +from collections.abc import Generator from datetime import timedelta import time from typing import Any @@ -11,7 +12,6 @@ from bluetooth_adapters import AdvertisementHistory # pylint: disable-next=no-name-in-module from habluetooth.advertisement_tracker import TRACKER_BUFFERING_WOBBLE_SECONDS import pytest -from typing_extensions import Generator from homeassistant.components import bluetooth from homeassistant.components.bluetooth import ( diff --git a/tests/components/bluetooth/test_passive_update_processor.py b/tests/components/bluetooth/test_passive_update_processor.py index 8e1163c0bdb..d7a7a8ba08c 100644 --- a/tests/components/bluetooth/test_passive_update_processor.py +++ b/tests/components/bluetooth/test_passive_update_processor.py @@ -583,8 +583,7 @@ async def test_exception_from_update_method( nonlocal run_count run_count += 1 if run_count == 2: - # pylint: disable-next=broad-exception-raised - raise Exception("Test exception") + raise Exception("Test exception") # noqa: TRY002 return 
GENERIC_PASSIVE_BLUETOOTH_DATA_UPDATE coordinator = PassiveBluetoothProcessorCoordinator( @@ -1418,8 +1417,7 @@ async def test_exception_from_coordinator_update_method( nonlocal run_count run_count += 1 if run_count == 2: - # pylint: disable-next=broad-exception-raised - raise Exception("Test exception") + raise Exception("Test exception") # noqa: TRY002 return {"test": "data"} @callback @@ -1653,12 +1651,12 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( unregister_binary_sensor_processor() unregister_sensor_processor() - async with async_test_home_assistant() as hass: - await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + async with async_test_home_assistant() as test_hass: + await async_setup_component(test_hass, DOMAIN, {DOMAIN: {}}) current_entry.set(entry) coordinator = PassiveBluetoothProcessorCoordinator( - hass, + test_hass, _LOGGER, "aa:bb:cc:dd:ee:ff", BluetoothScanningMode.ACTIVE, @@ -1706,7 +1704,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( ] sensor_entity_one: PassiveBluetoothProcessorEntity = sensor_entities[0] - sensor_entity_one.hass = hass + sensor_entity_one.hass = test_hass assert sensor_entity_one.available is False # service data not injected assert sensor_entity_one.unique_id == "aa:bb:cc:dd:ee:ff-pressure" assert sensor_entity_one.device_info == { @@ -1723,7 +1721,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( binary_sensor_entity_one: PassiveBluetoothProcessorEntity = ( binary_sensor_entities[0] ) - binary_sensor_entity_one.hass = hass + binary_sensor_entity_one.hass = test_hass assert binary_sensor_entity_one.available is False # service data not injected assert binary_sensor_entity_one.unique_id == "aa:bb:cc:dd:ee:ff-motion" assert binary_sensor_entity_one.device_info == { @@ -1739,7 +1737,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( cancel_coordinator() unregister_binary_sensor_processor() 
unregister_sensor_processor() - await hass.async_stop() + await test_hass.async_stop() NAMING_PASSIVE_BLUETOOTH_DATA_UPDATE = PassiveBluetoothDataUpdate( diff --git a/tests/components/bluetooth/test_scanner.py b/tests/components/bluetooth/test_scanner.py index dc25f29111c..6acb86476e7 100644 --- a/tests/components/bluetooth/test_scanner.py +++ b/tests/components/bluetooth/test_scanner.py @@ -3,6 +3,7 @@ import asyncio from datetime import timedelta import time +from typing import Any from unittest.mock import ANY, MagicMock, patch from bleak import BleakError @@ -211,7 +212,7 @@ async def test_recovery_from_dbus_restart(hass: HomeAssistant) -> None: mock_discovered = [] class MockBleakScanner: - def __init__(self, detection_callback, *args, **kwargs): + def __init__(self, detection_callback, *args: Any, **kwargs: Any) -> None: nonlocal _callback _callback = detection_callback @@ -631,7 +632,7 @@ async def test_setup_and_stop_macos( init_kwargs = None class MockBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs diff --git a/tests/components/bluetooth/test_wrappers.py b/tests/components/bluetooth/test_wrappers.py index 0c5645b3f71..5fc3d70c97a 100644 --- a/tests/components/bluetooth/test_wrappers.py +++ b/tests/components/bluetooth/test_wrappers.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Iterator from contextlib import contextmanager from unittest.mock import patch @@ -27,7 +28,7 @@ from . 
import _get_manager, generate_advertisement_data, generate_ble_device @contextmanager -def mock_shutdown(manager: HomeAssistantBluetoothManager) -> None: +def mock_shutdown(manager: HomeAssistantBluetoothManager) -> Iterator[None]: """Mock shutdown of the HomeAssistantBluetoothManager.""" manager.shutdown = True yield diff --git a/tests/components/bluetooth_le_tracker/test_device_tracker.py b/tests/components/bluetooth_le_tracker/test_device_tracker.py index f183f987cde..452297e38c2 100644 --- a/tests/components/bluetooth_le_tracker/test_device_tracker.py +++ b/tests/components/bluetooth_le_tracker/test_device_tracker.py @@ -1,6 +1,7 @@ """Test Bluetooth LE device tracker.""" from datetime import timedelta +from typing import Any from unittest.mock import patch from bleak import BleakError @@ -31,7 +32,7 @@ from tests.components.bluetooth import generate_advertisement_data, generate_ble class MockBleakClient: """Mock BleakClient.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index c11d5ef0021..655955ff9aa 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -1,6 +1,10 @@ """Tests for the for the BMW Connected Drive integration.""" -from bimmer_connected.const import REMOTE_SERVICE_BASE_URL, VEHICLE_CHARGING_BASE_URL +from bimmer_connected.const import ( + REMOTE_SERVICE_V4_BASE_URL, + VEHICLE_CHARGING_BASE_URL, + VEHICLE_POI_URL, +) import respx from homeassistant import config_entries @@ -67,10 +71,11 @@ def check_remote_service_call( first_remote_service_call: respx.models.Call = next( c for c in router.calls - if c.request.url.path.startswith(REMOTE_SERVICE_BASE_URL) + if c.request.url.path.startswith(REMOTE_SERVICE_V4_BASE_URL) or c.request.url.path.startswith( 
VEHICLE_CHARGING_BASE_URL.replace("/{vin}", "") ) + or c.request.url.path.endswith(VEHICLE_POI_URL.rsplit("/", maxsplit=1)[-1]) ) assert ( first_remote_service_call.request.url.path.endswith(remote_service) is True @@ -87,6 +92,10 @@ def check_remote_service_call( == remote_service_params ) + # Send POI doesn't return a status response, so we can't check it + if remote_service == "send-to-car": + return + # Now check final result last_event_status_call = next( c for c in reversed(router.calls) if c.request.url.path.endswith("eventStatus") diff --git a/tests/components/bmw_connected_drive/conftest.py b/tests/components/bmw_connected_drive/conftest.py index f69763dae77..7581b8c6f76 100644 --- a/tests/components/bmw_connected_drive/conftest.py +++ b/tests/components/bmw_connected_drive/conftest.py @@ -1,11 +1,12 @@ """Fixtures for BMW tests.""" +from collections.abc import Generator + from bimmer_connected.tests import ALL_CHARGING_SETTINGS, ALL_PROFILES, ALL_STATES from bimmer_connected.tests.common import MyBMWMockRouter from bimmer_connected.vehicle import remote_services import pytest import respx -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr index 610e194c0e5..c0462279e59 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr @@ -35,7 +35,6 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'i3 (+ REX) Charging status', }), @@ -83,11 +82,8 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ 
REX)', 'device_class': 'problem', 'friendly_name': 'i3 (+ REX) Check control messages', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_check_control_messages', @@ -133,17 +129,14 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2022-10-01', - 'car': 'i3 (+ REX)', 'device_class': 'problem', 'friendly_name': 'i3 (+ REX) Condition based services', 'vehicle_check': 'OK', 'vehicle_check_date': '2023-05-01', 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2023-05-01', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_condition_based_services', @@ -189,7 +182,6 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'i3 (+ REX) Connection status', }), @@ -237,12 +229,9 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'device_class': 'lock', 'door_lock_state': 'UNLOCKED', 'friendly_name': 'i3 (+ REX) Door lock state', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_door_lock_state', @@ -288,8 +277,6 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'device_class': 'opening', 'friendly_name': 'i3 (+ REX) Lids', 'hood': 'CLOSED', @@ -299,7 +286,6 @@ 'rightRear': 'CLOSED', 'sunRoof': 'CLOSED', 'trunk': 'CLOSED', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_lids', @@ -345,7 +331,6 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_pre_entry_climatization-state] 
StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Pre entry climatization', }), 'context': , @@ -392,13 +377,10 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'device_class': 'opening', 'friendly_name': 'i3 (+ REX) Windows', 'leftFront': 'CLOSED', 'rightFront': 'CLOSED', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_windows', @@ -444,7 +426,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'i4 eDrive40 Charging status', }), @@ -492,12 +473,9 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'device_class': 'problem', 'friendly_name': 'i4 eDrive40 Check control messages', 'tire_pressure': 'LOW', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_check_control_messages', @@ -543,11 +521,9 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', - 'car': 'i4 eDrive40', 'device_class': 'problem', 'friendly_name': 'i4 eDrive40 Condition based services', 'tire_wear_front': 'OK', @@ -558,7 +534,6 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_condition_based_services', @@ -604,7 +579,6 @@ # name: 
test_entity_state_attrs[binary_sensor.i4_edrive40_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'i4 eDrive40 Connection status', }), @@ -652,12 +626,9 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'i4 eDrive40 Door lock state', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_door_lock_state', @@ -703,8 +674,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'device_class': 'opening', 'friendly_name': 'i4 eDrive40 Lids', 'hood': 'CLOSED', @@ -713,7 +682,6 @@ 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', 'trunk': 'CLOSED', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_lids', @@ -759,7 +727,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_pre_entry_climatization-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Pre entry climatization', }), 'context': , @@ -806,8 +773,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'device_class': 'opening', 'friendly_name': 'i4 eDrive40 Windows', 'leftFront': 'CLOSED', @@ -815,7 +780,6 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_windows', @@ -861,7 +825,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_charging_status-state] StateSnapshot({ 
'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'iX xDrive50 Charging status', }), @@ -909,12 +872,9 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'device_class': 'problem', 'friendly_name': 'iX xDrive50 Check control messages', 'tire_pressure': 'LOW', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_check_control_messages', @@ -960,11 +920,9 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', - 'car': 'iX xDrive50', 'device_class': 'problem', 'friendly_name': 'iX xDrive50 Condition based services', 'tire_wear_front': 'OK', @@ -975,7 +933,6 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_condition_based_services', @@ -1021,7 +978,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'iX xDrive50 Connection status', }), @@ -1069,12 +1025,9 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'iX xDrive50 Door lock state', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_door_lock_state', @@ -1120,8 +1073,6 @@ # name: 
test_entity_state_attrs[binary_sensor.ix_xdrive50_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'device_class': 'opening', 'friendly_name': 'iX xDrive50 Lids', 'hood': 'CLOSED', @@ -1131,7 +1082,6 @@ 'rightRear': 'CLOSED', 'sunRoof': 'CLOSED', 'trunk': 'CLOSED', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_lids', @@ -1177,7 +1127,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_pre_entry_climatization-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Pre entry climatization', }), 'context': , @@ -1224,8 +1173,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'device_class': 'opening', 'friendly_name': 'iX xDrive50 Windows', 'leftFront': 'CLOSED', @@ -1233,7 +1180,6 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_windows', @@ -1279,13 +1225,10 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'device_class': 'problem', 'engine_oil': 'LOW', 'friendly_name': 'M340i xDrive Check control messages', 'tire_pressure': 'LOW', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_check_control_messages', @@ -1331,11 +1274,9 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', - 'car': 'M340i xDrive', 'device_class': 'problem', 
'friendly_name': 'M340i xDrive Condition based services', 'oil': 'OK', @@ -1349,7 +1290,6 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_condition_based_services', @@ -1395,12 +1335,9 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'M340i xDrive Door lock state', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_door_lock_state', @@ -1446,8 +1383,6 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'device_class': 'opening', 'friendly_name': 'M340i xDrive Lids', 'hood': 'CLOSED', @@ -1456,7 +1391,6 @@ 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', 'trunk': 'CLOSED', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_lids', @@ -1502,8 +1436,6 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'device_class': 'opening', 'friendly_name': 'M340i xDrive Windows', 'leftFront': 'CLOSED', @@ -1511,7 +1443,6 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_windows', diff --git a/tests/components/bmw_connected_drive/snapshots/test_button.ambr b/tests/components/bmw_connected_drive/snapshots/test_button.ambr index cd3f94c7e5e..f38441125ce 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_button.ambr +++ 
b/tests/components/bmw_connected_drive/snapshots/test_button.ambr @@ -35,7 +35,6 @@ # name: test_entity_state_attrs[button.i3_rex_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Activate air conditioning', }), 'context': , @@ -82,7 +81,6 @@ # name: test_entity_state_attrs[button.i3_rex_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Find vehicle', }), 'context': , @@ -129,7 +127,6 @@ # name: test_entity_state_attrs[button.i3_rex_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Flash lights', }), 'context': , @@ -176,7 +173,6 @@ # name: test_entity_state_attrs[button.i3_rex_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Sound horn', }), 'context': , @@ -223,7 +219,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Activate air conditioning', }), 'context': , @@ -270,7 +265,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_deactivate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Deactivate air conditioning', }), 'context': , @@ -317,7 +311,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Find vehicle', }), 'context': , @@ -364,7 +357,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 
'i4 eDrive40 Flash lights', }), 'context': , @@ -411,7 +403,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Sound horn', }), 'context': , @@ -458,7 +449,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Activate air conditioning', }), 'context': , @@ -505,7 +495,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_deactivate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Deactivate air conditioning', }), 'context': , @@ -552,7 +541,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Find vehicle', }), 'context': , @@ -599,7 +587,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Flash lights', }), 'context': , @@ -646,7 +633,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Sound horn', }), 'context': , @@ -693,7 +679,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Activate air conditioning', }), 'context': , @@ -740,7 +725,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_deactivate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by 
MyBMW', 'friendly_name': 'M340i xDrive Deactivate air conditioning', }), 'context': , @@ -787,7 +771,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Find vehicle', }), 'context': , @@ -834,7 +817,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Flash lights', }), 'context': , @@ -881,7 +863,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Sound horn', }), 'context': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr index 477cd24376d..81ef1220069 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr @@ -232,16 +232,19 @@ }), 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'checkSustainabilityDPP': False, + 'alarmSystem': True, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, + 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -252,27 +255,38 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 
'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -287,11 +301,45 @@ 'NOT_SUPPORTED', ]), }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + 'state': 'ACTIVATED', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + 'state': 'ACTIVATED', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + 'state': 'ACTIVATED', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 
'unlock': True, @@ -570,6 +618,7 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), + 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -660,6 +709,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -1086,15 +1147,19 @@ }), 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -1105,37 +1170,80 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': True, + 'isCustomerEsimSupported': False, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 
'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -1408,6 +1516,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -1498,6 +1607,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -1840,16 +1961,20 @@ }), 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 
'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -1867,31 +1992,73 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': True, + 'isThirdPartyAppStoreSupported': False, + 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + 
}), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -2027,6 +2194,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -2113,6 +2281,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -2942,226 +3122,6 @@ }), ]), 'fingerprint': list([ - dict({ - 'content': dict({ - 'capabilities': dict({ - 'climateFunction': 'AIR_CONDITIONING', - 'climateNow': True, - 'climateTimerTrigger': 'DEPARTURE_TIMER', - 'horn': True, - 'isBmwChargingSupported': True, - 'isCarSharingSupported': False, - 'isChargeNowForBusinessSupported': False, - 'isChargingHistorySupported': True, - 'isChargingHospitalityEnabled': False, - 'isChargingLoudnessEnabled': False, - 'isChargingPlanSupported': True, - 'isChargingPowerLimitEnabled': False, - 'isChargingSettingsEnabled': False, - 'isChargingTargetSocEnabled': False, - 'isClimateTimerSupported': True, - 'isCustomerEsimSupported': False, - 'isDCSContractManagementSupported': True, - 'isDataPrivacyEnabled': False, - 'isEasyChargeEnabled': False, - 'isEvGoChargingSupported': False, - 'isMiniChargingSupported': False, - 'isNonLscFeatureEnabled': False, - 'isRemoteEngineStartSupported': False, - 'isRemoteHistoryDeletionSupported': False, - 'isRemoteHistorySupported': True, - 'isRemoteParkingSupported': False, - 'isRemoteServicesActivationRequired': False, - 
'isRemoteServicesBookingRequired': False, - 'isScanAndChargeSupported': False, - 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': False, - 'lastStateCallState': 'ACTIVATED', - 'lights': True, - 'lock': True, - 'remoteChargingCommands': dict({ - }), - 'sendPoi': True, - 'specialThemeSupport': list([ - ]), - 'unlock': True, - 'vehicleFinder': False, - 'vehicleStateSource': 'LAST_STATE_CALL', - }), - 'state': dict({ - 'chargingProfile': dict({ - 'chargingControlType': 'WEEKLY_PLANNER', - 'chargingMode': 'DELAYED_CHARGING', - 'chargingPreference': 'CHARGING_WINDOW', - 'chargingSettings': dict({ - 'hospitality': 'NO_ACTION', - 'idcc': 'NO_ACTION', - 'targetSoc': 100, - }), - 'climatisationOn': False, - 'departureTimes': list([ - dict({ - 'action': 'DEACTIVATE', - 'id': 1, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 35, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 2, - 'timeStamp': dict({ - 'hour': 18, - 'minute': 0, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - 'SATURDAY', - 'SUNDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 3, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 0, - }), - 'timerWeekDays': list([ - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 4, - 'timerWeekDays': list([ - ]), - }), - ]), - 'reductionOfChargeCurrent': dict({ - 'end': dict({ - 'hour': 1, - 'minute': 30, - }), - 'start': dict({ - 'hour': 18, - 'minute': 1, - }), - }), - }), - 'checkControlMessages': list([ - ]), - 'climateTimers': list([ - dict({ - 'departureTime': dict({ - 'hour': 6, - 'minute': 40, - }), - 'isWeeklyTimer': True, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'THURSDAY', - 'SUNDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 12, - 'minute': 50, - }), - 'isWeeklyTimer': False, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'MONDAY', - ]), 
- }), - dict({ - 'departureTime': dict({ - 'hour': 18, - 'minute': 59, - }), - 'isWeeklyTimer': True, - 'timerAction': 'DEACTIVATE', - 'timerWeekDays': list([ - 'WEDNESDAY', - ]), - }), - ]), - 'combustionFuelLevel': dict({ - 'range': 105, - 'remainingFuelLiters': 6, - }), - 'currentMileage': 137009, - 'doorsState': dict({ - 'combinedSecurityState': 'UNLOCKED', - 'combinedState': 'CLOSED', - 'hood': 'CLOSED', - 'leftFront': 'CLOSED', - 'leftRear': 'CLOSED', - 'rightFront': 'CLOSED', - 'rightRear': 'CLOSED', - 'trunk': 'CLOSED', - }), - 'driverPreferences': dict({ - 'lscPrivacyMode': 'OFF', - }), - 'electricChargingState': dict({ - 'chargingConnectionType': 'CONDUCTIVE', - 'chargingLevelPercent': 82, - 'chargingStatus': 'WAITING_FOR_CHARGING', - 'chargingTarget': 100, - 'isChargerConnected': True, - 'range': 174, - }), - 'isLeftSteering': True, - 'isLscSupported': True, - 'lastFetched': '2022-06-22T14:24:23.982Z', - 'lastUpdatedAt': '2022-06-22T13:58:52Z', - 'range': 174, - 'requiredServices': list([ - dict({ - 'dateTime': '2022-10-01T00:00:00.000Z', - 'description': 'Next service due by the specified date.', - 'status': 'OK', - 'type': 'BRAKE_FLUID', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next vehicle check due after the specified distance or date.', - 'status': 'OK', - 'type': 'VEHICLE_CHECK', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next state inspection due by the specified date.', - 'status': 'OK', - 'type': 'VEHICLE_TUV', - }), - ]), - 'roofState': dict({ - 'roofState': 'CLOSED', - 'roofStateType': 'SUN_ROOF', - }), - 'windowsState': dict({ - 'combinedState': 'CLOSED', - 'leftFront': 'CLOSED', - 'rightFront': 'CLOSED', - }), - }), - }), - 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', - }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -3235,20 +3195,31 @@ }), 'filename': 'mini-eadrax-vcs_v5_vehicle-list.json', }), + dict({ + 'content': dict({ + 
'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', + 'mappingInfos': list([ + ]), + }), + 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', + }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'checkSustainabilityDPP': False, + 'alarmSystem': True, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, + 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -3259,27 +3230,38 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + 
}), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -3294,11 +3276,45 @@ 'NOT_SUPPORTED', ]), }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + 'state': 'ACTIVATED', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + 'state': 'ACTIVATED', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + 'state': 'ACTIVATED', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -3476,6 +3492,7 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), + 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -3566,6 +3583,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -3685,15 +3714,19 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': 
False, 'isChargeNowForBusinessSupported': True, @@ -3704,37 +3737,80 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': True, + 'isCustomerEsimSupported': False, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 
'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -3906,6 +3982,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -3996,6 +4073,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -4115,16 +4204,20 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -4142,31 +4235,73 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 
'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': True, + 'isThirdPartyAppStoreSupported': False, + 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -4300,6 +4435,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -4386,6 +4522,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -5343,226 +5491,6 @@ 'vin': '**REDACTED**', }), 'fingerprint': list([ - dict({ - 'content': dict({ - 'capabilities': 
dict({ - 'climateFunction': 'AIR_CONDITIONING', - 'climateNow': True, - 'climateTimerTrigger': 'DEPARTURE_TIMER', - 'horn': True, - 'isBmwChargingSupported': True, - 'isCarSharingSupported': False, - 'isChargeNowForBusinessSupported': False, - 'isChargingHistorySupported': True, - 'isChargingHospitalityEnabled': False, - 'isChargingLoudnessEnabled': False, - 'isChargingPlanSupported': True, - 'isChargingPowerLimitEnabled': False, - 'isChargingSettingsEnabled': False, - 'isChargingTargetSocEnabled': False, - 'isClimateTimerSupported': True, - 'isCustomerEsimSupported': False, - 'isDCSContractManagementSupported': True, - 'isDataPrivacyEnabled': False, - 'isEasyChargeEnabled': False, - 'isEvGoChargingSupported': False, - 'isMiniChargingSupported': False, - 'isNonLscFeatureEnabled': False, - 'isRemoteEngineStartSupported': False, - 'isRemoteHistoryDeletionSupported': False, - 'isRemoteHistorySupported': True, - 'isRemoteParkingSupported': False, - 'isRemoteServicesActivationRequired': False, - 'isRemoteServicesBookingRequired': False, - 'isScanAndChargeSupported': False, - 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': False, - 'lastStateCallState': 'ACTIVATED', - 'lights': True, - 'lock': True, - 'remoteChargingCommands': dict({ - }), - 'sendPoi': True, - 'specialThemeSupport': list([ - ]), - 'unlock': True, - 'vehicleFinder': False, - 'vehicleStateSource': 'LAST_STATE_CALL', - }), - 'state': dict({ - 'chargingProfile': dict({ - 'chargingControlType': 'WEEKLY_PLANNER', - 'chargingMode': 'DELAYED_CHARGING', - 'chargingPreference': 'CHARGING_WINDOW', - 'chargingSettings': dict({ - 'hospitality': 'NO_ACTION', - 'idcc': 'NO_ACTION', - 'targetSoc': 100, - }), - 'climatisationOn': False, - 'departureTimes': list([ - dict({ - 'action': 'DEACTIVATE', - 'id': 1, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 35, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - ]), - }), - dict({ - 'action': 
'DEACTIVATE', - 'id': 2, - 'timeStamp': dict({ - 'hour': 18, - 'minute': 0, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - 'SATURDAY', - 'SUNDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 3, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 0, - }), - 'timerWeekDays': list([ - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 4, - 'timerWeekDays': list([ - ]), - }), - ]), - 'reductionOfChargeCurrent': dict({ - 'end': dict({ - 'hour': 1, - 'minute': 30, - }), - 'start': dict({ - 'hour': 18, - 'minute': 1, - }), - }), - }), - 'checkControlMessages': list([ - ]), - 'climateTimers': list([ - dict({ - 'departureTime': dict({ - 'hour': 6, - 'minute': 40, - }), - 'isWeeklyTimer': True, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'THURSDAY', - 'SUNDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 12, - 'minute': 50, - }), - 'isWeeklyTimer': False, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'MONDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 18, - 'minute': 59, - }), - 'isWeeklyTimer': True, - 'timerAction': 'DEACTIVATE', - 'timerWeekDays': list([ - 'WEDNESDAY', - ]), - }), - ]), - 'combustionFuelLevel': dict({ - 'range': 105, - 'remainingFuelLiters': 6, - }), - 'currentMileage': 137009, - 'doorsState': dict({ - 'combinedSecurityState': 'UNLOCKED', - 'combinedState': 'CLOSED', - 'hood': 'CLOSED', - 'leftFront': 'CLOSED', - 'leftRear': 'CLOSED', - 'rightFront': 'CLOSED', - 'rightRear': 'CLOSED', - 'trunk': 'CLOSED', - }), - 'driverPreferences': dict({ - 'lscPrivacyMode': 'OFF', - }), - 'electricChargingState': dict({ - 'chargingConnectionType': 'CONDUCTIVE', - 'chargingLevelPercent': 82, - 'chargingStatus': 'WAITING_FOR_CHARGING', - 'chargingTarget': 100, - 'isChargerConnected': True, - 'range': 174, - }), - 'isLeftSteering': True, - 'isLscSupported': True, - 'lastFetched': '2022-06-22T14:24:23.982Z', - 'lastUpdatedAt': '2022-06-22T13:58:52Z', - 'range': 
174, - 'requiredServices': list([ - dict({ - 'dateTime': '2022-10-01T00:00:00.000Z', - 'description': 'Next service due by the specified date.', - 'status': 'OK', - 'type': 'BRAKE_FLUID', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next vehicle check due after the specified distance or date.', - 'status': 'OK', - 'type': 'VEHICLE_CHECK', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next state inspection due by the specified date.', - 'status': 'OK', - 'type': 'VEHICLE_TUV', - }), - ]), - 'roofState': dict({ - 'roofState': 'CLOSED', - 'roofStateType': 'SUN_ROOF', - }), - 'windowsState': dict({ - 'combinedState': 'CLOSED', - 'leftFront': 'CLOSED', - 'rightFront': 'CLOSED', - }), - }), - }), - 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', - }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -5636,20 +5564,31 @@ }), 'filename': 'mini-eadrax-vcs_v5_vehicle-list.json', }), + dict({ + 'content': dict({ + 'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', + 'mappingInfos': list([ + ]), + }), + 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', + }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'checkSustainabilityDPP': False, + 'alarmSystem': True, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, + 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -5660,27 +5599,38 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 
'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -5695,11 +5645,45 @@ 'NOT_SUPPORTED', ]), }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + 'state': 'ACTIVATED', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + 'state': 'ACTIVATED', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + 'state': 'ACTIVATED', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 
'unlock': True, @@ -5877,6 +5861,7 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), + 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -5967,6 +5952,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -6086,15 +6083,19 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -6105,37 +6106,80 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': True, + 'isCustomerEsimSupported': False, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 
'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -6307,6 +6351,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -6397,6 +6442,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -6516,16 +6573,20 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'VENTILATION', 'climateNow': 
True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -6543,31 +6604,73 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': True, + 'isThirdPartyAppStoreSupported': False, + 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 
'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -6701,6 +6804,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -6787,6 +6891,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -7098,226 +7214,6 @@ dict({ 'data': None, 'fingerprint': list([ - dict({ - 'content': dict({ - 'capabilities': dict({ - 'climateFunction': 'AIR_CONDITIONING', - 'climateNow': True, - 'climateTimerTrigger': 'DEPARTURE_TIMER', - 'horn': True, - 'isBmwChargingSupported': True, - 'isCarSharingSupported': False, - 'isChargeNowForBusinessSupported': False, - 'isChargingHistorySupported': True, - 'isChargingHospitalityEnabled': False, - 'isChargingLoudnessEnabled': False, - 'isChargingPlanSupported': True, - 'isChargingPowerLimitEnabled': False, - 'isChargingSettingsEnabled': False, - 'isChargingTargetSocEnabled': False, - 'isClimateTimerSupported': True, - 'isCustomerEsimSupported': False, - 'isDCSContractManagementSupported': True, - 'isDataPrivacyEnabled': False, - 'isEasyChargeEnabled': False, - 'isEvGoChargingSupported': False, - 'isMiniChargingSupported': False, - 'isNonLscFeatureEnabled': False, - 'isRemoteEngineStartSupported': False, - 'isRemoteHistoryDeletionSupported': False, - 'isRemoteHistorySupported': True, - 'isRemoteParkingSupported': False, - 
'isRemoteServicesActivationRequired': False, - 'isRemoteServicesBookingRequired': False, - 'isScanAndChargeSupported': False, - 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': False, - 'lastStateCallState': 'ACTIVATED', - 'lights': True, - 'lock': True, - 'remoteChargingCommands': dict({ - }), - 'sendPoi': True, - 'specialThemeSupport': list([ - ]), - 'unlock': True, - 'vehicleFinder': False, - 'vehicleStateSource': 'LAST_STATE_CALL', - }), - 'state': dict({ - 'chargingProfile': dict({ - 'chargingControlType': 'WEEKLY_PLANNER', - 'chargingMode': 'DELAYED_CHARGING', - 'chargingPreference': 'CHARGING_WINDOW', - 'chargingSettings': dict({ - 'hospitality': 'NO_ACTION', - 'idcc': 'NO_ACTION', - 'targetSoc': 100, - }), - 'climatisationOn': False, - 'departureTimes': list([ - dict({ - 'action': 'DEACTIVATE', - 'id': 1, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 35, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 2, - 'timeStamp': dict({ - 'hour': 18, - 'minute': 0, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - 'SATURDAY', - 'SUNDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 3, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 0, - }), - 'timerWeekDays': list([ - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 4, - 'timerWeekDays': list([ - ]), - }), - ]), - 'reductionOfChargeCurrent': dict({ - 'end': dict({ - 'hour': 1, - 'minute': 30, - }), - 'start': dict({ - 'hour': 18, - 'minute': 1, - }), - }), - }), - 'checkControlMessages': list([ - ]), - 'climateTimers': list([ - dict({ - 'departureTime': dict({ - 'hour': 6, - 'minute': 40, - }), - 'isWeeklyTimer': True, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'THURSDAY', - 'SUNDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 12, - 'minute': 50, - }), - 'isWeeklyTimer': False, - 'timerAction': 
'ACTIVATE', - 'timerWeekDays': list([ - 'MONDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 18, - 'minute': 59, - }), - 'isWeeklyTimer': True, - 'timerAction': 'DEACTIVATE', - 'timerWeekDays': list([ - 'WEDNESDAY', - ]), - }), - ]), - 'combustionFuelLevel': dict({ - 'range': 105, - 'remainingFuelLiters': 6, - }), - 'currentMileage': 137009, - 'doorsState': dict({ - 'combinedSecurityState': 'UNLOCKED', - 'combinedState': 'CLOSED', - 'hood': 'CLOSED', - 'leftFront': 'CLOSED', - 'leftRear': 'CLOSED', - 'rightFront': 'CLOSED', - 'rightRear': 'CLOSED', - 'trunk': 'CLOSED', - }), - 'driverPreferences': dict({ - 'lscPrivacyMode': 'OFF', - }), - 'electricChargingState': dict({ - 'chargingConnectionType': 'CONDUCTIVE', - 'chargingLevelPercent': 82, - 'chargingStatus': 'WAITING_FOR_CHARGING', - 'chargingTarget': 100, - 'isChargerConnected': True, - 'range': 174, - }), - 'isLeftSteering': True, - 'isLscSupported': True, - 'lastFetched': '2022-06-22T14:24:23.982Z', - 'lastUpdatedAt': '2022-06-22T13:58:52Z', - 'range': 174, - 'requiredServices': list([ - dict({ - 'dateTime': '2022-10-01T00:00:00.000Z', - 'description': 'Next service due by the specified date.', - 'status': 'OK', - 'type': 'BRAKE_FLUID', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next vehicle check due after the specified distance or date.', - 'status': 'OK', - 'type': 'VEHICLE_CHECK', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next state inspection due by the specified date.', - 'status': 'OK', - 'type': 'VEHICLE_TUV', - }), - ]), - 'roofState': dict({ - 'roofState': 'CLOSED', - 'roofStateType': 'SUN_ROOF', - }), - 'windowsState': dict({ - 'combinedState': 'CLOSED', - 'leftFront': 'CLOSED', - 'rightFront': 'CLOSED', - }), - }), - }), - 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', - }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -7391,20 +7287,31 @@ }), 'filename': 
'mini-eadrax-vcs_v5_vehicle-list.json', }), + dict({ + 'content': dict({ + 'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', + 'mappingInfos': list([ + ]), + }), + 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', + }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'checkSustainabilityDPP': False, + 'alarmSystem': True, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, + 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -7415,27 +7322,38 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': 
dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -7450,11 +7368,45 @@ 'NOT_SUPPORTED', ]), }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + 'state': 'ACTIVATED', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + 'state': 'ACTIVATED', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + 'state': 'ACTIVATED', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -7632,6 +7584,7 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), + 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -7722,6 +7675,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -7841,15 +7806,19 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 
'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -7860,37 +7829,80 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': True, + 'isCustomerEsimSupported': False, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 
'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -8062,6 +8074,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -8152,6 +8165,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -8271,16 +8296,20 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -8298,31 +8327,73 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 
'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': True, + 'isThirdPartyAppStoreSupported': False, + 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -8456,6 +8527,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -8542,6 +8614,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', diff --git 
a/tests/components/bmw_connected_drive/snapshots/test_lock.ambr b/tests/components/bmw_connected_drive/snapshots/test_lock.ambr index 17e6b118011..395c6e56dda 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_lock.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_lock.ambr @@ -35,12 +35,9 @@ # name: test_entity_state_attrs[lock.i3_rex_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'door_lock_state': 'UNLOCKED', 'friendly_name': 'i3 (+ REX) Lock', 'supported_features': , - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'lock.i3_rex_lock', @@ -86,12 +83,9 @@ # name: test_entity_state_attrs[lock.i4_edrive40_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'door_lock_state': 'LOCKED', 'friendly_name': 'i4 eDrive40 Lock', 'supported_features': , - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'lock.i4_edrive40_lock', @@ -137,12 +131,9 @@ # name: test_entity_state_attrs[lock.ix_xdrive50_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'door_lock_state': 'LOCKED', 'friendly_name': 'iX xDrive50 Lock', 'supported_features': , - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'lock.ix_xdrive50_lock', @@ -188,12 +179,9 @@ # name: test_entity_state_attrs[lock.m340i_xdrive_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'door_lock_state': 'LOCKED', 'friendly_name': 'M340i xDrive Lock', 'supported_features': , - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'lock.m340i_xdrive_lock', diff --git a/tests/components/bmw_connected_drive/snapshots/test_number.ambr b/tests/components/bmw_connected_drive/snapshots/test_number.ambr index f24ea43d8e8..71dbc46b454 100644 --- 
a/tests/components/bmw_connected_drive/snapshots/test_number.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_number.ambr @@ -40,7 +40,6 @@ # name: test_entity_state_attrs[number.i4_edrive40_target_soc-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Target SoC', 'max': 100.0, @@ -97,7 +96,6 @@ # name: test_entity_state_attrs[number.ix_xdrive50_target_soc-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Target SoC', 'max': 100.0, diff --git a/tests/components/bmw_connected_drive/snapshots/test_select.ambr b/tests/components/bmw_connected_drive/snapshots/test_select.ambr index 34a8817c8db..b827dfe478a 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_select.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_select.ambr @@ -8,6 +8,7 @@ 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'config_entry_id': , @@ -40,11 +41,11 @@ # name: test_entity_state_attrs[select.i3_rex_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'context': , @@ -107,7 +108,6 @@ # name: test_entity_state_attrs[select.i4_edrive40_ac_charging_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 AC Charging Limit', 'options': list([ '6', @@ -143,6 +143,7 @@ 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'config_entry_id': , @@ -175,11 +176,11 @@ # name: test_entity_state_attrs[select.i4_edrive40_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 
eDrive40 Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'context': , @@ -242,7 +243,6 @@ # name: test_entity_state_attrs[select.ix_xdrive50_ac_charging_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 AC Charging Limit', 'options': list([ '6', @@ -278,6 +278,7 @@ 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'config_entry_id': , @@ -310,11 +311,11 @@ # name: test_entity_state_attrs[select.ix_xdrive50_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'context': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr index 6ba87c029ee..8a26acd1040 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr @@ -31,14 +31,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBY00000000REXI01-ac_current_limit', + 'unique_id': 'WBY00000000REXI01-charging_profile.ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_ac_current_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'i3 (+ REX) AC current limit', 'unit_of_measurement': , @@ -80,14 +79,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBY00000000REXI01-charging_end_time', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_end_time', 'unit_of_measurement': None, }) # --- # name: 
test_entity_state_attrs[sensor.i3_rex_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i3 (+ REX) Charging end time', }), @@ -128,14 +126,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBY00000000REXI01-charging_start_time', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i3 (+ REX) Charging start time', }), @@ -191,14 +188,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_status', - 'unique_id': 'WBY00000000REXI01-charging_status', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i3 (+ REX) Charging status', 'options': list([ @@ -256,14 +252,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBY00000000REXI01-charging_target', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_target', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i3 (+ REX) Charging target', 'unit_of_measurement': '%', @@ -317,7 +312,6 @@ # name: test_entity_state_attrs[sensor.i3_rex_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 
'distance', 'friendly_name': 'i3 (+ REX) Mileage', 'state_class': , @@ -365,14 +359,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBY00000000REXI01-remaining_battery_percent', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i3 (+ REX) Remaining battery percent', 'state_class': , @@ -413,22 +406,21 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, 'original_name': 'Remaining fuel', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel', - 'unique_id': 'WBY00000000REXI01-remaining_fuel', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'device_class': 'volume', + 'device_class': 'volume_storage', 'friendly_name': 'i3 (+ REX) Remaining fuel', 'state_class': , 'unit_of_measurement': , @@ -475,14 +467,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel_percent', - 'unique_id': 'WBY00000000REXI01-remaining_fuel_percent', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_fuel_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_fuel_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Remaining fuel percent', 'state_class': , 'unit_of_measurement': '%', @@ -529,14 +520,13 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_electric', - 'unique_id': 'WBY00000000REXI01-remaining_range_electric', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_electric', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Remaining range electric', 'state_class': , @@ -584,14 +574,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_fuel', - 'unique_id': 'WBY00000000REXI01-remaining_range_fuel', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Remaining range fuel', 'state_class': , @@ -639,14 +628,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBY00000000REXI01-remaining_range_total', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Remaining range total', 'state_class': , @@ -692,14 +680,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBA00000000DEMO02-ac_current_limit', + 'unique_id': 'WBA00000000DEMO02-charging_profile.ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_ac_current_limit-state] StateSnapshot({ 
'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'i4 eDrive40 AC current limit', 'unit_of_measurement': , @@ -741,14 +728,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBA00000000DEMO02-charging_end_time', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_end_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i4 eDrive40 Charging end time', }), @@ -789,14 +775,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBA00000000DEMO02-charging_start_time', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i4 eDrive40 Charging start time', }), @@ -852,14 +837,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_status', - 'unique_id': 'WBA00000000DEMO02-charging_status', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i4 eDrive40 Charging status', 'options': list([ @@ -917,14 +901,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBA00000000DEMO02-charging_target', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_target', 
'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Charging target', 'unit_of_measurement': '%', @@ -973,14 +956,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO02-activity', + 'unique_id': 'WBA00000000DEMO02-climate.activity', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i4 eDrive40 Climate status', 'options': list([ @@ -998,6 +980,234 @@ 'state': 'heating', }) # --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_front_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_target_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.front_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_target_pressure-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Front left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_front_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_front_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_current_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.front_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Front left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_front_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_front_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_target_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.front_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Front right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_front_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_front_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , 
+ }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_current_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.front_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Front right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_front_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.55', + }) +# --- # name: test_entity_state_attrs[sensor.i4_edrive40_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1039,7 +1249,6 @@ # name: test_entity_state_attrs[sensor.i4_edrive40_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Mileage', 'state_class': , @@ -1053,6 +1262,234 @@ 'state': '1121', }) # --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_rear_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Rear left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_target_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.rear_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Rear left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_rear_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.03', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_rear_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_current_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.rear_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Rear left tire pressure', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_rear_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.24', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_rear_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_target_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.rear_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Rear right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_rear_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.03', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_rear_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_current_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.rear_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Rear right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_rear_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.31', + }) +# --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_battery_percent-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1087,14 +1524,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBA00000000DEMO02-remaining_battery_percent', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Remaining battery percent', 'state_class': , @@ -1142,14 +1578,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'remaining_range_electric', - 'unique_id': 'WBA00000000DEMO02-remaining_range_electric', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_range_electric', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Remaining range electric', 'state_class': , @@ -1197,14 +1632,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBA00000000DEMO02-remaining_range_total', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Remaining range total', 'state_class': , @@ -1250,14 +1684,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBA00000000DEMO01-ac_current_limit', + 'unique_id': 'WBA00000000DEMO01-charging_profile.ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_ac_current_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'iX xDrive50 AC current limit', 'unit_of_measurement': , @@ -1299,14 +1732,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBA00000000DEMO01-charging_end_time', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_end_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided 
by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'iX xDrive50 Charging end time', }), @@ -1347,14 +1779,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBA00000000DEMO01-charging_start_time', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'iX xDrive50 Charging start time', }), @@ -1410,14 +1841,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_status', - 'unique_id': 'WBA00000000DEMO01-charging_status', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'iX xDrive50 Charging status', 'options': list([ @@ -1475,14 +1905,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBA00000000DEMO01-charging_target', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_target', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Charging target', 'unit_of_measurement': '%', @@ -1531,14 +1960,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO01-activity', + 'unique_id': 'WBA00000000DEMO01-climate.activity', 'unit_of_measurement': None, }) # --- # name: 
test_entity_state_attrs[sensor.ix_xdrive50_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'iX xDrive50 Climate status', 'options': list([ @@ -1556,6 +1984,234 @@ 'state': 'inactive', }) # --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_front_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_target_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.front_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Front left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_front_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_front_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_current_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.front_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Front left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_front_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_front_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 
None, + 'original_name': 'Front right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_target_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.front_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Front right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_front_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_front_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_current_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.front_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 
Front right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_front_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- # name: test_entity_state_attrs[sensor.ix_xdrive50_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1597,7 +2253,6 @@ # name: test_entity_state_attrs[sensor.ix_xdrive50_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Mileage', 'state_class': , @@ -1611,6 +2266,234 @@ 'state': '1121', }) # --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_rear_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_target_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.rear_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Rear left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , 
+ 'entity_id': 'sensor.ix_xdrive50_rear_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_rear_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_current_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.rear_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Rear left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_rear_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.61', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.ix_xdrive50_rear_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_target_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.rear_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Rear right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_rear_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_rear_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'rear_right_current_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.rear_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Rear right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_rear_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_battery_percent-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1645,14 +2528,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBA00000000DEMO01-remaining_battery_percent', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Remaining battery percent', 'state_class': , @@ -1700,14 +2582,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_electric', - 'unique_id': 'WBA00000000DEMO01-remaining_range_electric', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_range_electric', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Remaining range electric', 'state_class': , @@ -1755,14 +2636,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'remaining_range_total', - 'unique_id': 'WBA00000000DEMO01-remaining_range_total', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Remaining range total', 'state_class': , @@ -1812,14 +2692,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO03-activity', + 'unique_id': 'WBA00000000DEMO03-climate.activity', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'M340i xDrive Climate status', 'options': list([ @@ -1837,6 +2716,234 @@ 'state': 'inactive', }) # --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_front_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_target_pressure', + 'unique_id': 
'WBA00000000DEMO03-tires.front_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Front left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_front_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_front_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_current_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.front_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Front left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_front_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , 
+ 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_front_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_target_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.front_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Front right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_front_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_front_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_current_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.front_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Front right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_front_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.55', + }) +# --- # name: test_entity_state_attrs[sensor.m340i_xdrive_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1878,7 +2985,6 @@ # name: test_entity_state_attrs[sensor.m340i_xdrive_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'M340i xDrive Mileage', 'state_class': , @@ -1892,6 +2998,234 @@ 'state': '1121', }) # --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_rear_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_target_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.rear_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Rear left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_rear_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_rear_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_current_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.rear_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: 
test_entity_state_attrs[sensor.m340i_xdrive_rear_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Rear left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_rear_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.24', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_rear_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_target_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.rear_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Rear right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_rear_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_entity_state_attrs[sensor.m340i_xdrive_rear_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_rear_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_current_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.rear_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Rear right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_rear_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.31', + }) +# --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1919,22 +3253,21 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, 'original_name': 'Remaining fuel', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel', - 'unique_id': 'WBA00000000DEMO03-remaining_fuel', + 'unique_id': 
'WBA00000000DEMO03-fuel_and_battery.remaining_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'device_class': 'volume', + 'device_class': 'volume_storage', 'friendly_name': 'M340i xDrive Remaining fuel', 'state_class': , 'unit_of_measurement': , @@ -1981,14 +3314,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel_percent', - 'unique_id': 'WBA00000000DEMO03-remaining_fuel_percent', + 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_fuel_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Remaining fuel percent', 'state_class': , 'unit_of_measurement': '%', @@ -2035,14 +3367,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_fuel', - 'unique_id': 'WBA00000000DEMO03-remaining_range_fuel', + 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_range_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_range_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'M340i xDrive Remaining range fuel', 'state_class': , @@ -2090,14 +3421,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBA00000000DEMO03-remaining_range_total', + 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 
'distance', 'friendly_name': 'M340i xDrive Remaining range total', 'state_class': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_switch.ambr b/tests/components/bmw_connected_drive/snapshots/test_switch.ambr index 5a87a6ddd84..5b60a32c3be 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_switch.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_switch.ambr @@ -35,7 +35,6 @@ # name: test_entity_state_attrs[switch.i4_edrive40_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Climate', }), 'context': , @@ -82,7 +81,6 @@ # name: test_entity_state_attrs[switch.ix_xdrive50_charging-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Charging', }), 'context': , @@ -129,7 +127,6 @@ # name: test_entity_state_attrs[switch.ix_xdrive50_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Climate', }), 'context': , @@ -176,7 +173,6 @@ # name: test_entity_state_attrs[switch.m340i_xdrive_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Climate', }), 'context': , diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index b562e2b898f..f346cd70b26 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -92,7 +92,7 @@ async def test_api_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=deepcopy(FIXTURE_USER_INPUT), ) assert result["type"] is FlowResultType.FORM @@ -116,7 +116,7 @@ async def test_full_user_flow_implementation(hass: HomeAssistant) 
-> None: result2 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=deepcopy(FIXTURE_USER_INPUT), ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == FIXTURE_COMPLETE_ENTRY[CONF_USERNAME] @@ -137,7 +137,8 @@ async def test_options_flow_implementation(hass: HomeAssistant) -> None: return_value=True, ) as mock_setup_entry, ): - config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) + config_entry_args = deepcopy(FIXTURE_CONFIG_ENTRY) + config_entry = MockConfigEntry(**config_entry_args) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) @@ -158,7 +159,7 @@ async def test_options_flow_implementation(hass: HomeAssistant) -> None: CONF_READ_ONLY: True, } - assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 2 async def test_reauth(hass: HomeAssistant) -> None: @@ -209,4 +210,4 @@ async def test_reauth(hass: HomeAssistant) -> None: assert result2["reason"] == "reauth_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY - assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 2 diff --git a/tests/components/bmw_connected_drive/test_coordinator.py b/tests/components/bmw_connected_drive/test_coordinator.py index 5b3f99a9414..b0f507bbfc2 100644 --- a/tests/components/bmw_connected_drive/test_coordinator.py +++ b/tests/components/bmw_connected_drive/test_coordinator.py @@ -8,7 +8,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.bmw_connected_drive import DOMAIN as BMW_DOMAIN -from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.update_coordinator import UpdateFailed 
@@ -27,10 +27,7 @@ async def test_update_success(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert ( - hass.data[config_entry.domain][config_entry.entry_id].last_update_success - is True - ) + assert config_entry.runtime_data.coordinator.last_update_success is True @pytest.mark.usefixtures("bmw_fixture") @@ -45,7 +42,7 @@ async def test_update_failed( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = hass.data[config_entry.domain][config_entry.entry_id] + coordinator = config_entry.runtime_data.coordinator assert coordinator.last_update_success is True @@ -74,7 +71,7 @@ async def test_update_reauth( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = hass.data[config_entry.domain][config_entry.entry_id] + coordinator = config_entry.runtime_data.coordinator assert coordinator.last_update_success is True @@ -121,6 +118,7 @@ async def test_init_reauth( await hass.async_block_till_done() reauth_issue = issue_registry.async_get_issue( - HA_DOMAIN, f"config_entry_reauth_{BMW_DOMAIN}_{config_entry.entry_id}" + HOMEASSISTANT_DOMAIN, + f"config_entry_reauth_{BMW_DOMAIN}_{config_entry.entry_id}", ) assert reauth_issue.active is True diff --git a/tests/components/bmw_connected_drive/test_init.py b/tests/components/bmw_connected_drive/test_init.py index d648ad65f5d..e523b2b3d02 100644 --- a/tests/components/bmw_connected_drive/test_init.py +++ b/tests/components/bmw_connected_drive/test_init.py @@ -1,10 +1,15 @@ """Test Axis component setup process.""" +from copy import deepcopy from unittest.mock import patch import pytest -from homeassistant.components.bmw_connected_drive.const import DOMAIN as BMW_DOMAIN +from homeassistant.components.bmw_connected_drive import DEFAULT_OPTIONS +from homeassistant.components.bmw_connected_drive.const import ( + CONF_READ_ONLY, + DOMAIN as 
BMW_DOMAIN, +) from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -18,6 +23,56 @@ VEHICLE_NAME = "i3 (+ REX)" VEHICLE_NAME_SLUG = "i3_rex" +@pytest.mark.usefixtures("bmw_fixture") +@pytest.mark.parametrize( + "options", + [ + DEFAULT_OPTIONS, + {"other_value": 1, **DEFAULT_OPTIONS}, + {}, + ], +) +async def test_migrate_options( + hass: HomeAssistant, + options: dict, +) -> None: + """Test successful migration of options.""" + + config_entry = deepcopy(FIXTURE_CONFIG_ENTRY) + config_entry["options"] = options + + mock_config_entry = MockConfigEntry(**config_entry) + mock_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert len( + hass.config_entries.async_get_entry(mock_config_entry.entry_id).options + ) == len(DEFAULT_OPTIONS) + + +@pytest.mark.usefixtures("bmw_fixture") +async def test_migrate_options_from_data(hass: HomeAssistant) -> None: + """Test successful migration of options.""" + + config_entry = deepcopy(FIXTURE_CONFIG_ENTRY) + config_entry["options"] = {} + config_entry["data"].update({CONF_READ_ONLY: False}) + + mock_config_entry = MockConfigEntry(**config_entry) + mock_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + updated_config_entry = hass.config_entries.async_get_entry( + mock_config_entry.entry_id + ) + assert len(updated_config_entry.options) == len(DEFAULT_OPTIONS) + assert CONF_READ_ONLY not in updated_config_entry.data + + @pytest.mark.parametrize( ("entitydata", "old_unique_id", "new_unique_id"), [ @@ -30,7 +85,7 @@ VEHICLE_NAME_SLUG = "i3_rex" "disabled_by": None, }, f"{VIN}-charging_level_hv", - f"{VIN}-remaining_battery_percent", + f"{VIN}-fuel_and_battery.remaining_battery_percent", ), ( { @@ 
-41,7 +96,18 @@ VEHICLE_NAME_SLUG = "i3_rex" "disabled_by": None, }, f"{VIN}-remaining_range_total", - f"{VIN}-remaining_range_total", + f"{VIN}-fuel_and_battery.remaining_range_total", + ), + ( + { + "domain": SENSOR_DOMAIN, + "platform": BMW_DOMAIN, + "unique_id": f"{VIN}-mileage", + "suggested_object_id": f"{VEHICLE_NAME} mileage", + "disabled_by": None, + }, + f"{VIN}-mileage", + f"{VIN}-mileage", ), ], ) @@ -53,7 +119,8 @@ async def test_migrate_unique_ids( entity_registry: er.EntityRegistry, ) -> None: """Test successful migration of entity unique_ids.""" - mock_config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) + confg_entry = deepcopy(FIXTURE_CONFIG_ENTRY) + mock_config_entry = MockConfigEntry(**confg_entry) mock_config_entry.add_to_hass(hass) entity: er.RegistryEntry = entity_registry.async_get_or_create( @@ -87,7 +154,7 @@ async def test_migrate_unique_ids( "disabled_by": None, }, f"{VIN}-charging_level_hv", - f"{VIN}-remaining_battery_percent", + f"{VIN}-fuel_and_battery.remaining_battery_percent", ), ], ) @@ -99,15 +166,16 @@ async def test_dont_migrate_unique_ids( entity_registry: er.EntityRegistry, ) -> None: """Test successful migration of entity unique_ids.""" - mock_config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY) + confg_entry = deepcopy(FIXTURE_CONFIG_ENTRY) + mock_config_entry = MockConfigEntry(**confg_entry) mock_config_entry.add_to_hass(hass) # create existing entry with new_unique_id existing_entity = entity_registry.async_get_or_create( SENSOR_DOMAIN, BMW_DOMAIN, - unique_id=f"{VIN}-remaining_battery_percent", - suggested_object_id=f"{VEHICLE_NAME} remaining_battery_percent", + unique_id=f"{VIN}-fuel_and_battery.remaining_battery_percent", + suggested_object_id=f"{VEHICLE_NAME} fuel_and_battery.remaining_battery_percent", config_entry=mock_config_entry, ) @@ -142,7 +210,8 @@ async def test_remove_stale_devices( device_registry: dr.DeviceRegistry, ) -> None: """Test remove stale device registry entries.""" - mock_config_entry = 
MockConfigEntry(**FIXTURE_CONFIG_ENTRY) + config_entry = deepcopy(FIXTURE_CONFIG_ENTRY) + mock_config_entry = MockConfigEntry(**config_entry) mock_config_entry.add_to_hass(hass) device_registry.async_get_or_create( diff --git a/tests/components/bmw_connected_drive/test_notify.py b/tests/components/bmw_connected_drive/test_notify.py new file mode 100644 index 00000000000..4113f618be0 --- /dev/null +++ b/tests/components/bmw_connected_drive/test_notify.py @@ -0,0 +1,151 @@ +"""Test BMW numbers.""" + +from unittest.mock import AsyncMock + +from bimmer_connected.models import MyBMWAPIError, MyBMWRemoteServiceError +from bimmer_connected.tests.common import POI_DATA +from bimmer_connected.vehicle.remote_services import RemoteServices +import pytest +import respx + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from . import check_remote_service_call, setup_mocked_integration + + +async def test_legacy_notify_service_simple( + hass: HomeAssistant, + bmw_fixture: respx.Router, +) -> None: + """Test successful sending of POIs.""" + + # Setup component + assert await setup_mocked_integration(hass) + + # Minimal required data + await hass.services.async_call( + "notify", + "bmw_connected_drive_ix_xdrive50", + { + "message": POI_DATA.get("name"), + "data": { + "latitude": POI_DATA.get("lat"), + "longitude": POI_DATA.get("lon"), + }, + }, + blocking=True, + ) + check_remote_service_call(bmw_fixture, "send-to-car") + + bmw_fixture.reset() + + # Full data + await hass.services.async_call( + "notify", + "bmw_connected_drive_ix_xdrive50", + { + "message": POI_DATA.get("name"), + "data": { + "latitude": POI_DATA.get("lat"), + "longitude": POI_DATA.get("lon"), + "street": POI_DATA.get("street"), + "city": POI_DATA.get("city"), + "postal_code": POI_DATA.get("postal_code"), + "country": POI_DATA.get("country"), + }, + }, + blocking=True, + ) + check_remote_service_call(bmw_fixture, "send-to-car") + + 
+@pytest.mark.usefixtures("bmw_fixture") +@pytest.mark.parametrize( + ("data", "exc_translation"), + [ + ( + { + "latitude": POI_DATA.get("lat"), + }, + "Invalid data for point of interest: required key not provided @ data['longitude']", + ), + ( + { + "latitude": POI_DATA.get("lat"), + "longitude": "text", + }, + "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + ), + ( + { + "latitude": POI_DATA.get("lat"), + "longitude": 9999, + }, + "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + ), + ], +) +async def test_service_call_invalid_input( + hass: HomeAssistant, + data: dict, + exc_translation: str, +) -> None: + """Test invalid inputs.""" + + # Setup component + assert await setup_mocked_integration(hass) + + with pytest.raises(ServiceValidationError) as exc: + await hass.services.async_call( + "notify", + "bmw_connected_drive_ix_xdrive50", + { + "message": POI_DATA.get("name"), + "data": data, + }, + blocking=True, + ) + assert str(exc.value) == exc_translation + + +@pytest.mark.usefixtures("bmw_fixture") +@pytest.mark.parametrize( + ("raised", "expected"), + [ + (MyBMWRemoteServiceError, HomeAssistantError), + (MyBMWAPIError, HomeAssistantError), + ], +) +async def test_service_call_fail( + hass: HomeAssistant, + raised: Exception, + expected: Exception, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test exception handling.""" + + # Setup component + assert await setup_mocked_integration(hass) + + # Setup exception + monkeypatch.setattr( + RemoteServices, + "trigger_remote_service", + AsyncMock(side_effect=raised), + ) + + # Test + with pytest.raises(expected): + await hass.services.async_call( + "notify", + "bmw_connected_drive_ix_xdrive50", + { + "message": POI_DATA.get("name"), + "data": { + "latitude": POI_DATA.get("lat"), + "longitude": POI_DATA.get("lon"), + }, + }, + blocking=True, + ) diff --git a/tests/components/bmw_connected_drive/test_sensor.py 
b/tests/components/bmw_connected_drive/test_sensor.py index 6607bed280d..c02f6d425cd 100644 --- a/tests/components/bmw_connected_drive/test_sensor.py +++ b/tests/components/bmw_connected_drive/test_sensor.py @@ -2,13 +2,17 @@ from unittest.mock import patch +from bimmer_connected.models import StrEnum +from bimmer_connected.vehicle import fuel_and_battery +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.bmw_connected_drive import DOMAIN as BMW_DOMAIN +from homeassistant.components.bmw_connected_drive.const import SCAN_INTERVALS from homeassistant.components.bmw_connected_drive.sensor import SENSOR_TYPES from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import Platform +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.translation import async_get_translations @@ -20,7 +24,7 @@ from homeassistant.util.unit_system import ( from . 
import setup_mocked_integration -from tests.common import snapshot_platform +from tests.common import async_fire_time_changed, snapshot_platform @pytest.mark.freeze_time("2023-06-22 10:30:00+00:00") @@ -107,3 +111,39 @@ async def test_entity_option_translations( } assert sensor_options == translation_states + + +@pytest.mark.usefixtures("bmw_fixture") +async def test_enum_sensor_unknown( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch, freezer: FrozenDateTimeFactory +) -> None: + """Test conversion handling of enum sensors.""" + + # Setup component + assert await setup_mocked_integration(hass) + + entity_id = "sensor.i4_edrive40_charging_status" + + # Check normal state + entity = hass.states.get(entity_id) + assert entity.state == "not_charging" + + class ChargingStateUnkown(StrEnum): + """Charging state of electric vehicle.""" + + UNKNOWN = "UNKNOWN" + + # Setup enum returning only UNKNOWN + monkeypatch.setattr( + fuel_and_battery, + "ChargingState", + ChargingStateUnkown, + ) + + freezer.tick(SCAN_INTERVALS["rest_of_world"]) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Check normal state + entity = hass.states.get("sensor.i4_edrive40_charging_status") + assert entity.state == STATE_UNAVAILABLE diff --git a/tests/components/braviatv/conftest.py b/tests/components/braviatv/conftest.py index 186f4e12337..b25e8ddf067 100644 --- a/tests/components/braviatv/conftest.py +++ b/tests/components/braviatv/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Bravia TV.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/braviatv/test_diagnostics.py b/tests/components/braviatv/test_diagnostics.py index 13f6c92fb76..a7bd1631788 100644 --- a/tests/components/braviatv/test_diagnostics.py +++ b/tests/components/braviatv/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy import 
SnapshotAssertion +from syrupy.filters import props from homeassistant.components.braviatv.const import CONF_USE_PSK, DOMAIN from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PIN @@ -71,4 +72,4 @@ async def test_entry_diagnostics( assert await async_setup_component(hass, DOMAIN, {}) result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/bring/conftest.py b/tests/components/bring/conftest.py index 25330c10ba4..6c39c5020f9 100644 --- a/tests/components/bring/conftest.py +++ b/tests/components/bring/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the Bring! tests.""" +from collections.abc import Generator from typing import cast from unittest.mock import AsyncMock, patch from bring_api.types import BringAuthResponse import pytest -from typing_extensions import Generator from homeassistant.components.bring import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/broadlink/__init__.py b/tests/components/broadlink/__init__.py index c9245fb16fa..207014d0958 100644 --- a/tests/components/broadlink/__init__.py +++ b/tests/components/broadlink/__init__.py @@ -89,6 +89,16 @@ BROADLINK_DEVICES = { 57, 5, ), + "Guest room": ( + "192.168.0.66", + "34ea34b61d2e", + "HY02/HY03", + "Hysen", + "HYS", + 0x4EAD, + 10024, + 5, + ), } @@ -105,18 +115,27 @@ class BroadlinkDevice: """Representation of a Broadlink device.""" def __init__( - self, name, host, mac, model, manufacturer, type_, devtype, fwversion, timeout - ): + self, + name: str, + host: str, + mac: str, + model: str, + manufacturer: str, + type_: str, + devtype: int, + fwversion: int, + timeout: int, + ) -> None: """Initialize the device.""" - self.name: str = name - self.host: str = host - self.mac: str = mac - self.model: str = model - self.manufacturer: str = manufacturer - self.type: str = type_ - self.devtype: 
int = devtype - self.timeout: int = timeout - self.fwversion: int = fwversion + self.name = name + self.host = host + self.mac = mac + self.model = model + self.manufacturer = manufacturer + self.type = type_ + self.devtype = devtype + self.timeout = timeout + self.fwversion = fwversion async def setup_entry(self, hass, mock_api=None, mock_entry=None): """Set up the device.""" @@ -168,6 +187,31 @@ class BroadlinkDevice: } +class BroadlinkMP1BG1Device(BroadlinkDevice): + """Mock device for MP1 and BG1 with special mocking of api return values.""" + + def get_mock_api(self): + """Return a mock device (API) with support for check_power calls.""" + mock_api = super().get_mock_api() + mock_api.check_power.return_value = {"s1": 0, "s2": 0, "s3": 0, "s4": 0} + return mock_api + + +class BroadlinkSP4BDevice(BroadlinkDevice): + """Mock device for SP4b with special mocking of api return values.""" + + def get_mock_api(self): + """Return a mock device (API) with support for get_state calls.""" + mock_api = super().get_mock_api() + mock_api.get_state.return_value = {"pwr": 0} + return mock_api + + def get_device(name): """Get a device by name.""" + dev_type = BROADLINK_DEVICES[name][5] + if dev_type in {0x4EB5}: + return BroadlinkMP1BG1Device(name, *BROADLINK_DEVICES[name]) + if dev_type in {0x5115}: + return BroadlinkSP4BDevice(name, *BROADLINK_DEVICES[name]) return BroadlinkDevice(name, *BROADLINK_DEVICES[name]) diff --git a/tests/components/broadlink/test_climate.py b/tests/components/broadlink/test_climate.py new file mode 100644 index 00000000000..6b39d1895b1 --- /dev/null +++ b/tests/components/broadlink/test_climate.py @@ -0,0 +1,180 @@ +"""Tests for Broadlink climate.""" + +from typing import Any + +import pytest + +from homeassistant.components.broadlink.climate import SensorMode +from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.components.climate import ( + ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + 
SERVICE_TURN_OFF, + SERVICE_TURN_ON, + HVACAction, + HVACMode, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity + +from . import get_device + + +@pytest.mark.parametrize( + ( + "api_return_value", + "expected_state", + "expected_current_temperature", + "expected_temperature", + "expected_hvac_action", + ), + [ + ( + { + "sensor": SensorMode.INNER_SENSOR_CONTROL.value, + "power": 1, + "auto_mode": 0, + "active": 1, + "room_temp": 22, + "thermostat_temp": 23, + "external_temp": 30, + }, + HVACMode.HEAT, + 22, + 23, + HVACAction.HEATING, + ), + ( + { + "sensor": SensorMode.OUTER_SENSOR_CONTROL.value, + "power": 1, + "auto_mode": 1, + "active": 0, + "room_temp": 22, + "thermostat_temp": 23, + "external_temp": 30, + }, + HVACMode.AUTO, + 30, + 23, + HVACAction.IDLE, + ), + ( + { + "sensor": SensorMode.INNER_SENSOR_CONTROL.value, + "power": 0, + "auto_mode": 0, + "active": 0, + "room_temp": 22, + "thermostat_temp": 23, + "external_temp": 30, + }, + HVACMode.OFF, + 22, + 23, + HVACAction.OFF, + ), + ], +) +async def test_climate( + api_return_value: dict[str, Any], + expected_state: HVACMode, + expected_current_temperature: int, + expected_temperature: int, + expected_hvac_action: HVACAction, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink climate.""" + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + climates = [entry for entry in entries if entry.domain == Platform.CLIMATE] + assert len(climates) == 1 + + climate = climates[0] + + 
mock_setup.api.get_full_status.return_value = api_return_value + + await async_update_entity(hass, climate.entity_id) + assert mock_setup.api.get_full_status.call_count == 2 + state = hass.states.get(climate.entity_id) + assert state.state == expected_state + assert state.attributes["current_temperature"] == expected_current_temperature + assert state.attributes["temperature"] == expected_temperature + assert state.attributes["hvac_action"] == expected_hvac_action + + +async def test_climate_set_temperature_turn_off_turn_on( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink climate.""" + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + climates = [entry for entry in entries if entry.domain == Platform.CLIMATE] + assert len(climates) == 1 + + climate = climates[0] + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: climate.entity_id, + ATTR_TEMPERATURE: "24", + }, + blocking=True, + ) + state = hass.states.get(climate.entity_id) + + assert mock_setup.api.set_temp.call_count == 1 + assert mock_setup.api.set_power.call_count == 0 + assert mock_setup.api.set_mode.call_count == 0 + assert state.attributes["temperature"] == 24 + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: climate.entity_id, + }, + blocking=True, + ) + state = hass.states.get(climate.entity_id) + + assert mock_setup.api.set_temp.call_count == 1 + assert mock_setup.api.set_power.call_count == 1 + assert mock_setup.api.set_mode.call_count == 0 + assert state.state == HVACMode.OFF + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: climate.entity_id, + }, + blocking=True, 
+ ) + state = hass.states.get(climate.entity_id) + + assert mock_setup.api.set_temp.call_count == 1 + assert mock_setup.api.set_power.call_count == 2 + assert mock_setup.api.set_mode.call_count == 1 + assert state.state == HVACMode.HEAT diff --git a/tests/components/broadlink/test_select.py b/tests/components/broadlink/test_select.py new file mode 100644 index 00000000000..42715c9a5ab --- /dev/null +++ b/tests/components/broadlink/test_select.py @@ -0,0 +1,67 @@ +"""Tests for Broadlink select.""" + +from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity + +from . import get_device + + +async def test_select( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink select.""" + await hass.config.async_set_time_zone("UTC") + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + selects = [entry for entry in entries if entry.domain == Platform.SELECT] + assert len(selects) == 1 + + select = selects[0] + + mock_setup.api.get_full_status.return_value = { + "dayofweek": 3, + "hour": 2, + "min": 3, + "sec": 4, + } + await async_update_entity(hass, select.entity_id) + assert mock_setup.api.get_full_status.call_count == 2 + state = hass.states.get(select.entity_id) + assert state.state == "wednesday" + + # set value + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: 
select.entity_id, + ATTR_OPTION: "tuesday", + }, + blocking=True, + ) + state = hass.states.get(select.entity_id) + assert state.state == "tuesday" + assert mock_setup.api.set_time.call_count == 1 + call_args = mock_setup.api.set_time.call_args.kwargs + assert call_args == { + "hour": 2, + "minute": 3, + "second": 4, + "day": 2, + } diff --git a/tests/components/broadlink/test_time.py b/tests/components/broadlink/test_time.py new file mode 100644 index 00000000000..819954158bb --- /dev/null +++ b/tests/components/broadlink/test_time.py @@ -0,0 +1,67 @@ +"""Tests for Broadlink time.""" + +from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.components.time import ( + ATTR_TIME, + DOMAIN as TIME_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity + +from . 
import get_device + + +async def test_time( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink time.""" + await hass.config.async_set_time_zone("UTC") + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + times = [entry for entry in entries if entry.domain == Platform.TIME] + assert len(times) == 1 + + time = times[0] + + mock_setup.api.get_full_status.return_value = { + "dayofweek": 3, + "hour": 2, + "min": 3, + "sec": 4, + } + await async_update_entity(hass, time.entity_id) + assert mock_setup.api.get_full_status.call_count == 2 + state = hass.states.get(time.entity_id) + assert state.state == "02:03:04+00:00" + + # set value + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: time.entity_id, + ATTR_TIME: "03:04:05", + }, + blocking=True, + ) + state = hass.states.get(time.entity_id) + assert state.state == "03:04:05" + assert mock_setup.api.set_time.call_count == 1 + call_args = mock_setup.api.set_time.call_args.kwargs + assert call_args == { + "hour": 3, + "minute": 4, + "second": 5, + "day": 3, + } diff --git a/tests/components/brother/conftest.py b/tests/components/brother/conftest.py index 5fadca5314d..de22158da00 100644 --- a/tests/components/brother/conftest.py +++ b/tests/components/brother/conftest.py @@ -1,11 +1,11 @@ """Test fixtures for brother.""" +from collections.abc import Generator from datetime import UTC, datetime -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from brother import BrotherSensors import pytest -from typing_extensions import Generator from homeassistant.components.brother.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_TYPE @@ 
-87,7 +87,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_unload_entry() -> Generator[AsyncMock, None, None]: +def mock_unload_entry() -> Generator[AsyncMock]: """Override async_unload_entry.""" with patch( "homeassistant.components.brother.async_unload_entry", return_value=True @@ -96,7 +96,7 @@ def mock_unload_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_brother_client() -> Generator[AsyncMock, None, None]: +def mock_brother_client() -> Generator[MagicMock]: """Mock Brother client.""" with ( patch("homeassistant.components.brother.Brother", autospec=True) as mock_client, diff --git a/tests/components/brottsplatskartan/conftest.py b/tests/components/brottsplatskartan/conftest.py index c10093f18b9..1d0cf236ed9 100644 --- a/tests/components/brottsplatskartan/conftest.py +++ b/tests/components/brottsplatskartan/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Brottplatskartan.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/brunt/conftest.py b/tests/components/brunt/conftest.py index bfbca238446..1b60db682c3 100644 --- a/tests/components/brunt/conftest.py +++ b/tests/components/brunt/conftest.py @@ -1,9 +1,9 @@ """Configuration for brunt tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/bryant_evolution/__init__.py b/tests/components/bryant_evolution/__init__.py new file mode 100644 index 00000000000..22fa2950253 --- /dev/null +++ b/tests/components/bryant_evolution/__init__.py @@ -0,0 +1 @@ +"""Tests for the Bryant Evolution integration.""" diff --git a/tests/components/bryant_evolution/conftest.py b/tests/components/bryant_evolution/conftest.py new file mode 100644 index 00000000000..fb12d7ebf29 --- /dev/null +++ 
b/tests/components/bryant_evolution/conftest.py @@ -0,0 +1,70 @@ +"""Common fixtures for the Bryant Evolution tests.""" + +from collections.abc import Generator, Mapping +from unittest.mock import AsyncMock, patch + +from evolutionhttp import BryantEvolutionLocalClient +import pytest + +from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN +from homeassistant.const import CONF_FILENAME +from homeassistant.core import HomeAssistant +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.bryant_evolution.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +DEFAULT_SYSTEM_ZONES = ((1, 1), (1, 2), (2, 3)) +""" +A tuple of (system, zone) pairs representing the default system and zone configurations +for the Bryant Evolution integration. 
+""" + + +@pytest.fixture(autouse=True) +def mock_evolution_client_factory() -> Generator[AsyncMock]: + """Mock an Evolution client.""" + with patch( + "evolutionhttp.BryantEvolutionLocalClient.get_client", + austospec=True, + ) as mock_get_client: + clients: Mapping[tuple[int, int], AsyncMock] = {} + for system, zone in DEFAULT_SYSTEM_ZONES: + clients[(system, zone)] = AsyncMock(spec=BryantEvolutionLocalClient) + client = clients[system, zone] + client.read_zone_name.return_value = f"System {system} Zone {zone}" + client.read_current_temperature.return_value = 75 + client.read_hvac_mode.return_value = ("COOL", False) + client.read_fan_mode.return_value = "AUTO" + client.read_cooling_setpoint.return_value = 72 + mock_get_client.side_effect = lambda system, zone, tty: clients[ + (system, zone) + ] + yield mock_get_client + + +@pytest.fixture +async def mock_evolution_entry( + hass: HomeAssistant, + mock_evolution_client_factory: AsyncMock, +) -> MockConfigEntry: + """Configure and return a Bryant evolution integration.""" + hass.config.units = US_CUSTOMARY_SYSTEM + entry = MockConfigEntry( + entry_id="01J3XJZSTEF6G5V0QJX6HBC94T", # For determinism in snapshot tests + domain=DOMAIN, + data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: [(1, 1)]}, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return entry diff --git a/tests/components/bryant_evolution/snapshots/test_climate.ambr b/tests/components/bryant_evolution/snapshots/test_climate.ambr new file mode 100644 index 00000000000..4f6c1f2bbc4 --- /dev/null +++ b/tests/components/bryant_evolution/snapshots/test_climate.ambr @@ -0,0 +1,83 @@ +# serializer version: 1 +# name: test_setup_integration_success[climate.system_1_zone_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'auto', + 'low', + 'med', + 'high', + ]), + 'hvac_modes': list([ + , + , + , + , + 
]), + 'max_temp': 95, + 'min_temp': 45, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.system_1_zone_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bryant_evolution', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J3XJZSTEF6G5V0QJX6HBC94T-S1-Z1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_integration_success[climate.system_1_zone_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 75, + 'fan_mode': 'auto', + 'fan_modes': list([ + 'auto', + 'low', + 'med', + 'high', + ]), + 'friendly_name': 'System 1 Zone 1', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 95, + 'min_temp': 45, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': 72, + }), + 'context': , + 'entity_id': 'climate.system_1_zone_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- diff --git a/tests/components/bryant_evolution/test_climate.py b/tests/components/bryant_evolution/test_climate.py new file mode 100644 index 00000000000..0b527e02a10 --- /dev/null +++ b/tests/components/bryant_evolution/test_climate.py @@ -0,0 +1,259 @@ +"""Test the BryantEvolutionClient type.""" + +from collections.abc import Generator +from datetime import timedelta +import logging +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.bryant_evolution.climate import SCAN_INTERVAL +from homeassistant.components.climate import ( + ATTR_FAN_MODE, + ATTR_HVAC_ACTION, + 
ATTR_HVAC_MODE,
+    ATTR_TEMPERATURE,
+    DOMAIN as CLIMATE_DOMAIN,
+    SERVICE_SET_FAN_MODE,
+    SERVICE_SET_HVAC_MODE,
+    SERVICE_SET_TEMPERATURE,
+    HVACAction,
+)
+from homeassistant.const import ATTR_ENTITY_ID
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def trigger_polling(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None:
+    """Trigger a polling event."""
+    freezer.tick(SCAN_INTERVAL + timedelta(seconds=1))
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+
+async def test_setup_integration_success(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    mock_evolution_entry: MockConfigEntry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test that an instance can be constructed."""
+    await snapshot_platform(
+        hass, entity_registry, snapshot, mock_evolution_entry.entry_id
+    )
+
+
+async def test_set_temperature_mode_cool(
+    hass: HomeAssistant,
+    mock_evolution_entry: MockConfigEntry,
+    mock_evolution_client_factory: Generator[AsyncMock],
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test setting the temperature in cool mode."""
+    # Start with known initial conditions
+    client = await mock_evolution_client_factory(1, 1, "/dev/unused")
+    client.read_hvac_mode.return_value = ("COOL", False)
+    client.read_cooling_setpoint.return_value = 75
+    await trigger_polling(hass, freezer)
+    state = hass.states.get("climate.system_1_zone_1")
+    assert state.attributes["temperature"] == 75, state.attributes
+
+    # Make the call, modifying the mock client to throw an exception on
+    # read to ensure that the update is visible iff we call
+    # async_update_ha_state.
+    data = {ATTR_TEMPERATURE: 70}
+    data[ATTR_ENTITY_ID] = "climate.system_1_zone_1"
+    client.read_cooling_setpoint.side_effect = Exception("fake failure")
+    await hass.services.async_call(
+        CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True
+    )
+
+    # Verify effect.
+    client.set_cooling_setpoint.assert_called_once_with(70)
+    state = hass.states.get("climate.system_1_zone_1")
+    assert state.attributes["temperature"] == 70
+
+
+async def test_set_temperature_mode_heat(
+    hass: HomeAssistant,
+    mock_evolution_entry: MockConfigEntry,
+    mock_evolution_client_factory: Generator[AsyncMock],
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test setting the temperature in heat mode."""
+
+    # Start with known initial conditions
+    client = await mock_evolution_client_factory(1, 1, "/dev/unused")
+    client.read_hvac_mode.return_value = ("HEAT", False)
+    client.read_heating_setpoint.return_value = 60
+    await trigger_polling(hass, freezer)
+
+    # Make the call, modifying the mock client to throw an exception on
+    # read to ensure that the update is visible iff we call
+    # async_update_ha_state.
+    data = {"temperature": 65}
+    data[ATTR_ENTITY_ID] = "climate.system_1_zone_1"
+    client.read_heating_setpoint.side_effect = Exception("fake failure")
+    await hass.services.async_call(
+        CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True
+    )
+    # Verify effect.
+    state = hass.states.get("climate.system_1_zone_1")
+    assert state.attributes["temperature"] == 65, state.attributes
+
+
+async def test_set_temperature_mode_heat_cool(
+    hass: HomeAssistant,
+    mock_evolution_entry: MockConfigEntry,
+    mock_evolution_client_factory: Generator[AsyncMock],
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test setting the temperature in heat_cool mode."""
+
+    # Enter heat_cool with known setpoints
+    mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused")
+    mock_client.read_hvac_mode.return_value = ("AUTO", False)
+    mock_client.read_cooling_setpoint.return_value = 90
+    mock_client.read_heating_setpoint.return_value = 40
+    await trigger_polling(hass, freezer)
+    state = hass.states.get("climate.system_1_zone_1")
+    assert state.state == "heat_cool"
+    assert state.attributes["target_temp_low"] == 40
+    assert state.attributes["target_temp_high"] == 90
+
+    # Make the call, modifying the mock client to throw an exception on
+    # read to ensure that the update is visible iff we call
+    # async_update_ha_state.
+    mock_client.read_heating_setpoint.side_effect = Exception("fake failure")
+    mock_client.read_cooling_setpoint.side_effect = Exception("fake failure")
+    data = {"target_temp_low": 70, "target_temp_high": 80}
+    data[ATTR_ENTITY_ID] = "climate.system_1_zone_1"
+    await hass.services.async_call(
+        CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True
+    )
+    state = hass.states.get("climate.system_1_zone_1")
+    assert state.attributes["target_temp_low"] == 70, state.attributes
+    assert state.attributes["target_temp_high"] == 80, state.attributes
+    mock_client.set_cooling_setpoint.assert_called_once_with(80)
+    mock_client.set_heating_setpoint.assert_called_once_with(70)
+
+
+async def test_set_fan_mode(
+    hass: HomeAssistant,
+    mock_evolution_entry: MockConfigEntry,
+    mock_evolution_client_factory: Generator[AsyncMock],
+) -> None:
+    """Test that setting fan mode works."""
+    mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused")
+    fan_modes = ["auto", "low", "med", "high"]
+    for mode in fan_modes:
+        # Make the call, modifying the mock client to throw an exception on
+        # read to ensure that the update is visible iff we call
+        # async_update_ha_state.
+        mock_client.read_fan_mode.side_effect = Exception("fake failure")
+        data = {ATTR_FAN_MODE: mode}
+        data[ATTR_ENTITY_ID] = "climate.system_1_zone_1"
+        await hass.services.async_call(
+            CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, data, blocking=True
+        )
+        assert (
+            hass.states.get("climate.system_1_zone_1").attributes[ATTR_FAN_MODE] == mode
+        )
+        mock_client.set_fan_mode.assert_called_with(mode)
+
+
+@pytest.mark.parametrize(
+    ("hvac_mode", "evolution_mode"),
+    [("heat_cool", "auto"), ("heat", "heat"), ("cool", "cool"), ("off", "off")],
+)
+async def test_set_hvac_mode(
+    hass: HomeAssistant,
+    mock_evolution_entry: MockConfigEntry,
+    mock_evolution_client_factory: Generator[AsyncMock],
+    hvac_mode,
+    evolution_mode,
+) -> None:
+    """Test that setting HVAC mode works."""
+    mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused")
+
+    # Make the call, modifying the mock client to throw an exception on
+    # read to ensure that the update is visible iff we call
+    # async_update_ha_state.
+ data = {ATTR_HVAC_MODE: hvac_mode} + data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" + mock_client.read_hvac_mode.side_effect = Exception("fake failure") + await hass.services.async_call( + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, data, blocking=True + ) + await hass.async_block_till_done() + assert hass.states.get("climate.system_1_zone_1").state == evolution_mode + mock_client.set_hvac_mode.assert_called_with(evolution_mode) + + +@pytest.mark.parametrize( + ("curr_temp", "expected_action"), + [(62, HVACAction.HEATING), (70, HVACAction.OFF), (80, HVACAction.COOLING)], +) +async def test_read_hvac_action_heat_cool( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock], + freezer: FrozenDateTimeFactory, + curr_temp: int, + expected_action: HVACAction, +) -> None: + """Test that we can read the current HVAC action in heat_cool mode.""" + htsp = 68 + clsp = 72 + + mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") + mock_client.read_heating_setpoint.return_value = htsp + mock_client.read_cooling_setpoint.return_value = clsp + is_active = curr_temp < htsp or curr_temp > clsp + mock_client.read_hvac_mode.return_value = ("auto", is_active) + mock_client.read_current_temperature.return_value = curr_temp + await trigger_polling(hass, freezer) + state = hass.states.get("climate.system_1_zone_1") + assert state.attributes[ATTR_HVAC_ACTION] == expected_action + + +@pytest.mark.parametrize( + ("mode", "active", "expected_action"), + [ + ("heat", True, "heating"), + ("heat", False, "off"), + ("cool", True, "cooling"), + ("cool", False, "off"), + ("off", False, "off"), + ], +) +async def test_read_hvac_action( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock], + freezer: FrozenDateTimeFactory, + mode: str, + active: bool, + expected_action: str, +) -> None: + """Test that we can read the current HVAC action.""" + # Initial state 
should be no action. + assert ( + hass.states.get("climate.system_1_zone_1").attributes[ATTR_HVAC_ACTION] + == HVACAction.OFF + ) + # Perturb the system and verify we see an action. + mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") + mock_client.read_heating_setpoint.return_value = 75 # Needed if mode == heat + mock_client.read_hvac_mode.return_value = (mode, active) + await trigger_polling(hass, freezer) + assert ( + hass.states.get("climate.system_1_zone_1").attributes[ATTR_HVAC_ACTION] + == expected_action + ) diff --git a/tests/components/bryant_evolution/test_config_flow.py b/tests/components/bryant_evolution/test_config_flow.py new file mode 100644 index 00000000000..39d203201eb --- /dev/null +++ b/tests/components/bryant_evolution/test_config_flow.py @@ -0,0 +1,170 @@ +"""Test the Bryant Evolution config flow.""" + +from unittest.mock import DEFAULT, AsyncMock, patch + +from evolutionhttp import BryantEvolutionLocalClient, ZoneInfo + +from homeassistant import config_entries +from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_FILENAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_success(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch.object( + BryantEvolutionLocalClient, + "enumerate_zones", + return_value=DEFAULT, + ) as mock_call, + ): + mock_call.side_effect = lambda system_id, filename: { + 1: [ZoneInfo(1, 1, "S1Z1"), ZoneInfo(1, 2, "S1Z2")], + 2: [ZoneInfo(2, 3, "S2Z2"), ZoneInfo(2, 4, "S2Z3")], + }.get(system_id, []) + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_FILENAME: "test_form_success", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY, result + assert result["title"] == "SAM at test_form_success" + assert result["data"] == { + CONF_FILENAME: "test_form_success", + CONF_SYSTEM_ZONE: [(1, 1), (1, 2), (2, 3), (2, 4)], + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_cannot_connect( + hass: HomeAssistant, + mock_evolution_client_factory: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + with ( + patch.object( + BryantEvolutionLocalClient, + "enumerate_zones", + return_value=DEFAULT, + ) as mock_call, + ): + mock_call.return_value = [] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_FILENAME: "test_form_cannot_connect", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + with ( + patch.object( + BryantEvolutionLocalClient, + "enumerate_zones", + return_value=DEFAULT, + ) as mock_call, + ): + mock_call.side_effect = lambda system_id, filename: { + 1: [ZoneInfo(1, 1, "S1Z1"), ZoneInfo(1, 2, "S1Z2")], + 2: [ZoneInfo(2, 3, "S2Z3"), ZoneInfo(2, 4, "S2Z4")], + }.get(system_id, []) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_FILENAME: "some-serial", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "SAM at some-serial" + assert result["data"] == { + CONF_FILENAME: "some-serial", + CONF_SYSTEM_ZONE: [(1, 1), (1, 2), (2, 3), (2, 4)], + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_cannot_connect_bad_file( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_evolution_client_factory: AsyncMock, +) -> 
None: + """Test we handle cannot connect error from a missing file.""" + mock_evolution_client_factory.side_effect = FileNotFoundError("test error") + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + # This file does not exist. + CONF_FILENAME: "test_form_cannot_connect_bad_file", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + +async def test_reconfigure( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_evolution_entry: MockConfigEntry, +) -> None: + """Test that reconfigure discovers additional systems and zones.""" + + # Reconfigure with additional systems and zones. + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": mock_evolution_entry.entry_id, + }, + ) + with ( + patch.object( + BryantEvolutionLocalClient, + "enumerate_zones", + return_value=DEFAULT, + ) as mock_call, + ): + mock_call.side_effect = lambda system_id, filename: { + 1: [ZoneInfo(1, 1, "S1Z1")], + 2: [ZoneInfo(2, 3, "S2Z3"), ZoneInfo(2, 4, "S2Z4"), ZoneInfo(2, 5, "S2Z5")], + }.get(system_id, []) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_FILENAME: "test_reconfigure", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT, result + assert result["reason"] == "reconfigured" + config_entry = hass.config_entries.async_entries()[0] + assert config_entry.data[CONF_SYSTEM_ZONE] == [ + (1, 1), + (2, 3), + (2, 4), + (2, 5), + ] diff --git a/tests/components/bryant_evolution/test_init.py b/tests/components/bryant_evolution/test_init.py new file mode 100644 index 00000000000..72734f7e117 --- /dev/null +++ b/tests/components/bryant_evolution/test_init.py @@ -0,0 +1,112 @@ +"""Test setup for the 
bryant_evolution integration.""" + +import logging +from unittest.mock import AsyncMock + +from evolutionhttp import BryantEvolutionLocalClient +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_FILENAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM + +from .conftest import DEFAULT_SYSTEM_ZONES +from .test_climate import trigger_polling + +from tests.common import MockConfigEntry + +_LOGGER = logging.getLogger(__name__) + + +async def test_setup_integration_prevented_by_unavailable_client( + hass: HomeAssistant, mock_evolution_client_factory: AsyncMock +) -> None: + """Test that setup throws ConfigEntryNotReady when the client is unavailable.""" + mock_evolution_client_factory.side_effect = FileNotFoundError("test error") + mock_evolution_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_FILENAME: "test_setup_integration_prevented_by_unavailable_client", + CONF_SYSTEM_ZONE: [(1, 1)], + }, + ) + mock_evolution_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_evolution_entry.entry_id) + await hass.async_block_till_done() + assert mock_evolution_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_integration_client_returns_none( + hass: HomeAssistant, mock_evolution_client_factory: AsyncMock +) -> None: + """Test that an unavailable client causes ConfigEntryNotReady.""" + mock_client = AsyncMock(spec=BryantEvolutionLocalClient) + mock_evolution_client_factory.side_effect = None + mock_evolution_client_factory.return_value = mock_client + mock_client.read_fan_mode.return_value = None + mock_client.read_current_temperature.return_value = None + mock_client.read_hvac_mode.return_value = None + 
mock_client.read_cooling_setpoint.return_value = None + mock_client.read_zone_name.return_value = None + mock_evolution_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: [(1, 1)]}, + ) + mock_evolution_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_evolution_entry.entry_id) + await hass.async_block_till_done() + assert mock_evolution_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_multiple_systems_zones( + hass: HomeAssistant, + mock_evolution_client_factory: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that a device with multiple systems and zones works.""" + hass.config.units = US_CUSTOMARY_SYSTEM + mock_evolution_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: DEFAULT_SYSTEM_ZONES}, + ) + mock_evolution_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_evolution_entry.entry_id) + await hass.async_block_till_done() + + # Set the temperature of each zone to its zone number so that we can + # ensure we've created the right client for each zone. + for sz, client in mock_evolution_entry.runtime_data.items(): + client.read_current_temperature.return_value = sz[1] + await trigger_polling(hass, freezer) + + # Check that each system and zone has the expected temperature value to + # verify that the initial setup flow worked as expected. + for sz in DEFAULT_SYSTEM_ZONES: + system = sz[0] + zone = sz[1] + state = hass.states.get(f"climate.system_{system}_zone_{zone}") + assert state, hass.states.async_all() + assert state.attributes["current_temperature"] == zone + + # Check that the created devices are wired to each other as expected. 
+ device_registry = dr.async_get(hass) + + def find_device(name): + return next(filter(lambda x: x.name == name, device_registry.devices.values())) + + sam = find_device("System Access Module") + s1 = find_device("System 1") + s2 = find_device("System 2") + s1z1 = find_device("System 1 Zone 1") + s1z2 = find_device("System 1 Zone 2") + s2z3 = find_device("System 2 Zone 3") + + assert sam.via_device_id is None + assert s1.via_device_id == sam.id + assert s2.via_device_id == sam.id + assert s1z1.via_device_id == s1.id + assert s1z2.via_device_id == s1.id + assert s2z3.via_device_id == s2.id diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index 224e0e0b157..07ca8b648f3 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -1,10 +1,10 @@ """Fixtures for BSBLAN integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from bsblan import Device, Info, State import pytest -from typing_extensions import Generator from homeassistant.components.bsblan.const import CONF_PASSKEY, DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME @@ -48,11 +48,11 @@ def mock_bsblan() -> Generator[MagicMock]: patch("homeassistant.components.bsblan.config_flow.BSBLAN", new=bsblan_mock), ): bsblan = bsblan_mock.return_value - bsblan.info.return_value = Info.parse_raw(load_fixture("info.json", DOMAIN)) - bsblan.device.return_value = Device.parse_raw( + bsblan.info.return_value = Info.from_json(load_fixture("info.json", DOMAIN)) + bsblan.device.return_value = Device.from_json( load_fixture("device.json", DOMAIN) ) - bsblan.state.return_value = State.parse_raw(load_fixture("state.json", DOMAIN)) + bsblan.state.return_value = State.from_json(load_fixture("state.json", DOMAIN)) yield bsblan diff --git a/tests/components/buienradar/conftest.py b/tests/components/buienradar/conftest.py index 7c9027c7715..7872b50d4a9 100644 --- 
a/tests/components/buienradar/conftest.py +++ b/tests/components/buienradar/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for buienradar2.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/button/test_device_trigger.py b/tests/components/button/test_device_trigger.py index dee8045a71f..f5ade86e1a0 100644 --- a/tests/components/button/test_device_trigger.py +++ b/tests/components/button/test_device_trigger.py @@ -13,17 +13,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import MockConfigEntry, async_get_device_automations async def test_get_triggers( @@ -109,7 +99,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -158,9 +148,9 @@ async def test_if_fires_on_state_change( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "2021-01-01T23:59:59+00:00") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"to - device - {entry.entity_id} - unknown - 2021-01-01T23:59:59+00:00 - None - 0" ) @@ -169,7 +159,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: 
dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -218,8 +208,8 @@ async def test_if_fires_on_state_change_legacy( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "2021-01-01T23:59:59+00:00") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"to - device - {entry.entity_id} - unknown - 2021-01-01T23:59:59+00:00 - None - 0" ) diff --git a/tests/components/button/test_init.py b/tests/components/button/test_init.py index 583c625e1b2..7df5308e096 100644 --- a/tests/components/button/test_init.py +++ b/tests/components/button/test_init.py @@ -1,11 +1,11 @@ """The tests for the Button component.""" +from collections.abc import Generator from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator from homeassistant.components.button import ( DOMAIN, diff --git a/tests/components/caldav/test_config_flow.py b/tests/components/caldav/test_config_flow.py index 7c47ea14607..0079e59a931 100644 --- a/tests/components/caldav/test_config_flow.py +++ b/tests/components/caldav/test_config_flow.py @@ -1,11 +1,11 @@ """Test the CalDAV config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from caldav.lib.error import AuthorizationError, DAVError import pytest import requests -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.caldav.const import DOMAIN diff --git a/tests/components/caldav/test_todo.py b/tests/components/caldav/test_todo.py index 66f6e975453..69a49e0fcbe 100644 --- a/tests/components/caldav/test_todo.py +++ 
b/tests/components/caldav/test_todo.py @@ -8,8 +8,17 @@ from caldav.lib.error import DAVError, NotFoundError from caldav.objects import Todo import pytest -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -226,12 +235,12 @@ async def test_supported_components( RESULT_ITEM, ), ( - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, {"status": "NEEDS-ACTION", "summary": "Cheese", "due": date(2023, 11, 18)}, {**RESULT_ITEM, "due": "2023-11-18"}, ), ( - {"due_datetime": "2023-11-18T08:30:00-06:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T08:30:00-06:00"}, { "status": "NEEDS-ACTION", "summary": "Cheese", @@ -240,7 +249,7 @@ async def test_supported_components( {**RESULT_ITEM, "due": "2023-11-18T08:30:00-06:00"}, ), ( - {"description": "Make sure to get Swiss"}, + {ATTR_DESCRIPTION: "Make sure to get Swiss"}, { "status": "NEEDS-ACTION", "summary": "Cheese", @@ -278,9 +287,9 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Cheese", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Cheese", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -306,9 +315,9 @@ async def test_add_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV save error"): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -317,7 +326,7 @@ async def test_add_item_failure( ("update_data", 
"expected_ics", "expected_state", "expected_item"), [ ( - {"rename": "Swiss Cheese"}, + {ATTR_RENAME: "Swiss Cheese"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -334,7 +343,7 @@ async def test_add_item_failure( }, ), ( - {"status": "needs_action"}, + {ATTR_STATUS: "needs_action"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -351,7 +360,7 @@ async def test_add_item_failure( }, ), ( - {"status": "completed"}, + {ATTR_STATUS: "completed"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -368,7 +377,7 @@ async def test_add_item_failure( }, ), ( - {"rename": "Swiss Cheese", "status": "needs_action"}, + {ATTR_RENAME: "Swiss Cheese", ATTR_STATUS: "needs_action"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -385,7 +394,7 @@ async def test_add_item_failure( }, ), ( - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20231118", @@ -402,7 +411,7 @@ async def test_add_item_failure( }, ), ( - {"due_datetime": "2023-11-18T08:30:00-06:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T08:30:00-06:00"}, [ "DESCRIPTION:Any kind will do", "DUE;TZID=America/Regina:20231118T083000", @@ -419,7 +428,7 @@ async def test_add_item_failure( }, ), ( - {"due_datetime": None}, + {ATTR_DUE_DATETIME: None}, [ "DESCRIPTION:Any kind will do", "STATUS:NEEDS-ACTION", @@ -434,7 +443,7 @@ async def test_add_item_failure( }, ), ( - {"description": "Make sure to get Swiss"}, + {ATTR_DESCRIPTION: "Make sure to get Swiss"}, [ "DESCRIPTION:Make sure to get Swiss", "DUE;VALUE=DATE:20171126", @@ -451,7 +460,7 @@ async def test_add_item_failure( }, ), ( - {"description": None}, + {ATTR_DESCRIPTION: None}, ["DUE;VALUE=DATE:20171126", "STATUS:NEEDS-ACTION", "SUMMARY:Cheese"], "1", { @@ -501,12 +510,12 @@ async def test_update_item( await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", + ATTR_ITEM: "Cheese", **update_data, }, - 
target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -520,9 +529,9 @@ async def test_update_item( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, return_response=True, ) @@ -548,12 +557,12 @@ async def test_update_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV save error"): await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "status": "completed", + ATTR_ITEM: "Cheese", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -582,12 +591,12 @@ async def test_update_item_lookup_failure( with pytest.raises(HomeAssistantError, match=match): await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "status": "completed", + ATTR_ITEM: "Cheese", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -635,9 +644,9 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": uids_to_delete}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uids_to_delete}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -668,9 +677,9 @@ async def test_remove_item_lookup_failure( with pytest.raises(HomeAssistantError, match=match): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -697,9 +706,9 @@ async def test_remove_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV delete error"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - 
{"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -725,9 +734,9 @@ async def test_remove_item_not_found( with pytest.raises(HomeAssistantError, match="Could not find"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -779,12 +788,12 @@ async def test_subscribe( ] await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "rename": "Milk", + ATTR_ITEM: "Cheese", + ATTR_RENAME: "Milk", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/calendar/conftest.py b/tests/components/calendar/conftest.py index 83ecaca97d3..3e18f595764 100644 --- a/tests/components/calendar/conftest.py +++ b/tests/components/calendar/conftest.py @@ -1,12 +1,12 @@ """Test fixtures for calendar sensor platforms.""" +from collections.abc import Generator import datetime import secrets from typing import Any from unittest.mock import AsyncMock import pytest -from typing_extensions import Generator from homeassistant.components.calendar import DOMAIN, CalendarEntity, CalendarEvent from homeassistant.config_entries import ConfigEntry, ConfigFlow diff --git a/tests/components/calendar/snapshots/test_init.ambr b/tests/components/calendar/snapshots/test_init.ambr index fe23c5dbac9..1b2bb9f0196 100644 --- a/tests/components/calendar/snapshots/test_init.ambr +++ b/tests/components/calendar/snapshots/test_init.ambr @@ -7,12 +7,6 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_1-00:15:00-list_events] - dict({ - 'events': list([ - ]), - }) -# --- # name: 
test_list_events_service_duration[frozen_time-calendar.calendar_1-01:00:00-get_events] dict({ 'calendar.calendar_1': dict({ @@ -28,19 +22,6 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_1-01:00:00-list_events] - dict({ - 'events': list([ - dict({ - 'description': 'Future Description', - 'end': '2023-10-19T09:20:05-06:00', - 'location': 'Future Location', - 'start': '2023-10-19T08:20:05-06:00', - 'summary': 'Future Event', - }), - ]), - }) -# --- # name: test_list_events_service_duration[frozen_time-calendar.calendar_2-00:15:00-get_events] dict({ 'calendar.calendar_2': dict({ @@ -54,14 +35,3 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_2-00:15:00-list_events] - dict({ - 'events': list([ - dict({ - 'end': '2023-10-19T08:20:05-06:00', - 'start': '2023-10-19T07:20:05-06:00', - 'summary': 'Current Event', - }), - ]), - }) -# --- diff --git a/tests/components/calendar/test_init.py b/tests/components/calendar/test_init.py index 116ca70f15e..4ad5e11b8e4 100644 --- a/tests/components/calendar/test_init.py +++ b/tests/components/calendar/test_init.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from datetime import timedelta from http import HTTPStatus from typing import Any @@ -9,7 +10,6 @@ from typing import Any from freezegun import freeze_time import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator import voluptuous as vol from homeassistant.components.calendar import DOMAIN, SERVICE_GET_EVENTS @@ -23,7 +23,7 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator @pytest.fixture(name="frozen_time") -def mock_frozen_time() -> None: +def mock_frozen_time() -> str | None: """Fixture to set a frozen time used in tests. This is needed so that it can run before other fixtures. 
@@ -32,7 +32,7 @@ def mock_frozen_time() -> None: @pytest.fixture(autouse=True) -def mock_set_frozen_time(frozen_time: Any) -> Generator[None]: +def mock_set_frozen_time(frozen_time: str | None) -> Generator[None]: """Fixture to freeze time that also can work for other fixtures.""" if not frozen_time: yield @@ -44,9 +44,9 @@ def mock_set_frozen_time(frozen_time: Any) -> Generator[None]: @pytest.fixture(name="setup_platform", autouse=True) async def mock_setup_platform( hass: HomeAssistant, - set_time_zone: Any, - frozen_time: Any, - mock_setup_integration: Any, + set_time_zone: None, + frozen_time: str | None, + mock_setup_integration: None, config_entry: MockConfigEntry, ) -> None: """Fixture to setup platforms used in the test and fixtures are set up in the right order.""" diff --git a/tests/components/calendar/test_recorder.py b/tests/components/calendar/test_recorder.py index aeddebc226c..c7511b8b2b0 100644 --- a/tests/components/calendar/test_recorder.py +++ b/tests/components/calendar/test_recorder.py @@ -1,7 +1,6 @@ """The tests for calendar recorder.""" from datetime import timedelta -from typing import Any import pytest @@ -19,7 +18,7 @@ from tests.components.recorder.common import async_wait_recording_done async def mock_setup_dependencies( recorder_mock: Recorder, hass: HomeAssistant, - set_time_zone: Any, + set_time_zone: None, mock_setup_integration: None, config_entry: MockConfigEntry, ) -> None: diff --git a/tests/components/calendar/test_trigger.py b/tests/components/calendar/test_trigger.py index 3b415d46e63..dfe4622e82e 100644 --- a/tests/components/calendar/test_trigger.py +++ b/tests/components/calendar/test_trigger.py @@ -9,7 +9,7 @@ forward exercising the triggers. 
from __future__ import annotations -from collections.abc import AsyncIterator, Callable +from collections.abc import AsyncIterator, Callable, Generator from contextlib import asynccontextmanager import datetime import logging @@ -19,7 +19,6 @@ import zoneinfo from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator from homeassistant.components import automation, calendar from homeassistant.components.calendar.trigger import EVENT_END, EVENT_START @@ -85,9 +84,7 @@ class FakeSchedule: @pytest.fixture -def fake_schedule( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> Generator[FakeSchedule]: +def fake_schedule(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> FakeSchedule: """Fixture that tests can use to make fake events.""" # Setup start time for all tests @@ -105,7 +102,7 @@ def mock_test_entity(test_entities: list[MockCalendarEntity]) -> MockCalendarEnt @pytest.fixture(name="setup_platform", autouse=True) async def mock_setup_platform( hass: HomeAssistant, - mock_setup_integration: Any, + mock_setup_integration: None, config_entry: MockConfigEntry, ) -> None: """Fixture to setup platforms used in the test.""" diff --git a/tests/components/camera/conftest.py b/tests/components/camera/conftest.py index 524b56c2303..ea3d65f4864 100644 --- a/tests/components/camera/conftest.py +++ b/tests/components/camera/conftest.py @@ -1,9 +1,9 @@ """Test helpers for camera.""" +from collections.abc import AsyncGenerator, Generator from unittest.mock import PropertyMock, patch import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components import camera from homeassistant.components.camera.const import StreamType diff --git a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index 7da6cd91a7a..098c321e63b 100644 --- a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -1,12 +1,12 @@ """The tests for the camera 
component.""" +from collections.abc import Generator from http import HTTPStatus import io from types import ModuleType from unittest.mock import AsyncMock, Mock, PropertyMock, mock_open, patch import pytest -from typing_extensions import Generator from homeassistant.components import camera from homeassistant.components.camera.const import ( diff --git a/tests/components/canary/conftest.py b/tests/components/canary/conftest.py index 336e6577ecc..07a3ce89495 100644 --- a/tests/components/canary/conftest.py +++ b/tests/components/canary/conftest.py @@ -1,19 +1,22 @@ """Define fixtures available for all tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from canary.api import Api import pytest +from homeassistant.core import HomeAssistant + @pytest.fixture(autouse=True) -def mock_ffmpeg(hass): +def mock_ffmpeg(hass: HomeAssistant) -> None: """Mock ffmpeg is loaded.""" hass.config.components.add("ffmpeg") @pytest.fixture -def canary(hass): +def canary() -> Generator[MagicMock]: """Mock the CanaryApi for easier testing.""" with ( patch.object(Api, "login", return_value=True), @@ -38,7 +41,7 @@ def canary(hass): @pytest.fixture -def canary_config_flow(hass): +def canary_config_flow() -> Generator[MagicMock]: """Mock the CanaryApi for easier config flow testing.""" with ( patch.object(Api, "login", return_value=True), diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 2c0c36d6632..7dce3f768e2 100644 --- a/tests/components/cast/test_config_flow.py +++ b/tests/components/cast/test_config_flow.py @@ -148,6 +148,7 @@ def get_suggested(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] + return None @pytest.mark.parametrize( diff --git a/tests/components/ccm15/conftest.py b/tests/components/ccm15/conftest.py index d6cc66d77dc..e393b2679b6 100644 --- a/tests/components/ccm15/conftest.py +++ 
b/tests/components/ccm15/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Midea ccm15 AC Controller tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from ccm15 import CCM15DeviceState, CCM15SlaveDevice import pytest -from typing_extensions import Generator @pytest.fixture @@ -17,7 +17,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def ccm15_device() -> Generator[AsyncMock]: +def ccm15_device() -> Generator[None]: """Mock ccm15 device.""" ccm15_devices = { 0: CCM15SlaveDevice(bytes.fromhex("000000b0b8001b")), @@ -32,7 +32,7 @@ def ccm15_device() -> Generator[AsyncMock]: @pytest.fixture -def network_failure_ccm15_device() -> Generator[AsyncMock]: +def network_failure_ccm15_device() -> Generator[None]: """Mock empty set of ccm15 device.""" device_state = CCM15DeviceState(devices={}) with patch( diff --git a/tests/components/ccm15/test_climate.py b/tests/components/ccm15/test_climate.py index 329caafd11c..785cb17c6a9 100644 --- a/tests/components/ccm15/test_climate.py +++ b/tests/components/ccm15/test_climate.py @@ -1,10 +1,11 @@ """Unit test for CCM15 coordinator component.""" from datetime import timedelta -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from ccm15 import CCM15DeviceState from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.ccm15.const import DOMAIN @@ -27,11 +28,11 @@ from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, async_fire_time_changed +@pytest.mark.usefixtures("ccm15_device") async def test_climate_state( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - ccm15_device: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test the coordinator.""" diff --git a/tests/components/ccm15/test_diagnostics.py b/tests/components/ccm15/test_diagnostics.py index 
a433591d86e..f6f0d75c4e3 100644 --- a/tests/components/ccm15/test_diagnostics.py +++ b/tests/components/ccm15/test_diagnostics.py @@ -1,7 +1,6 @@ """Test CCM15 diagnostics.""" -from unittest.mock import AsyncMock - +import pytest from syrupy import SnapshotAssertion from homeassistant.components.ccm15.const import DOMAIN @@ -13,10 +12,10 @@ from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +@pytest.mark.usefixtures("ccm15_device") async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - ccm15_device: AsyncMock, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/ccm15/test_init.py b/tests/components/ccm15/test_init.py index 3069b61f10f..0fb75920ad3 100644 --- a/tests/components/ccm15/test_init.py +++ b/tests/components/ccm15/test_init.py @@ -1,6 +1,6 @@ """Tests for the ccm15 component.""" -from unittest.mock import AsyncMock +import pytest from homeassistant.components.ccm15.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -10,7 +10,8 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -async def test_load_unload(hass: HomeAssistant, ccm15_device: AsyncMock) -> None: +@pytest.mark.usefixtures("ccm15_device") +async def test_load_unload(hass: HomeAssistant) -> None: """Test options flow.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/cert_expiry/conftest.py b/tests/components/cert_expiry/conftest.py index 2a86c669970..4932e9e1869 100644 --- a/tests/components/cert_expiry/conftest.py +++ b/tests/components/cert_expiry/conftest.py @@ -1,9 +1,9 @@ """Configuration for cert_expiry tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/chacon_dio/__init__.py 
b/tests/components/chacon_dio/__init__.py new file mode 100644 index 00000000000..2a340097eb2 --- /dev/null +++ b/tests/components/chacon_dio/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Chacon Dio integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/chacon_dio/conftest.py b/tests/components/chacon_dio/conftest.py new file mode 100644 index 00000000000..186bc468bee --- /dev/null +++ b/tests/components/chacon_dio/conftest.py @@ -0,0 +1,73 @@ +"""Common fixtures for the chacon_dio tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.chacon_dio.const import DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from tests.common import MockConfigEntry + +MOCK_COVER_DEVICE = { + "L4HActuator_idmock1": { + "id": "L4HActuator_idmock1", + "name": "Shutter mock 1", + "type": "SHUTTER", + "model": "CERSwd-3B_1.0.6", + "connected": True, + "openlevel": 75, + "movement": "stop", + } +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.chacon_dio.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock the config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="test_entry_unique_id", + data={ + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) + + +@pytest.fixture +def mock_dio_chacon_client() -> Generator[AsyncMock]: + """Mock a Dio Chacon client.""" + + with ( + patch( + 
"homeassistant.components.chacon_dio.DIOChaconAPIClient", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.chacon_dio.config_flow.DIOChaconAPIClient", + new=mock_client, + ), + ): + client = mock_client.return_value + + # Default values for the tests using this mock : + client.get_user_id.return_value = "dummy-user-id" + client.search_all_devices.return_value = MOCK_COVER_DEVICE + + client.switch_switch.return_value = {} + + client.move_shutter_direction.return_value = {} + client.disconnect.return_value = {} + + yield client diff --git a/tests/components/chacon_dio/snapshots/test_cover.ambr b/tests/components/chacon_dio/snapshots/test_cover.ambr new file mode 100644 index 00000000000..b2febe20070 --- /dev/null +++ b/tests/components/chacon_dio/snapshots/test_cover.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_entities[cover.shutter_mock_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.shutter_mock_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'chacon_dio', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'L4HActuator_idmock1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[cover.shutter_mock_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 75, + 'device_class': 'shutter', + 'friendly_name': 'Shutter mock 1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.shutter_mock_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git 
a/tests/components/chacon_dio/snapshots/test_switch.ambr b/tests/components/chacon_dio/snapshots/test_switch.ambr new file mode 100644 index 00000000000..7a65dad5445 --- /dev/null +++ b/tests/components/chacon_dio/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_entities[switch.switch_mock_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_mock_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'chacon_dio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'L4HActuator_idmock1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[switch.switch_mock_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Switch mock 1', + }), + 'context': , + 'entity_id': 'switch.switch_mock_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/chacon_dio/test_config_flow.py b/tests/components/chacon_dio/test_config_flow.py new file mode 100644 index 00000000000..d72b5a7dec3 --- /dev/null +++ b/tests/components/chacon_dio/test_config_flow.py @@ -0,0 +1,122 @@ +"""Test the chacon_dio config flow.""" + +from unittest.mock import AsyncMock + +from dio_chacon_wifi_api.exceptions import DIOChaconAPIError, DIOChaconInvalidAuthError +import pytest + +from homeassistant.components.chacon_dio.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant 
+from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_dio_chacon_client: AsyncMock +) -> None: + """Test the full flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Chacon DiO dummylogin" + assert result["result"].unique_id == "dummy-user-id" + assert result["data"] == { + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + } + + +@pytest.mark.parametrize( + ("exception", "expected"), + [ + (Exception("Bad request Boy :) --"), {"base": "unknown"}), + (DIOChaconInvalidAuthError, {"base": "invalid_auth"}), + (DIOChaconAPIError, {"base": "cannot_connect"}), + ], +) +async def test_errors( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_dio_chacon_client: AsyncMock, + exception: Exception, + expected: dict[str, str], +) -> None: + """Test we handle any error.""" + mock_dio_chacon_client.get_user_id.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_USERNAME: "nada", + CONF_PASSWORD: "nadap", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == expected + + # Test of recover in normal state after correction of the 1st error + mock_dio_chacon_client.get_user_id.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + }, 
+ ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Chacon DiO dummylogin" + assert result["result"].unique_id == "dummy-user-id" + assert result["data"] == { + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + } + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test abort when setting up duplicate entry.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + mock_dio_chacon_client.get_user_id.return_value = "test_entry_unique_id" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/chacon_dio/test_cover.py b/tests/components/chacon_dio/test_cover.py new file mode 100644 index 00000000000..be606e67e1e --- /dev/null +++ b/tests/components/chacon_dio/test_cover.py @@ -0,0 +1,157 @@ +"""Test the Chacon Dio cover.""" + +from collections.abc import Callable +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + SERVICE_STOP_COVER, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +COVER_ENTITY_ID = "cover.shutter_mock_1" + + +async def test_entities( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation and values of the Chacon Dio covers.""" + + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_cover_actions( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation and values of the Chacon Dio covers.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(COVER_ENTITY_ID) + assert state.state == STATE_CLOSING + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(COVER_ENTITY_ID) + assert state.state == STATE_OPEN + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(COVER_ENTITY_ID) + assert state.state == STATE_OPENING + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_POSITION: 25, ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(COVER_ENTITY_ID) + assert state.state == STATE_OPENING + + +async def test_cover_callbacks( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + 
mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the creation and values of the Chacon Dio covers.""" + + await setup_integration(hass, mock_config_entry) + + # Server side callback tests + # We find the callback method on the mock client + callback_device_state_function: Callable = ( + mock_dio_chacon_client.set_callback_device_state_by_device.call_args[0][1] + ) + + # Define a method to simply call it + async def _callback_device_state_function(open_level: int, movement: str) -> None: + callback_device_state_function( + { + "id": "L4HActuator_idmock1", + "connected": True, + "openlevel": open_level, + "movement": movement, + } + ) + await hass.async_block_till_done() + + # And call it to effectively launch the callback as the server would do + await _callback_device_state_function(79, "stop") + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.attributes.get(ATTR_CURRENT_POSITION) == 79 + assert state.state == STATE_OPEN + + await _callback_device_state_function(90, "up") + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.attributes.get(ATTR_CURRENT_POSITION) == 90 + assert state.state == STATE_OPENING + + await _callback_device_state_function(60, "down") + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.attributes.get(ATTR_CURRENT_POSITION) == 60 + assert state.state == STATE_CLOSING + + +async def test_no_cover_found( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the cover absence.""" + + mock_dio_chacon_client.search_all_devices.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert not hass.states.get(COVER_ENTITY_ID) diff --git a/tests/components/chacon_dio/test_init.py b/tests/components/chacon_dio/test_init.py new file mode 100644 index 00000000000..78f1a85c71a --- /dev/null +++ 
b/tests/components/chacon_dio/test_init.py @@ -0,0 +1,43 @@ +"""Test the Dio Chacon Cover init.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import EVENT_HOMEASSISTANT_STOP +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_cover_unload_entry( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the creation and values of the Dio Chacon covers.""" + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + mock_dio_chacon_client.disconnect.assert_called() + + +async def test_cover_shutdown_event( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the creation and values of the Dio Chacon covers.""" + + await setup_integration(hass, mock_config_entry) + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + mock_dio_chacon_client.disconnect.assert_called() diff --git a/tests/components/chacon_dio/test_switch.py b/tests/components/chacon_dio/test_switch.py new file mode 100644 index 00000000000..a5ad0d0ea13 --- /dev/null +++ b/tests/components/chacon_dio/test_switch.py @@ -0,0 +1,132 @@ +"""Test the Chacon Dio switch.""" + +from collections.abc import Callable +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers 
import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +SWITCH_ENTITY_ID = "switch.switch_mock_1" + +MOCK_SWITCH_DEVICE = { + "L4HActuator_idmock1": { + "id": "L4HActuator_idmock1", + "name": "Switch mock 1", + "type": "SWITCH_LIGHT", + "model": "CERNwd-3B_1.0.6", + "connected": True, + "is_on": True, + } +} + + +async def test_entities( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation and values of the Chacon Dio switches.""" + + mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE + + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_switch_actions( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the actions on the Chacon Dio switch.""" + + mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, + blocking=True, + ) + state = hass.states.get(SWITCH_ENTITY_ID) + assert state.state == STATE_ON + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, + blocking=True, + ) + state = hass.states.get(SWITCH_ENTITY_ID) + # turn off does not change directly the state, it is made by a server side callback. 
+ assert state.state == STATE_ON + + +async def test_switch_callbacks( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the callbacks on the Chacon Dio switches.""" + + mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE + + await setup_integration(hass, mock_config_entry) + + # Server side callback tests + # We find the callback method on the mock client + callback_device_state_function: Callable = ( + mock_dio_chacon_client.set_callback_device_state_by_device.call_args[0][1] + ) + + # Define a method to simply call it + async def _callback_device_state_function(is_on: bool) -> None: + callback_device_state_function( + { + "id": "L4HActuator_idmock1", + "connected": True, + "is_on": is_on, + } + ) + await hass.async_block_till_done() + + # And call it to effectively launch the callback as the server would do + await _callback_device_state_function(False) + state = hass.states.get(SWITCH_ENTITY_ID) + assert state + assert state.state == STATE_OFF + + +async def test_no_switch_found( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the switch absence.""" + + mock_dio_chacon_client.search_all_devices.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert not hass.states.async_entity_ids(SWITCH_DOMAIN) diff --git a/tests/components/climate/conftest.py b/tests/components/climate/conftest.py index a3a6af6e8a3..fd4368c4219 100644 --- a/tests/components/climate/conftest.py +++ b/tests/components/climate/conftest.py @@ -1,7 +1,8 @@ """Fixtures for Climate platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git 
a/tests/components/climate/test_device_condition.py b/tests/components/climate/test_device_condition.py index 0961bd3dc73..16595f57c6f 100644 --- a/tests/components/climate/test_device_condition.py +++ b/tests/components/climate/test_device_condition.py @@ -17,11 +17,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -29,12 +25,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_condition_types"), [ @@ -151,7 +141,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -220,7 +210,7 @@ async def test_if_state( # Should not fire, entity doesn't exist yet hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set( entry.entity_id, @@ -232,8 +222,8 @@ async def test_if_state( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_hvac_mode - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_hvac_mode - event - test_event1" 
hass.states.async_set( entry.entity_id, @@ -246,13 +236,13 @@ async def test_if_state( # Should not fire hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_preset_mode - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_preset_mode - event - test_event2" hass.states.async_set( entry.entity_id, @@ -265,14 +255,14 @@ async def test_if_state( # Should not fire hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -323,8 +313,8 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_hvac_mode - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_hvac_mode - event - test_event1" @pytest.mark.parametrize( diff --git a/tests/components/climate/test_device_trigger.py b/tests/components/climate/test_device_trigger.py index e8e5b577bf4..a492d9805b5 100644 --- a/tests/components/climate/test_device_trigger.py +++ b/tests/components/climate/test_device_trigger.py @@ -23,11 +23,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, 
async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -35,12 +31,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -151,7 +141,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -236,8 +226,8 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "hvac_mode_changed" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "hvac_mode_changed" # Fake that the temperature is changing hass.states.async_set( @@ -250,8 +240,8 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "current_temperature_changed" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "current_temperature_changed" # Fake that the humidity is changing hass.states.async_set( @@ -264,15 +254,15 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "current_humidity_changed" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "current_humidity_changed" async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + 
service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -329,8 +319,8 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "hvac_mode_changed" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "hvac_mode_changed" async def test_get_trigger_capabilities_hvac_mode(hass: HomeAssistant) -> None: diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index a459b991203..f306551e540 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -4,6 +4,7 @@ from __future__ import annotations from enum import Enum from types import ModuleType +from typing import Any from unittest.mock import MagicMock, Mock, patch import pytest @@ -17,9 +18,14 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.components.climate.const import ( + ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, + ATTR_MAX_TEMP, + ATTR_MIN_TEMP, ATTR_PRESET_MODE, ATTR_SWING_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, SERVICE_SET_FAN_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_SWING_MODE, @@ -27,7 +33,13 @@ from homeassistant.components.climate.const import ( ClimateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, UnitOfTemperature +from homeassistant.const import ( + ATTR_TEMPERATURE, + PRECISION_WHOLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import issue_registry as ir @@ -158,7 +170,7 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert climate.turn_off.called -def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, 
str]]: +def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: return [ (enum_field, constant_prefix) for enum_field in enum @@ -709,6 +721,68 @@ async def test_no_warning_integration_has_migrated( ) +async def test_no_warning_integration_implement_feature_flags( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when integration uses the correct feature flags.""" + + class MockClimateEntityTest(MockClimateEntity): + """Mock Climate device.""" + + _attr_supported_features = ( + ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.SWING_MODE + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TURN_ON + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTest(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) + + with patch.object( + MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("climate.test") + assert state is not None + + assert "does not set ClimateEntityFeature" not in caplog.text + assert "implements HVACMode(s):" 
not in caplog.text + + async def test_turn_on_off_toggle(hass: HomeAssistant) -> None: """Test turn_on/turn_off/toggle methods.""" @@ -1090,3 +1164,127 @@ async def test_no_issue_no_aux_property( "the auxiliary heater methods in a subclass of ClimateEntity which is deprecated " "and will be unsupported from Home Assistant 2024.10." ) not in caplog.text + + +async def test_temperature_validation( + hass: HomeAssistant, config_flow_fixture: None +) -> None: + """Test validation for temperatures.""" + + class MockClimateEntityTemp(MockClimateEntity): + """Mock climate class with mocked aux heater.""" + + _attr_supported_features = ( + ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.SWING_MODE + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + ) + _attr_target_temperature = 15 + _attr_target_temperature_high = 18 + _attr_target_temperature_low = 10 + _attr_target_temperature_step = PRECISION_WHOLE + + def set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if ATTR_TEMPERATURE in kwargs: + self._attr_target_temperature = kwargs[ATTR_TEMPERATURE] + if ATTR_TARGET_TEMP_HIGH in kwargs: + self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] + self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTemp(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + 
built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("climate.test") + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) is None + assert state.attributes.get(ATTR_MIN_TEMP) == 7 + assert state.attributes.get(ATTR_MAX_TEMP) == 35 + + with pytest.raises( + ServiceValidationError, + match="Provided temperature 40.0 is not valid. Accepted range is 7 to 35", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TEMPERATURE: "40", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Provided temperature 40.0 is not valid. Accepted range is 7 to 35" + ) + assert exc.value.translation_key == "temp_out_of_range" + + with pytest.raises( + ServiceValidationError, + match="Provided temperature 0.0 is not valid. Accepted range is 7 to 35", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TARGET_TEMP_HIGH: "25", + ATTR_TARGET_TEMP_LOW: "0", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Provided temperature 0.0 is not valid. 
Accepted range is 7 to 35" + ) + assert exc.value.translation_key == "temp_out_of_range" + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TARGET_TEMP_HIGH: "25", + ATTR_TARGET_TEMP_LOW: "10", + }, + blocking=True, + ) + + state = hass.states.get("climate.test") + assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 10 + assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25 diff --git a/tests/components/climate/test_intent.py b/tests/components/climate/test_intent.py index ab1e3629ef8..54e2e4ff1a6 100644 --- a/tests/components/climate/test_intent.py +++ b/tests/components/climate/test_intent.py @@ -1,7 +1,8 @@ """Test climate intents.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.components import conversation from homeassistant.components.climate import ( diff --git a/tests/components/climate/test_reproduce_state.py b/tests/components/climate/test_reproduce_state.py index 636ab326a2b..0632ebcc9e4 100644 --- a/tests/components/climate/test_reproduce_state.py +++ b/tests/components/climate/test_reproduce_state.py @@ -3,7 +3,6 @@ import pytest from homeassistant.components.climate import ( - ATTR_AUX_HEAT, ATTR_FAN_MODE, ATTR_HUMIDITY, ATTR_PRESET_MODE, @@ -11,7 +10,6 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN, - SERVICE_SET_AUX_HEAT, SERVICE_SET_FAN_MODE, SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, @@ -96,7 +94,6 @@ async def test_state_with_context(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("service", "attribute"), [ - (SERVICE_SET_AUX_HEAT, ATTR_AUX_HEAT), (SERVICE_SET_PRESET_MODE, ATTR_PRESET_MODE), (SERVICE_SET_SWING_MODE, ATTR_SWING_MODE), (SERVICE_SET_FAN_MODE, ATTR_FAN_MODE), diff --git a/tests/components/cloud/__init__.py b/tests/components/cloud/__init__.py index d527cbbeec2..18f8cd4d311 100644 --- a/tests/components/cloud/__init__.py +++ 
b/tests/components/cloud/__init__.py @@ -1,11 +1,23 @@ """Tests for the cloud component.""" +from typing import Any from unittest.mock import AsyncMock, patch -from hass_nabucasa import Cloud - -from homeassistant.components import cloud -from homeassistant.components.cloud import const, prefs as cloud_prefs +from homeassistant.components.cloud.const import ( + DATA_CLOUD, + DOMAIN, + PREF_ALEXA_SETTINGS_VERSION, + PREF_ENABLE_ALEXA, + PREF_ENABLE_GOOGLE, + PREF_GOOGLE_SECURE_DEVICES_PIN, + PREF_GOOGLE_SETTINGS_VERSION, +) +from homeassistant.components.cloud.prefs import ( + ALEXA_SETTINGS_VERSION, + GOOGLE_SETTINGS_VERSION, + CloudPreferences, +) +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component PIPELINE_DATA = { @@ -57,27 +69,27 @@ PIPELINE_DATA = { } -async def mock_cloud(hass, config=None): +async def mock_cloud(hass: HomeAssistant, config: dict[str, Any] | None = None) -> None: """Mock cloud.""" # The homeassistant integration is needed by cloud. It's not in it's requirements # because it's always setup by bootstrap. Set it up manually in tests. 
assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, cloud.DOMAIN, {"cloud": config or {}}) - cloud_inst: Cloud = hass.data["cloud"] + assert await async_setup_component(hass, DOMAIN, {"cloud": config or {}}) + cloud_inst = hass.data[DATA_CLOUD] with patch("hass_nabucasa.Cloud.run_executor", AsyncMock(return_value=None)): await cloud_inst.initialize() -def mock_cloud_prefs(hass, prefs): +def mock_cloud_prefs(hass: HomeAssistant, prefs: dict[str, Any]) -> CloudPreferences: """Fixture for cloud component.""" prefs_to_set = { - const.PREF_ALEXA_SETTINGS_VERSION: cloud_prefs.ALEXA_SETTINGS_VERSION, - const.PREF_ENABLE_ALEXA: True, - const.PREF_ENABLE_GOOGLE: True, - const.PREF_GOOGLE_SECURE_DEVICES_PIN: None, - const.PREF_GOOGLE_SETTINGS_VERSION: cloud_prefs.GOOGLE_SETTINGS_VERSION, + PREF_ALEXA_SETTINGS_VERSION: ALEXA_SETTINGS_VERSION, + PREF_ENABLE_ALEXA: True, + PREF_ENABLE_GOOGLE: True, + PREF_GOOGLE_SECURE_DEVICES_PIN: None, + PREF_GOOGLE_SETTINGS_VERSION: GOOGLE_SETTINGS_VERSION, } prefs_to_set.update(prefs) - hass.data[cloud.DOMAIN].client._prefs._prefs = prefs_to_set - return hass.data[cloud.DOMAIN].client._prefs + hass.data[DATA_CLOUD].client._prefs._prefs = prefs_to_set + return hass.data[DATA_CLOUD].client._prefs diff --git a/tests/components/cloud/conftest.py b/tests/components/cloud/conftest.py index ebd9ea6663e..2edd9571bdd 100644 --- a/tests/components/cloud/conftest.py +++ b/tests/components/cloud/conftest.py @@ -1,6 +1,6 @@ """Fixtures for cloud tests.""" -from collections.abc import Callable, Coroutine +from collections.abc import AsyncGenerator, Callable, Coroutine, Generator from pathlib import Path from typing import Any from unittest.mock import DEFAULT, MagicMock, PropertyMock, patch @@ -15,9 +15,14 @@ from hass_nabucasa.remote import RemoteUI from hass_nabucasa.voice import Voice import jwt import pytest -from typing_extensions import AsyncGenerator -from homeassistant.components.cloud import 
CloudClient, const, prefs +from homeassistant.components.cloud.client import CloudClient +from homeassistant.components.cloud.const import DATA_CLOUD +from homeassistant.components.cloud.prefs import ( + PREF_ALEXA_DEFAULT_EXPOSE, + PREF_GOOGLE_DEFAULT_EXPOSE, + CloudPreferences, +) from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -173,8 +178,8 @@ def set_cloud_prefs_fixture( async def set_cloud_prefs(prefs_settings: dict[str, Any]) -> None: """Set cloud prefs.""" prefs_to_set = cloud.client.prefs.as_dict() - prefs_to_set.pop(prefs.PREF_ALEXA_DEFAULT_EXPOSE) - prefs_to_set.pop(prefs.PREF_GOOGLE_DEFAULT_EXPOSE) + prefs_to_set.pop(PREF_ALEXA_DEFAULT_EXPOSE) + prefs_to_set.pop(PREF_GOOGLE_DEFAULT_EXPOSE) prefs_to_set.update(prefs_settings) await cloud.client.prefs.async_update(**prefs_to_set) @@ -182,9 +187,8 @@ def set_cloud_prefs_fixture( @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) @@ -193,37 +197,37 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_user_data(): +def mock_user_data() -> Generator[MagicMock]: """Mock os module.""" with patch("hass_nabucasa.Cloud._write_user_info") as writer: yield writer @pytest.fixture -def mock_cloud_fixture(hass): +def mock_cloud_fixture(hass: HomeAssistant) -> CloudPreferences: """Fixture for cloud component.""" hass.loop.run_until_complete(mock_cloud(hass)) return mock_cloud_prefs(hass, {}) @pytest.fixture -async def cloud_prefs(hass): +async def cloud_prefs(hass: HomeAssistant) -> CloudPreferences: """Fixture for cloud preferences.""" - cloud_prefs = prefs.CloudPreferences(hass) + cloud_prefs = CloudPreferences(hass) await 
cloud_prefs.async_initialize() return cloud_prefs @pytest.fixture -async def mock_cloud_setup(hass): +async def mock_cloud_setup(hass: HomeAssistant) -> None: """Set up the cloud.""" await mock_cloud(hass) @pytest.fixture -def mock_cloud_login(hass, mock_cloud_setup): +def mock_cloud_login(hass: HomeAssistant, mock_cloud_setup: None) -> Generator[None]: """Mock cloud is logged in.""" - hass.data[const.DOMAIN].id_token = jwt.encode( + hass.data[DATA_CLOUD].id_token = jwt.encode( { "email": "hello@home-assistant.io", "custom:sub-exp": "2300-01-03", @@ -231,12 +235,12 @@ def mock_cloud_login(hass, mock_cloud_setup): }, "test", ) - with patch.object(hass.data[const.DOMAIN].auth, "async_check_token"): + with patch.object(hass.data[DATA_CLOUD].auth, "async_check_token"): yield @pytest.fixture(name="mock_auth") -def mock_auth_fixture(): +def mock_auth_fixture() -> Generator[None]: """Mock check token.""" with ( patch("hass_nabucasa.auth.CognitoAuth.async_check_token"), @@ -246,9 +250,9 @@ def mock_auth_fixture(): @pytest.fixture -def mock_expired_cloud_login(hass, mock_cloud_setup): +def mock_expired_cloud_login(hass: HomeAssistant, mock_cloud_setup: None) -> None: """Mock cloud is logged in.""" - hass.data[const.DOMAIN].id_token = jwt.encode( + hass.data[DATA_CLOUD].id_token = jwt.encode( { "email": "hello@home-assistant.io", "custom:sub-exp": "2018-01-01", diff --git a/tests/components/cloud/test_account_link.py b/tests/components/cloud/test_account_link.py index 3f108961bc5..cd81a7cf691 100644 --- a/tests/components/cloud/test_account_link.py +++ b/tests/components/cloud/test_account_link.py @@ -1,6 +1,7 @@ """Test account link services.""" import asyncio +from collections.abc import Generator import logging from time import time from unittest.mock import AsyncMock, Mock, patch @@ -9,6 +10,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.cloud import account_link +from homeassistant.components.cloud.const import DATA_CLOUD from 
homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -20,7 +22,9 @@ TEST_DOMAIN = "oauth2_test" @pytest.fixture -def flow_handler(hass): +def flow_handler( + hass: HomeAssistant, +) -> Generator[type[config_entry_oauth2_flow.AbstractOAuth2FlowHandler]]: """Return a registered config flow.""" mock_platform(hass, f"{TEST_DOMAIN}.config_flow") @@ -133,7 +137,7 @@ async def test_setup_provide_implementation(hass: HomeAssistant) -> None: async def test_get_services_cached(hass: HomeAssistant) -> None: """Test that we cache services.""" - hass.data["cloud"] = None + hass.data[DATA_CLOUD] = None services = 1 @@ -165,7 +169,7 @@ async def test_get_services_cached(hass: HomeAssistant) -> None: async def test_get_services_error(hass: HomeAssistant) -> None: """Test that we cache services.""" - hass.data["cloud"] = None + hass.data[DATA_CLOUD] = None with ( patch.object(account_link, "CACHE_TIMEOUT", 0), @@ -179,9 +183,12 @@ async def test_get_services_error(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("current_request_with_host") -async def test_implementation(hass: HomeAssistant, flow_handler) -> None: +async def test_implementation( + hass: HomeAssistant, + flow_handler: type[config_entry_oauth2_flow.AbstractOAuth2FlowHandler], +) -> None: """Test Cloud OAuth2 implementation.""" - hass.data["cloud"] = None + hass.data[DATA_CLOUD] = None impl = account_link.CloudOAuth2Implementation(hass, "test") assert impl.name == "Home Assistant Cloud" diff --git a/tests/components/cloud/test_alexa_config.py b/tests/components/cloud/test_alexa_config.py index a6b05198ca4..3b4868b56ac 100644 --- a/tests/components/cloud/test_alexa_config.py +++ b/tests/components/cloud/test_alexa_config.py @@ -8,6 +8,7 @@ import pytest from homeassistant.components.alexa import errors from homeassistant.components.cloud import ALEXA_SCHEMA, alexa_config from 
homeassistant.components.cloud.const import ( + DATA_CLOUD, PREF_ALEXA_DEFAULT_EXPOSE, PREF_ALEXA_ENTITY_CONFIGS, PREF_SHOULD_EXPOSE, @@ -15,7 +16,6 @@ from homeassistant.components.cloud.const import ( from homeassistant.components.cloud.prefs import CloudPreferences from homeassistant.components.homeassistant.exposed_entities import ( DATA_EXPOSED_ENTITIES, - ExposedEntities, async_expose_entity, async_get_entity_settings, ) @@ -34,24 +34,27 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture -def cloud_stub(): +def cloud_stub() -> Mock: """Stub the cloud.""" return Mock(is_logged_in=True, subscription_expired=False) -def expose_new(hass, expose_new): +def expose_new(hass: HomeAssistant, expose_new: bool) -> None: """Enable exposing new entities to Alexa.""" - exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] exposed_entities.async_set_expose_new_entities("cloud.alexa", expose_new) -def expose_entity(hass, entity_id, should_expose): +def expose_entity(hass: HomeAssistant, entity_id: str, should_expose: bool) -> None: """Expose an entity to Alexa.""" async_expose_entity(hass, "cloud.alexa", entity_id, should_expose) async def test_alexa_config_expose_entity_prefs( - hass: HomeAssistant, cloud_prefs, cloud_stub, entity_registry: er.EntityRegistry + hass: HomeAssistant, + cloud_prefs: CloudPreferences, + cloud_stub: Mock, + entity_registry: er.EntityRegistry, ) -> None: """Test Alexa config should expose using prefs.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -133,7 +136,7 @@ async def test_alexa_config_expose_entity_prefs( async def test_alexa_config_report_state( - hass: HomeAssistant, cloud_prefs, cloud_stub + hass: HomeAssistant, cloud_prefs: CloudPreferences, cloud_stub: Mock ) -> None: """Test Alexa config should expose using prefs.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -168,7 +171,9 @@ async def 
test_alexa_config_report_state( async def test_alexa_config_invalidate_token( - hass: HomeAssistant, cloud_prefs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + cloud_prefs: CloudPreferences, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test Alexa config should expose using prefs.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -218,11 +223,11 @@ async def test_alexa_config_invalidate_token( ) async def test_alexa_config_fail_refresh_token( hass: HomeAssistant, - cloud_prefs, + cloud_prefs: CloudPreferences, aioclient_mock: AiohttpClientMocker, entity_registry: er.EntityRegistry, - reject_reason, - expected_exception, + reject_reason: str, + expected_exception: type[Exception], ) -> None: """Test Alexa config failing to refresh token.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -342,7 +347,10 @@ def patch_sync_helper(): async def test_alexa_update_expose_trigger_sync( - hass: HomeAssistant, entity_registry: er.EntityRegistry, cloud_prefs, cloud_stub + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + cloud_prefs: CloudPreferences, + cloud_stub: Mock, ) -> None: """Test Alexa config responds to updating exposed entities.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -415,18 +423,18 @@ async def test_alexa_update_expose_trigger_sync( ] +@pytest.mark.usefixtures("mock_cloud_login") async def test_alexa_entity_registry_sync( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_cloud_login, - cloud_prefs, + cloud_prefs: CloudPreferences, ) -> None: """Test Alexa config responds to entity registry.""" # Enable exposing new entities to Alexa expose_new(hass, True) await alexa_config.CloudAlexaConfig( - hass, ALEXA_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data["cloud"] + hass, ALEXA_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data[DATA_CLOUD] ).async_initialize() with patch_sync_helper() as (to_update, to_remove): @@ -475,7 +483,7 @@ async def 
test_alexa_entity_registry_sync( async def test_alexa_update_report_state( - hass: HomeAssistant, cloud_prefs, cloud_stub + hass: HomeAssistant, cloud_prefs: CloudPreferences, cloud_stub: Mock ) -> None: """Test Alexa config responds to reporting state.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -502,23 +510,24 @@ async def test_alexa_update_report_state( assert len(mock_sync.mock_calls) == 1 +@pytest.mark.usefixtures("mock_expired_cloud_login") def test_enabled_requires_valid_sub( - hass: HomeAssistant, mock_expired_cloud_login, cloud_prefs + hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test that alexa config enabled requires a valid Cloud sub.""" assert cloud_prefs.alexa_enabled - assert hass.data["cloud"].is_logged_in - assert hass.data["cloud"].subscription_expired + assert hass.data[DATA_CLOUD].is_logged_in + assert hass.data[DATA_CLOUD].subscription_expired config = alexa_config.CloudAlexaConfig( - hass, ALEXA_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data["cloud"] + hass, ALEXA_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data[DATA_CLOUD] ) assert not config.enabled async def test_alexa_handle_logout( - hass: HomeAssistant, cloud_prefs, cloud_stub + hass: HomeAssistant, cloud_prefs: CloudPreferences, cloud_stub: Mock ) -> None: """Test Alexa config responds to logging out.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -561,7 +570,7 @@ async def test_alexa_handle_logout( async def test_alexa_config_migrate_expose_entity_prefs( hass: HomeAssistant, cloud_prefs: CloudPreferences, - cloud_stub, + cloud_stub: Mock, entity_registry: er.EntityRegistry, alexa_settings_version: int, ) -> None: @@ -755,7 +764,7 @@ async def test_alexa_config_migrate_expose_entity_prefs_v2_exposed( async def test_alexa_config_migrate_expose_entity_prefs_default_none( hass: HomeAssistant, cloud_prefs: CloudPreferences, - cloud_stub, + cloud_stub: Mock, entity_registry: er.EntityRegistry, ) -> None: """Test migrating 
Alexa entity config.""" @@ -793,7 +802,7 @@ async def test_alexa_config_migrate_expose_entity_prefs_default_none( async def test_alexa_config_migrate_expose_entity_prefs_default( hass: HomeAssistant, cloud_prefs: CloudPreferences, - cloud_stub, + cloud_stub: Mock, entity_registry: er.EntityRegistry, ) -> None: """Test migrating Alexa entity config.""" diff --git a/tests/components/cloud/test_binary_sensor.py b/tests/components/cloud/test_binary_sensor.py index 789947f3c7d..8a4a1a0e9aa 100644 --- a/tests/components/cloud/test_binary_sensor.py +++ b/tests/components/cloud/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for the cloud binary sensor.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from hass_nabucasa.const import DISPATCH_REMOTE_CONNECT, DISPATCH_REMOTE_DISCONNECT import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_registry import EntityRegistry diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 7c04373c261..005efd990fb 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -1,6 +1,7 @@ """Test the cloud.iot module.""" from datetime import timedelta +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch import aiohttp @@ -15,13 +16,14 @@ from homeassistant.components.cloud.client import ( CloudClient, ) from homeassistant.components.cloud.const import ( + DATA_CLOUD, PREF_ALEXA_REPORT_STATE, PREF_ENABLE_ALEXA, PREF_ENABLE_GOOGLE, ) +from homeassistant.components.cloud.prefs import CloudPreferences from homeassistant.components.homeassistant.exposed_entities import ( DATA_EXPOSED_ENTITIES, - ExposedEntities, async_expose_entity, ) from homeassistant.const import CONTENT_TYPE_JSON, __version__ as HA_VERSION @@ -37,7 +39,7 @@ from tests.components.alexa import test_smart_home as test_alexa @pytest.fixture -def 
mock_cloud_inst(): +def mock_cloud_inst() -> MagicMock: """Mock cloud class.""" return MagicMock(subscription_expired=False) @@ -64,7 +66,7 @@ async def test_handler_alexa(hass: HomeAssistant) -> None: ) mock_cloud_prefs(hass, {PREF_ALEXA_REPORT_STATE: False}) - cloud = hass.data["cloud"] + cloud = hass.data[DATA_CLOUD] resp = await cloud.client.async_alexa_message( test_alexa.get_new_request("Alexa.Discovery", "Discover") @@ -81,10 +83,12 @@ async def test_handler_alexa(hass: HomeAssistant) -> None: assert device["manufacturerName"] == "Home Assistant" -async def test_handler_alexa_disabled(hass: HomeAssistant, mock_cloud_fixture) -> None: +async def test_handler_alexa_disabled( + hass: HomeAssistant, mock_cloud_fixture: CloudPreferences +) -> None: """Test handler Alexa when user has disabled it.""" mock_cloud_fixture._prefs[PREF_ENABLE_ALEXA] = False - cloud = hass.data["cloud"] + cloud = hass.data[DATA_CLOUD] resp = await cloud.client.async_alexa_message( test_alexa.get_new_request("Alexa.Discovery", "Discover") @@ -118,7 +122,7 @@ async def test_handler_google_actions(hass: HomeAssistant) -> None: ) mock_cloud_prefs(hass, {}) - cloud = hass.data["cloud"] + cloud = hass.data[DATA_CLOUD] reqid = "5711642932632160983" data = {"requestId": reqid, "inputs": [{"intent": "action.devices.SYNC"}]} @@ -154,7 +158,10 @@ async def test_handler_google_actions(hass: HomeAssistant) -> None: ], ) async def test_handler_google_actions_disabled( - hass: HomeAssistant, mock_cloud_fixture, intent, response_payload + hass: HomeAssistant, + mock_cloud_fixture: CloudPreferences, + intent: str, + response_payload: dict[str, Any], ) -> None: """Test handler Google Actions when user has disabled it.""" mock_cloud_fixture._prefs[PREF_ENABLE_GOOGLE] = False @@ -165,7 +172,7 @@ async def test_handler_google_actions_disabled( reqid = "5711642932632160983" data = {"requestId": reqid, "inputs": [{"intent": intent}]} - cloud = hass.data["cloud"] + cloud = hass.data[DATA_CLOUD] with patch( 
"hass_nabucasa.Cloud._decode_claims", return_value={"cognito:username": "myUserName"}, @@ -183,7 +190,7 @@ async def test_webhook_msg( with patch("hass_nabucasa.Cloud.initialize"): setup = await async_setup_component(hass, "cloud", {"cloud": {}}) assert setup - cloud = hass.data["cloud"] + cloud = hass.data[DATA_CLOUD] await cloud.client.prefs.async_initialize() await cloud.client.prefs.async_update( @@ -253,16 +260,15 @@ async def test_webhook_msg( assert '{"nonexisting": "payload"}' in caplog.text +@pytest.mark.usefixtures("mock_cloud_setup", "mock_cloud_login") async def test_google_config_expose_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_cloud_setup, - mock_cloud_login, ) -> None: """Test Google config exposing entity method uses latest config.""" # Enable exposing new entities to Google - exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] exposed_entities.async_set_expose_new_entities("cloud.google_assistant", True) # Register a light entity @@ -270,7 +276,7 @@ async def test_google_config_expose_entity( "light", "test", "unique", suggested_object_id="kitchen" ) - cloud_client = hass.data[DOMAIN].client + cloud_client = hass.data[DATA_CLOUD].client state = State(entity_entry.entity_id, "on") gconf = await cloud_client.get_google_config() @@ -281,11 +287,10 @@ async def test_google_config_expose_entity( assert not gconf.should_expose(state) +@pytest.mark.usefixtures("mock_cloud_setup", "mock_cloud_login") async def test_google_config_should_2fa( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_cloud_setup, - mock_cloud_login, ) -> None: """Test Google config disabling 2FA method uses latest config.""" @@ -294,7 +299,7 @@ async def test_google_config_should_2fa( "light", "test", "unique", suggested_object_id="kitchen" ) - cloud_client = hass.data[DOMAIN].client + cloud_client = hass.data[DATA_CLOUD].client gconf = await 
cloud_client.get_google_config() state = State(entity_entry.entity_id, "on") @@ -351,7 +356,7 @@ async def test_system_msg(hass: HomeAssistant) -> None: with patch("hass_nabucasa.Cloud.initialize"): setup = await async_setup_component(hass, "cloud", {"cloud": {}}) assert setup - cloud = hass.data["cloud"] + cloud = hass.data[DATA_CLOUD] assert cloud.client.relayer_region is None @@ -374,7 +379,7 @@ async def test_cloud_connection_info(hass: HomeAssistant) -> None: hexmock.return_value = "12345678901234567890" setup = await async_setup_component(hass, "cloud", {"cloud": {}}) assert setup - cloud = hass.data["cloud"] + cloud = hass.data[DATA_CLOUD] response = await cloud.client.async_cloud_connection_info({}) diff --git a/tests/components/cloud/test_google_config.py b/tests/components/cloud/test_google_config.py index 66530bfa3f8..b152309b24a 100644 --- a/tests/components/cloud/test_google_config.py +++ b/tests/components/cloud/test_google_config.py @@ -8,6 +8,7 @@ import pytest from homeassistant.components.cloud import GACTIONS_SCHEMA from homeassistant.components.cloud.const import ( + DATA_CLOUD, PREF_DISABLE_2FA, PREF_GOOGLE_DEFAULT_EXPOSE, PREF_GOOGLE_ENTITY_CONFIGS, @@ -18,7 +19,6 @@ from homeassistant.components.cloud.prefs import CloudPreferences from homeassistant.components.google_assistant import helpers as ga_helpers from homeassistant.components.homeassistant.exposed_entities import ( DATA_EXPOSED_ENTITIES, - ExposedEntities, async_expose_entity, async_get_entity_settings, ) @@ -36,7 +36,7 @@ from tests.common import async_fire_time_changed @pytest.fixture -def mock_conf(hass, cloud_prefs): +def mock_conf(hass: HomeAssistant, cloud_prefs: CloudPreferences) -> CloudGoogleConfig: """Mock Google conf.""" return CloudGoogleConfig( hass, @@ -47,19 +47,19 @@ def mock_conf(hass, cloud_prefs): ) -def expose_new(hass, expose_new): +def expose_new(hass: HomeAssistant, expose_new: bool) -> None: """Enable exposing new entities to Google.""" - exposed_entities: 
ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] exposed_entities.async_set_expose_new_entities("cloud.google_assistant", expose_new) -def expose_entity(hass, entity_id, should_expose): +def expose_entity(hass: HomeAssistant, entity_id: str, should_expose: bool) -> None: """Expose an entity to Google.""" async_expose_entity(hass, "cloud.google_assistant", entity_id, should_expose) async def test_google_update_report_state( - mock_conf, hass: HomeAssistant, cloud_prefs + mock_conf: CloudGoogleConfig, hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test Google config responds to updating preference.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -83,7 +83,7 @@ async def test_google_update_report_state( async def test_google_update_report_state_subscription_expired( - mock_conf, hass: HomeAssistant, cloud_prefs + mock_conf: CloudGoogleConfig, hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test Google config not reporting state when subscription has expired.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -106,7 +106,9 @@ async def test_google_update_report_state_subscription_expired( assert len(mock_report_state.mock_calls) == 0 -async def test_sync_entities(mock_conf, hass: HomeAssistant, cloud_prefs) -> None: +async def test_sync_entities( + mock_conf: CloudGoogleConfig, hass: HomeAssistant, cloud_prefs: CloudPreferences +) -> None: """Test sync devices.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -129,7 +131,9 @@ async def test_sync_entities(mock_conf, hass: HomeAssistant, cloud_prefs) -> Non async def test_google_update_expose_trigger_sync( - hass: HomeAssistant, entity_registry: er.EntityRegistry, cloud_prefs + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + cloud_prefs: CloudPreferences, ) -> None: """Test Google config responds to updating exposed entities.""" assert await 
async_setup_component(hass, "homeassistant", {}) @@ -185,11 +189,11 @@ async def test_google_update_expose_trigger_sync( assert len(mock_sync.mock_calls) == 1 +@pytest.mark.usefixtures("mock_cloud_login") async def test_google_entity_registry_sync( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_cloud_login, - cloud_prefs, + cloud_prefs: CloudPreferences, ) -> None: """Test Google config responds to entity registry.""" @@ -197,7 +201,7 @@ async def test_google_entity_registry_sync( expose_new(hass, True) config = CloudGoogleConfig( - hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data["cloud"] + hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data[DATA_CLOUD] ) await config.async_initialize() await config.async_connect_agent_user("mock-user-id") @@ -257,15 +261,15 @@ async def test_google_entity_registry_sync( assert len(mock_sync.mock_calls) == 3 +@pytest.mark.usefixtures("mock_cloud_login") async def test_google_device_registry_sync( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_cloud_login, - cloud_prefs, + cloud_prefs: CloudPreferences, ) -> None: """Test Google config responds to device registry.""" config = CloudGoogleConfig( - hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data["cloud"] + hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data[DATA_CLOUD] ) # Enable exposing new entities to Google @@ -329,12 +333,13 @@ async def test_google_device_registry_sync( assert len(mock_sync.mock_calls) == 1 +@pytest.mark.usefixtures("mock_cloud_login") async def test_sync_google_when_started( - hass: HomeAssistant, mock_cloud_login, cloud_prefs + hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test Google config syncs on init.""" config = CloudGoogleConfig( - hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data["cloud"] + hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data[DATA_CLOUD] ) with patch.object(config, "async_sync_entities_all") 
as mock_sync: await config.async_initialize() @@ -342,12 +347,13 @@ async def test_sync_google_when_started( assert len(mock_sync.mock_calls) == 1 +@pytest.mark.usefixtures("mock_cloud_login") async def test_sync_google_on_home_assistant_start( - hass: HomeAssistant, mock_cloud_login, cloud_prefs + hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test Google config syncs when home assistant started.""" config = CloudGoogleConfig( - hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data["cloud"] + hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data[DATA_CLOUD] ) hass.set_state(CoreState.not_running) with patch.object(config, "async_sync_entities_all") as mock_sync: @@ -361,7 +367,10 @@ async def test_sync_google_on_home_assistant_start( async def test_google_config_expose_entity_prefs( - hass: HomeAssistant, mock_conf, cloud_prefs, entity_registry: er.EntityRegistry + hass: HomeAssistant, + mock_conf: CloudGoogleConfig, + cloud_prefs: CloudPreferences, + entity_registry: er.EntityRegistry, ) -> None: """Test Google config should expose using prefs.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -437,23 +446,24 @@ async def test_google_config_expose_entity_prefs( assert not mock_conf.should_expose(state_not_exposed) +@pytest.mark.usefixtures("mock_expired_cloud_login") def test_enabled_requires_valid_sub( - hass: HomeAssistant, mock_expired_cloud_login, cloud_prefs + hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test that google config enabled requires a valid Cloud sub.""" assert cloud_prefs.google_enabled - assert hass.data["cloud"].is_logged_in - assert hass.data["cloud"].subscription_expired + assert hass.data[DATA_CLOUD].is_logged_in + assert hass.data[DATA_CLOUD].subscription_expired config = CloudGoogleConfig( - hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data["cloud"] + hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data[DATA_CLOUD] ) assert not 
config.enabled async def test_setup_google_assistant( - hass: HomeAssistant, mock_conf, cloud_prefs + hass: HomeAssistant, mock_conf: CloudGoogleConfig, cloud_prefs: CloudPreferences ) -> None: """Test that we set up the google_assistant integration if enabled in cloud.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -472,8 +482,9 @@ async def test_setup_google_assistant( assert "google_assistant" in hass.config.components +@pytest.mark.usefixtures("mock_cloud_login") async def test_google_handle_logout( - hass: HomeAssistant, cloud_prefs, mock_cloud_login + hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test Google config responds to logging out.""" gconf = CloudGoogleConfig( @@ -495,7 +506,7 @@ async def test_google_handle_logout( await cloud_prefs.get_cloud_user() with patch.object( - hass.data["cloud"].auth, + hass.data[DATA_CLOUD].auth, "async_check_token", side_effect=AssertionError("Should not be called"), ): @@ -853,12 +864,13 @@ async def test_google_config_migrate_expose_entity_prefs_default( } +@pytest.mark.usefixtures("mock_cloud_login") async def test_google_config_get_agent_user_id( - hass: HomeAssistant, mock_cloud_login, cloud_prefs + hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test overridden get_agent_user_id_from_webhook method.""" config = CloudGoogleConfig( - hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data["cloud"] + hass, GACTIONS_SCHEMA({}), "mock-user-id", cloud_prefs, hass.data[DATA_CLOUD] ) assert ( config.get_agent_user_id_from_webhook(cloud_prefs.google_local_webhook_id) @@ -867,8 +879,9 @@ async def test_google_config_get_agent_user_id( assert config.get_agent_user_id_from_webhook("other_id") != config.agent_user_id +@pytest.mark.usefixtures("mock_cloud_login") async def test_google_config_get_agent_users( - hass: HomeAssistant, mock_cloud_login, cloud_prefs + hass: HomeAssistant, cloud_prefs: CloudPreferences ) -> None: """Test overridden async_get_agent_users 
method.""" username_mock = PropertyMock(return_value="blah") diff --git a/tests/components/cloud/test_init.py b/tests/components/cloud/test_init.py index 9cc1324ebc1..ad123cded84 100644 --- a/tests/components/cloud/test_init.py +++ b/tests/components/cloud/test_init.py @@ -4,18 +4,24 @@ from collections.abc import Callable, Coroutine from typing import Any from unittest.mock import MagicMock, patch -from hass_nabucasa import Cloud import pytest -from homeassistant.components import cloud from homeassistant.components.cloud import ( + CloudConnectionState, CloudNotAvailable, CloudNotConnected, async_get_or_create_cloudhook, + async_listen_connection_change, + async_remote_ui_url, +) +from homeassistant.components.cloud.const import ( + DATA_CLOUD, + DOMAIN, + MODE_DEV, + PREF_CLOUDHOOKS, ) -from homeassistant.components.cloud.const import DOMAIN, PREF_CLOUDHOOKS from homeassistant.components.cloud.prefs import STORAGE_KEY -from homeassistant.const import EVENT_HOMEASSISTANT_STOP +from homeassistant.const import CONF_MODE, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import Unauthorized from homeassistant.setup import async_setup_component @@ -32,7 +38,7 @@ async def test_constructor_loads_info_from_config(hass: HomeAssistant) -> None: { "http": {}, "cloud": { - cloud.CONF_MODE: cloud.MODE_DEV, + CONF_MODE: MODE_DEV, "cognito_client_id": "test-cognito_client_id", "user_pool_id": "test-user_pool_id", "region": "test-region", @@ -47,8 +53,8 @@ async def test_constructor_loads_info_from_config(hass: HomeAssistant) -> None: ) assert result - cl = hass.data["cloud"] - assert cl.mode == cloud.MODE_DEV + cl = hass.data[DATA_CLOUD] + assert cl.mode == MODE_DEV assert cl.cognito_client_id == "test-cognito_client_id" assert cl.user_pool_id == "test-user_pool_id" assert cl.region == "test-region" @@ -61,11 +67,12 @@ async def test_constructor_loads_info_from_config(hass: HomeAssistant) -> None: assert 
cl.remotestate_server == "test-remotestate-server" +@pytest.mark.usefixtures("mock_cloud_fixture") async def test_remote_services( - hass: HomeAssistant, mock_cloud_fixture, hass_read_only_user: MockUser + hass: HomeAssistant, hass_read_only_user: MockUser ) -> None: """Setup cloud component and test services.""" - cloud = hass.data[DOMAIN] + cloud = hass.data[DATA_CLOUD] assert hass.services.has_service(DOMAIN, "remote_connect") assert hass.services.has_service(DOMAIN, "remote_disconnect") @@ -108,7 +115,8 @@ async def test_remote_services( assert mock_disconnect.called is False -async def test_shutdown_event(hass: HomeAssistant, mock_cloud_fixture) -> None: +@pytest.mark.usefixtures("mock_cloud_fixture") +async def test_shutdown_event(hass: HomeAssistant) -> None: """Test if the cloud will stop on shutdown event.""" with patch("hass_nabucasa.Cloud.stop") as mock_stop: hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) @@ -130,7 +138,7 @@ async def test_setup_existing_cloud_user( { "http": {}, "cloud": { - cloud.CONF_MODE: cloud.MODE_DEV, + CONF_MODE: MODE_DEV, "cognito_client_id": "test-cognito_client_id", "user_pool_id": "test-user_pool_id", "region": "test-region", @@ -143,9 +151,10 @@ async def test_setup_existing_cloud_user( assert hass_storage[STORAGE_KEY]["data"]["cloud_user"] == user.id -async def test_on_connect(hass: HomeAssistant, mock_cloud_fixture) -> None: +@pytest.mark.usefixtures("mock_cloud_fixture") +async def test_on_connect(hass: HomeAssistant) -> None: """Test cloud on connect triggers.""" - cl: Cloud[cloud.client.CloudClient] = hass.data["cloud"] + cl = hass.data[DATA_CLOUD] assert len(cl.iot._on_connect) == 3 @@ -157,7 +166,7 @@ async def test_on_connect(hass: HomeAssistant, mock_cloud_fixture) -> None: nonlocal cloud_states cloud_states.append(cloud_state) - cloud.async_listen_connection_change(hass, handle_state) + async_listen_connection_change(hass, handle_state) assert "async_setup" in str(cl.iot._on_connect[-1]) await 
cl.iot._on_connect[-1]() @@ -179,12 +188,12 @@ async def test_on_connect(hass: HomeAssistant, mock_cloud_fixture) -> None: assert len(mock_load.mock_calls) == 0 assert len(cloud_states) == 1 - assert cloud_states[-1] == cloud.CloudConnectionState.CLOUD_CONNECTED + assert cloud_states[-1] == CloudConnectionState.CLOUD_CONNECTED await cl.iot._on_connect[-1]() await hass.async_block_till_done() assert len(cloud_states) == 2 - assert cloud_states[-1] == cloud.CloudConnectionState.CLOUD_CONNECTED + assert cloud_states[-1] == CloudConnectionState.CLOUD_CONNECTED assert len(cl.iot._on_disconnect) == 2 assert "async_setup" in str(cl.iot._on_disconnect[-1]) @@ -192,39 +201,40 @@ async def test_on_connect(hass: HomeAssistant, mock_cloud_fixture) -> None: await hass.async_block_till_done() assert len(cloud_states) == 3 - assert cloud_states[-1] == cloud.CloudConnectionState.CLOUD_DISCONNECTED + assert cloud_states[-1] == CloudConnectionState.CLOUD_DISCONNECTED await cl.iot._on_disconnect[-1]() await hass.async_block_till_done() assert len(cloud_states) == 4 - assert cloud_states[-1] == cloud.CloudConnectionState.CLOUD_DISCONNECTED + assert cloud_states[-1] == CloudConnectionState.CLOUD_DISCONNECTED -async def test_remote_ui_url(hass: HomeAssistant, mock_cloud_fixture) -> None: +@pytest.mark.usefixtures("mock_cloud_fixture") +async def test_remote_ui_url(hass: HomeAssistant) -> None: """Test getting remote ui url.""" - cl = hass.data["cloud"] + cl = hass.data[DATA_CLOUD] # Not logged in - with pytest.raises(cloud.CloudNotAvailable): - cloud.async_remote_ui_url(hass) + with pytest.raises(CloudNotAvailable): + async_remote_ui_url(hass) - with patch.object(cloud, "async_is_logged_in", return_value=True): + with patch("homeassistant.components.cloud.async_is_logged_in", return_value=True): # Remote not enabled - with pytest.raises(cloud.CloudNotAvailable): - cloud.async_remote_ui_url(hass) + with pytest.raises(CloudNotAvailable): + async_remote_ui_url(hass) with 
patch.object(cl.remote, "connect"): await cl.client.prefs.async_update(remote_enabled=True) await hass.async_block_till_done() # No instance domain - with pytest.raises(cloud.CloudNotAvailable): - cloud.async_remote_ui_url(hass) + with pytest.raises(CloudNotAvailable): + async_remote_ui_url(hass) # Remote finished initializing cl.client.prefs._prefs["remote_domain"] = "example.com" - assert cloud.async_remote_ui_url(hass) == "https://example.com" + assert async_remote_ui_url(hass) == "https://example.com" async def test_async_get_or_create_cloudhook( diff --git a/tests/components/cloud/test_repairs.py b/tests/components/cloud/test_repairs.py index 7ca20d84bce..d165a129dbe 100644 --- a/tests/components/cloud/test_repairs.py +++ b/tests/components/cloud/test_repairs.py @@ -6,8 +6,10 @@ from unittest.mock import patch import pytest -from homeassistant.components.cloud import DOMAIN -import homeassistant.components.cloud.repairs as cloud_repairs +from homeassistant.components.cloud.const import DOMAIN +from homeassistant.components.cloud.repairs import ( + async_manage_legacy_subscription_issue, +) from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN from homeassistant.core import HomeAssistant import homeassistant.helpers.issue_registry as ir @@ -65,12 +67,12 @@ async def test_legacy_subscription_delete_issue_if_no_longer_legacy( issue_registry: ir.IssueRegistry, ) -> None: """Test that we delete the legacy subscription issue if no longer legacy.""" - cloud_repairs.async_manage_legacy_subscription_issue(hass, {"provider": "legacy"}) + async_manage_legacy_subscription_issue(hass, {"provider": "legacy"}) assert issue_registry.async_get_issue( domain="cloud", issue_id="legacy_subscription" ) - cloud_repairs.async_manage_legacy_subscription_issue(hass, {}) + async_manage_legacy_subscription_issue(hass, {}) assert not issue_registry.async_get_issue( domain="cloud", issue_id="legacy_subscription" ) @@ -93,7 +95,7 @@ async def 
test_legacy_subscription_repair_flow( json={"url": "https://paypal.com"}, ) - cloud_repairs.async_manage_legacy_subscription_issue(hass, {"provider": "legacy"}) + async_manage_legacy_subscription_issue(hass, {"provider": "legacy"}) repair_issue = issue_registry.async_get_issue( domain="cloud", issue_id="legacy_subscription" ) @@ -174,7 +176,7 @@ async def test_legacy_subscription_repair_flow_timeout( status=403, ) - cloud_repairs.async_manage_legacy_subscription_issue(hass, {"provider": "legacy"}) + async_manage_legacy_subscription_issue(hass, {"provider": "legacy"}) repair_issue = issue_registry.async_get_issue( domain="cloud", issue_id="legacy_subscription" ) diff --git a/tests/components/cloud/test_stt.py b/tests/components/cloud/test_stt.py index a20325d6dc3..02acda1450e 100644 --- a/tests/components/cloud/test_stt.py +++ b/tests/components/cloud/test_stt.py @@ -1,5 +1,6 @@ """Test the speech-to-text platform for the cloud integration.""" +from collections.abc import AsyncGenerator from copy import deepcopy from http import HTTPStatus from typing import Any @@ -7,10 +8,9 @@ from unittest.mock import AsyncMock, MagicMock, patch from hass_nabucasa.voice import STTResponse, VoiceError import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY -from homeassistant.components.cloud import DOMAIN +from homeassistant.components.cloud.const import DOMAIN from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/cloud/test_system_health.py b/tests/components/cloud/test_system_health.py index c6e738011d6..60b23e47fec 100644 --- a/tests/components/cloud/test_system_health.py +++ b/tests/components/cloud/test_system_health.py @@ -8,7 +8,7 @@ from unittest.mock import MagicMock from aiohttp import ClientError from hass_nabucasa.remote import CertificateStatus -from 
homeassistant.components.cloud import DOMAIN +from homeassistant.components.cloud.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/cloud/test_tts.py b/tests/components/cloud/test_tts.py index 00466d0d177..52a9bc19ea2 100644 --- a/tests/components/cloud/test_tts.py +++ b/tests/components/cloud/test_tts.py @@ -1,6 +1,6 @@ """Tests for cloud tts.""" -from collections.abc import Callable, Coroutine +from collections.abc import AsyncGenerator, Callable, Coroutine from copy import deepcopy from http import HTTPStatus from typing import Any @@ -8,11 +8,11 @@ from unittest.mock import AsyncMock, MagicMock, patch from hass_nabucasa.voice import TTS_VOICES, VoiceError, VoiceTokenError import pytest -from typing_extensions import AsyncGenerator import voluptuous as vol from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY -from homeassistant.components.cloud import DOMAIN, const, tts +from homeassistant.components.cloud.const import DEFAULT_TTS_DEFAULT_VOICE, DOMAIN +from homeassistant.components.cloud.tts import PLATFORM_SCHEMA, SUPPORT_LANGUAGES, Voice from homeassistant.components.media_player import ( ATTR_MEDIA_CONTENT_ID, DOMAIN as DOMAIN_MP, @@ -57,33 +57,30 @@ async def internal_url_mock(hass: HomeAssistant) -> None: def test_default_exists() -> None: """Test our default language exists.""" - assert const.DEFAULT_TTS_DEFAULT_VOICE[0] in TTS_VOICES - assert ( - const.DEFAULT_TTS_DEFAULT_VOICE[1] - in TTS_VOICES[const.DEFAULT_TTS_DEFAULT_VOICE[0]] - ) + assert DEFAULT_TTS_DEFAULT_VOICE[0] in TTS_VOICES + assert DEFAULT_TTS_DEFAULT_VOICE[1] in TTS_VOICES[DEFAULT_TTS_DEFAULT_VOICE[0]] def test_schema() -> None: """Test schema.""" - assert "nl-NL" in tts.SUPPORT_LANGUAGES + assert "nl-NL" in SUPPORT_LANGUAGES - processed = tts.PLATFORM_SCHEMA({"platform": "cloud", "language": "nl-NL"}) + processed = PLATFORM_SCHEMA({"platform": "cloud", "language": 
"nl-NL"}) assert processed["gender"] == "female" with pytest.raises(vol.Invalid): - tts.PLATFORM_SCHEMA( + PLATFORM_SCHEMA( {"platform": "cloud", "language": "non-existing", "gender": "female"} ) with pytest.raises(vol.Invalid): - tts.PLATFORM_SCHEMA( + PLATFORM_SCHEMA( {"platform": "cloud", "language": "nl-NL", "gender": "not-supported"} ) # Should not raise - tts.PLATFORM_SCHEMA({"platform": "cloud", "language": "nl-NL", "gender": "female"}) - tts.PLATFORM_SCHEMA({"platform": "cloud"}) + PLATFORM_SCHEMA({"platform": "cloud", "language": "nl-NL", "gender": "female"}) + PLATFORM_SCHEMA({"platform": "cloud"}) @pytest.mark.parametrize( @@ -188,7 +185,7 @@ async def test_provider_properties( assert "nl-NL" in engine.supported_languages supported_voices = engine.async_get_supported_voices("nl-NL") assert supported_voices is not None - assert tts.Voice("ColetteNeural", "ColetteNeural") in supported_voices + assert Voice("ColetteNeural", "ColetteNeural") in supported_voices supported_voices = engine.async_get_supported_voices("missing_language") assert supported_voices is None diff --git a/tests/components/cloudflare/__init__.py b/tests/components/cloudflare/__init__.py index 5e1529a9da8..9827355c9cc 100644 --- a/tests/components/cloudflare/__init__.py +++ b/tests/components/cloudflare/__init__.py @@ -3,7 +3,7 @@ from __future__ import annotations from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch import pycfdns @@ -80,25 +80,20 @@ async def init_integration( return entry -def _get_mock_client( - zone: pycfdns.ZoneModel | UndefinedType = UNDEFINED, - records: list[pycfdns.RecordModel] | UndefinedType = UNDEFINED, -): - client: pycfdns.Client = AsyncMock() +def get_mock_client() -> Mock: + """Return of Mock of pycfdns.Client.""" + client = Mock() - client.list_zones = AsyncMock( - return_value=[MOCK_ZONE if zone is UNDEFINED else zone] - ) - client.list_dns_records = AsyncMock( - 
return_value=MOCK_ZONE_RECORDS if records is UNDEFINED else records - ) + client.list_zones = AsyncMock(return_value=[MOCK_ZONE]) + client.list_dns_records = AsyncMock(return_value=MOCK_ZONE_RECORDS) client.update_dns_record = AsyncMock(return_value=None) return client -def _patch_async_setup_entry(return_value=True): +def patch_async_setup_entry() -> AsyncMock: + """Patch the async_setup_entry method and return a mock.""" return patch( "homeassistant.components.cloudflare.async_setup_entry", - return_value=return_value, + return_value=True, ) diff --git a/tests/components/cloudflare/conftest.py b/tests/components/cloudflare/conftest.py index 81b52dd291d..977126f39a3 100644 --- a/tests/components/cloudflare/conftest.py +++ b/tests/components/cloudflare/conftest.py @@ -1,16 +1,17 @@ """Define fixtures available for all tests.""" -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import MagicMock, patch import pytest -from . import _get_mock_client +from . 
import get_mock_client @pytest.fixture -def cfupdate(hass): +def cfupdate() -> Generator[MagicMock]: """Mock the CloudflareUpdater for easier testing.""" - mock_cfupdate = _get_mock_client() + mock_cfupdate = get_mock_client() with patch( "homeassistant.components.cloudflare.pycfdns.Client", return_value=mock_cfupdate, @@ -19,11 +20,11 @@ def cfupdate(hass): @pytest.fixture -def cfupdate_flow(hass): +def cfupdate_flow() -> Generator[MagicMock]: """Mock the CloudflareUpdater for easier config flow testing.""" - mock_cfupdate = _get_mock_client() + mock_cfupdate = get_mock_client() with patch( - "homeassistant.components.cloudflare.pycfdns.Client", + "homeassistant.components.cloudflare.config_flow.pycfdns.Client", return_value=mock_cfupdate, ) as mock_api: yield mock_api diff --git a/tests/components/cloudflare/test_config_flow.py b/tests/components/cloudflare/test_config_flow.py index 4b0df91bc60..1278113c0c7 100644 --- a/tests/components/cloudflare/test_config_flow.py +++ b/tests/components/cloudflare/test_config_flow.py @@ -1,5 +1,7 @@ """Test the Cloudflare config flow.""" +from unittest.mock import MagicMock + import pycfdns from homeassistant.components.cloudflare.const import CONF_RECORDS, DOMAIN @@ -13,13 +15,13 @@ from . 
import ( USER_INPUT, USER_INPUT_RECORDS, USER_INPUT_ZONE, - _patch_async_setup_entry, + patch_async_setup_entry, ) from tests.common import MockConfigEntry -async def test_user_form(hass: HomeAssistant, cfupdate_flow) -> None: +async def test_user_form(hass: HomeAssistant, cfupdate_flow: MagicMock) -> None: """Test we get the user initiated form.""" result = await hass.config_entries.flow.async_init( @@ -49,7 +51,7 @@ async def test_user_form(hass: HomeAssistant, cfupdate_flow) -> None: assert result["step_id"] == "records" assert result["errors"] is None - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_RECORDS, @@ -70,7 +72,9 @@ async def test_user_form(hass: HomeAssistant, cfupdate_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_form_cannot_connect(hass: HomeAssistant, cfupdate_flow) -> None: +async def test_user_form_cannot_connect( + hass: HomeAssistant, cfupdate_flow: MagicMock +) -> None: """Test we handle cannot connect error.""" instance = cfupdate_flow.return_value @@ -88,7 +92,9 @@ async def test_user_form_cannot_connect(hass: HomeAssistant, cfupdate_flow) -> N assert result["errors"] == {"base": "cannot_connect"} -async def test_user_form_invalid_auth(hass: HomeAssistant, cfupdate_flow) -> None: +async def test_user_form_invalid_auth( + hass: HomeAssistant, cfupdate_flow: MagicMock +) -> None: """Test we handle invalid auth error.""" instance = cfupdate_flow.return_value @@ -107,7 +113,7 @@ async def test_user_form_invalid_auth(hass: HomeAssistant, cfupdate_flow) -> Non async def test_user_form_unexpected_exception( - hass: HomeAssistant, cfupdate_flow + hass: HomeAssistant, cfupdate_flow: MagicMock ) -> None: """Test we handle unexpected exception.""" instance = cfupdate_flow.return_value @@ -140,7 +146,7 @@ async def test_user_form_single_instance_allowed(hass: HomeAssistant) 
-> None: assert result["reason"] == "single_instance_allowed" -async def test_reauth_flow(hass: HomeAssistant, cfupdate_flow) -> None: +async def test_reauth_flow(hass: HomeAssistant, cfupdate_flow: MagicMock) -> None: """Test the reauthentication configuration flow.""" entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG) entry.add_to_hass(hass) @@ -157,7 +163,7 @@ async def test_reauth_flow(hass: HomeAssistant, cfupdate_flow) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_API_TOKEN: "other_token"}, diff --git a/tests/components/cloudflare/test_init.py b/tests/components/cloudflare/test_init.py index 3b2a6803566..d629607e503 100644 --- a/tests/components/cloudflare/test_init.py +++ b/tests/components/cloudflare/test_init.py @@ -1,7 +1,7 @@ """Test the Cloudflare integration.""" from datetime import timedelta -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pycfdns import pytest @@ -23,7 +23,7 @@ from . 
import ENTRY_CONFIG, init_integration from tests.common import MockConfigEntry, async_fire_time_changed -async def test_unload_entry(hass: HomeAssistant, cfupdate) -> None: +async def test_unload_entry(hass: HomeAssistant, cfupdate: MagicMock) -> None: """Test successful unload of entry.""" entry = await init_integration(hass) @@ -42,7 +42,7 @@ async def test_unload_entry(hass: HomeAssistant, cfupdate) -> None: [pycfdns.ComunicationException()], ) async def test_async_setup_raises_entry_not_ready( - hass: HomeAssistant, cfupdate, side_effect + hass: HomeAssistant, cfupdate: MagicMock, side_effect: Exception ) -> None: """Test that it throws ConfigEntryNotReady when exception occurs during setup.""" instance = cfupdate.return_value @@ -57,7 +57,7 @@ async def test_async_setup_raises_entry_not_ready( async def test_async_setup_raises_entry_auth_failed( - hass: HomeAssistant, cfupdate + hass: HomeAssistant, cfupdate: MagicMock ) -> None: """Test that it throws ConfigEntryAuthFailed when exception occurs during setup.""" instance = cfupdate.return_value @@ -84,7 +84,7 @@ async def test_async_setup_raises_entry_auth_failed( async def test_integration_services( - hass: HomeAssistant, cfupdate, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, cfupdate: MagicMock, caplog: pytest.LogCaptureFixture ) -> None: """Test integration services.""" instance = cfupdate.return_value @@ -120,7 +120,9 @@ async def test_integration_services( assert "All target records are up to date" not in caplog.text -async def test_integration_services_with_issue(hass: HomeAssistant, cfupdate) -> None: +async def test_integration_services_with_issue( + hass: HomeAssistant, cfupdate: MagicMock +) -> None: """Test integration services with issue.""" instance = cfupdate.return_value @@ -145,7 +147,7 @@ async def test_integration_services_with_issue(hass: HomeAssistant, cfupdate) -> async def test_integration_services_with_nonexisting_record( - hass: HomeAssistant, cfupdate, caplog: 
pytest.LogCaptureFixture + hass: HomeAssistant, cfupdate: MagicMock, caplog: pytest.LogCaptureFixture ) -> None: """Test integration services.""" instance = cfupdate.return_value @@ -185,7 +187,7 @@ async def test_integration_services_with_nonexisting_record( async def test_integration_update_interval( hass: HomeAssistant, - cfupdate, + cfupdate: MagicMock, caplog: pytest.LogCaptureFixture, ) -> None: """Test integration update interval.""" diff --git a/tests/components/co2signal/conftest.py b/tests/components/co2signal/conftest.py index 04ab6db7464..d5cca448569 100644 --- a/tests/components/co2signal/conftest.py +++ b/tests/components/co2signal/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Electricity maps integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.co2signal import DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/co2signal/test_diagnostics.py b/tests/components/co2signal/test_diagnostics.py index edc0007952b..3d5e1a0580b 100644 --- a/tests/components/co2signal/test_diagnostics.py +++ b/tests/components/co2signal/test_diagnostics.py @@ -2,6 +2,7 @@ import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -20,4 +21,4 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/coinbase/common.py b/tests/components/coinbase/common.py index 3421c4ce838..1a141c88bc3 100644 --- a/tests/components/coinbase/common.py +++ b/tests/components/coinbase/common.py @@ -5,13 +5,14 @@ from homeassistant.components.coinbase.const import ( CONF_EXCHANGE_RATES, DOMAIN, ) -from homeassistant.const import 
CONF_API_KEY, CONF_API_TOKEN +from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION from .const import ( GOOD_CURRENCY_2, GOOD_EXCHANGE_RATE, GOOD_EXCHANGE_RATE_2, MOCK_ACCOUNTS_RESPONSE, + MOCK_ACCOUNTS_RESPONSE_V3, ) from tests.common import MockConfigEntry @@ -20,7 +21,7 @@ from tests.common import MockConfigEntry class MockPagination: """Mock pagination result.""" - def __init__(self, value=None): + def __init__(self, value=None) -> None: """Load simple pagination for tests.""" self.next_starting_after = value @@ -28,7 +29,7 @@ class MockPagination: class MockGetAccounts: """Mock accounts with pagination.""" - def __init__(self, starting_after=0): + def __init__(self, starting_after=0) -> None: """Init mocked object, forced to return two at a time.""" if (target_end := starting_after + 2) >= ( max_end := len(MOCK_ACCOUNTS_RESPONSE) @@ -54,6 +55,33 @@ def mocked_get_accounts(_, **kwargs): return MockGetAccounts(**kwargs) +class MockGetAccountsV3: + """Mock accounts with pagination.""" + + def __init__(self, cursor="") -> None: + """Init mocked object, forced to return two at a time.""" + ids = [account["uuid"] for account in MOCK_ACCOUNTS_RESPONSE_V3] + start = ids.index(cursor) if cursor else 0 + + has_next = (target_end := start + 2) < len(MOCK_ACCOUNTS_RESPONSE_V3) + end = target_end if has_next else -1 + next_cursor = ids[end] if has_next else ids[-1] + self.accounts = { + "accounts": MOCK_ACCOUNTS_RESPONSE_V3[start:end], + "has_next": has_next, + "cursor": next_cursor, + } + + def __getitem__(self, item): + """Handle subscript request.""" + return self.accounts[item] + + +def mocked_get_accounts_v3(_, **kwargs): + """Return simplified accounts using mock.""" + return MockGetAccountsV3(**kwargs) + + def mock_get_current_user(): """Return a simplified mock user.""" return { @@ -74,6 +102,19 @@ def mock_get_exchange_rates(): } +def mock_get_portfolios(): + """Return a mocked list of Coinbase portfolios.""" + return { + "portfolios": [ 
+ { + "name": "Default", + "uuid": "123456", + "type": "DEFAULT", + } + ] + } + + async def init_mock_coinbase(hass, currencies=None, rates=None): """Init Coinbase integration for testing.""" config_entry = MockConfigEntry( @@ -93,3 +134,28 @@ async def init_mock_coinbase(hass, currencies=None, rates=None): await hass.async_block_till_done() return config_entry + + +async def init_mock_coinbase_v3(hass, currencies=None, rates=None): + """Init Coinbase integration for testing.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + entry_id="080272b77a4f80c41b94d7cdc86fd826", + unique_id=None, + title="Test User v3", + data={ + CONF_API_KEY: "organizations/123456", + CONF_API_TOKEN: "AbCDeF", + CONF_API_VERSION: "v3", + }, + options={ + CONF_CURRENCIES: currencies or [], + CONF_EXCHANGE_RATES: rates or [], + }, + ) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/coinbase/const.py b/tests/components/coinbase/const.py index dcd14555ca3..5fbba11eb2d 100644 --- a/tests/components/coinbase/const.py +++ b/tests/components/coinbase/const.py @@ -31,3 +31,31 @@ MOCK_ACCOUNTS_RESPONSE = [ "type": "fiat", }, ] + +MOCK_ACCOUNTS_RESPONSE_V3 = [ + { + "uuid": "123456789", + "name": "BTC Wallet", + "currency": GOOD_CURRENCY, + "available_balance": {"value": "0.00001", "currency": GOOD_CURRENCY}, + "type": "ACCOUNT_TYPE_CRYPTO", + "hold": {"value": "0", "currency": GOOD_CURRENCY}, + }, + { + "uuid": "abcdefg", + "name": "BTC Vault", + "currency": GOOD_CURRENCY, + "available_balance": {"value": "100.00", "currency": GOOD_CURRENCY}, + "type": "ACCOUNT_TYPE_VAULT", + "hold": {"value": "0", "currency": GOOD_CURRENCY}, + }, + { + "uuid": "987654321", + "name": "USD Wallet", + "currency": GOOD_CURRENCY_2, + "available_balance": {"value": "9.90", "currency": GOOD_CURRENCY_2}, + "type": "ACCOUNT_TYPE_FIAT", + "ready": True, + "hold": {"value": "0", 
"currency": GOOD_CURRENCY_2}, + }, +] diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index 9079a7682c8..4f9e75dc38b 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -3,40 +3,25 @@ dict({ 'accounts': list([ dict({ - 'balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'BTC', - }), - 'currency': dict({ - 'code': 'BTC', - }), + 'amount': '**REDACTED**', + 'currency': 'BTC', 'id': '**REDACTED**', + 'is_vault': False, 'name': 'BTC Wallet', - 'type': 'wallet', }), dict({ - 'balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'BTC', - }), - 'currency': dict({ - 'code': 'BTC', - }), + 'amount': '**REDACTED**', + 'currency': 'BTC', 'id': '**REDACTED**', + 'is_vault': True, 'name': 'BTC Vault', - 'type': 'vault', }), dict({ - 'balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'USD', - }), - 'currency': dict({ - 'code': 'USD', - }), + 'amount': '**REDACTED**', + 'currency': 'USD', 'id': '**REDACTED**', + 'is_vault': False, 'name': 'USD Wallet', - 'type': 'fiat', }), ]), 'entry': dict({ diff --git a/tests/components/coinbase/test_config_flow.py b/tests/components/coinbase/test_config_flow.py index f213392bb1e..aa2c6208e0f 100644 --- a/tests/components/coinbase/test_config_flow.py +++ b/tests/components/coinbase/test_config_flow.py @@ -14,15 +14,18 @@ from homeassistant.components.coinbase.const import ( CONF_EXCHANGE_RATES, DOMAIN, ) -from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN +from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .common import ( init_mock_coinbase, + init_mock_coinbase_v3, mock_get_current_user, mock_get_exchange_rates, + mock_get_portfolios, mocked_get_accounts, + mocked_get_accounts_v3, ) from .const import 
BAD_CURRENCY, BAD_EXCHANGE_RATE, GOOD_CURRENCY, GOOD_EXCHANGE_RATE @@ -53,16 +56,17 @@ async def test_form(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - { - CONF_API_KEY: "123456", - CONF_API_TOKEN: "AbCDeF", - }, + {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"}, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test User" - assert result2["data"] == {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"} + assert result2["data"] == { + CONF_API_KEY: "123456", + CONF_API_TOKEN: "AbCDeF", + CONF_API_VERSION: "v2", + } assert len(mock_setup_entry.mock_calls) == 1 @@ -314,3 +318,77 @@ async def test_option_catch_all_exception(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + + +async def test_form_v3(hass: HomeAssistant) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3), + patch( + "coinbase.rest.RESTClient.get_portfolios", + return_value=mock_get_portfolios(), + ), + patch( + "coinbase.rest.RESTBase.get", + return_value={"data": mock_get_exchange_rates()}, + ), + patch( + "homeassistant.components.coinbase.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "organizations/123456", CONF_API_TOKEN: "AbCDeF"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Default" + assert result2["data"] == { + CONF_API_KEY: "organizations/123456", + CONF_API_TOKEN: "AbCDeF", + CONF_API_VERSION: "v3", + } + assert 
len(mock_setup_entry.mock_calls) == 1 + + +async def test_option_form_v3(hass: HomeAssistant) -> None: + """Test we handle a good wallet currency option.""" + + with ( + patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3), + patch( + "coinbase.rest.RESTClient.get_portfolios", + return_value=mock_get_portfolios(), + ), + patch( + "coinbase.rest.RESTBase.get", + return_value={"data": mock_get_exchange_rates()}, + ), + patch( + "homeassistant.components.coinbase.update_listener" + ) as mock_update_listener, + ): + config_entry = await init_mock_coinbase_v3(hass) + await hass.async_block_till_done() + result = await hass.config_entries.options.async_init(config_entry.entry_id) + await hass.async_block_till_done() + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_CURRENCIES: [GOOD_CURRENCY], + CONF_EXCHANGE_RATES: [GOOD_EXCHANGE_RATE], + CONF_EXCHANGE_PRECISION: 5, + }, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + await hass.async_block_till_done() + assert len(mock_update_listener.mock_calls) == 1 diff --git a/tests/components/coinbase/test_diagnostics.py b/tests/components/coinbase/test_diagnostics.py index e30bdef30b8..0e06c172c37 100644 --- a/tests/components/coinbase/test_diagnostics.py +++ b/tests/components/coinbase/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -40,4 +41,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/comfoconnect/test_sensor.py b/tests/components/comfoconnect/test_sensor.py index cea5ed0122f..fdecfa5b1c7 100644 --- a/tests/components/comfoconnect/test_sensor.py +++ b/tests/components/comfoconnect/test_sensor.py @@ 
-1,7 +1,7 @@ """Tests for the comfoconnect sensor platform.""" -# import json -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import MagicMock, patch import pytest @@ -28,7 +28,7 @@ VALID_CONFIG = { @pytest.fixture -def mock_bridge_discover(): +def mock_bridge_discover() -> Generator[MagicMock]: """Mock the bridge discover method.""" with patch("pycomfoconnect.bridge.Bridge.discover") as mock_bridge_discover: mock_bridge_discover.return_value[0].uuid.hex.return_value = "00" @@ -36,7 +36,7 @@ def mock_bridge_discover(): @pytest.fixture -def mock_comfoconnect_command(): +def mock_comfoconnect_command() -> Generator[MagicMock]: """Mock the ComfoConnect connect method.""" with patch( "pycomfoconnect.comfoconnect.ComfoConnect._command" @@ -45,14 +45,19 @@ def mock_comfoconnect_command(): @pytest.fixture -async def setup_sensor(hass, mock_bridge_discover, mock_comfoconnect_command): +async def setup_sensor( + hass: HomeAssistant, + mock_bridge_discover: MagicMock, + mock_comfoconnect_command: MagicMock, +) -> None: """Set up demo sensor component.""" with assert_setup_component(1, DOMAIN): await async_setup_component(hass, DOMAIN, VALID_CONFIG) await hass.async_block_till_done() -async def test_sensors(hass: HomeAssistant, setup_sensor) -> None: +@pytest.mark.usefixtures("setup_sensor") +async def test_sensors(hass: HomeAssistant) -> None: """Test the sensors.""" state = hass.states.get("sensor.comfoairq_inside_humidity") assert state is not None diff --git a/tests/components/command_line/test_binary_sensor.py b/tests/components/command_line/test_binary_sensor.py index fd726ab77a4..5d1cd845e27 100644 --- a/tests/components/command_line/test_binary_sensor.py +++ b/tests/components/command_line/test_binary_sensor.py @@ -56,6 +56,24 @@ async def test_setup_integration_yaml( assert entity_state.name == "Test" +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + 
await setup.async_setup_component( + hass, + "binary_sensor", + { + "binary_sensor": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/command_line/test_cover.py b/tests/components/command_line/test_cover.py index 7ed48909d79..b81d915c6d5 100644 --- a/tests/components/command_line/test_cover.py +++ b/tests/components/command_line/test_cover.py @@ -36,6 +36,24 @@ from . import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "cover", + { + "cover": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + async def test_no_poll_when_cover_has_no_command_state(hass: HomeAssistant) -> None: """Test that the cover does not polls when there's no state command.""" diff --git a/tests/components/command_line/test_notify.py b/tests/components/command_line/test_notify.py index 98bfb856bb8..6898b44f062 100644 --- a/tests/components/command_line/test_notify.py +++ b/tests/components/command_line/test_notify.py @@ -3,6 +3,7 @@ from __future__ import annotations import os +from pathlib import Path import subprocess import tempfile from unittest.mock import patch @@ -15,6 +16,24 @@ from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN from homeassistant.core import HomeAssistant +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "notify", + { + "notify": { + "platform": "command_line", + "command": "echo 1", 
+ "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + @pytest.mark.parametrize( "get_config", [ @@ -78,9 +97,7 @@ async def test_command_line_output(hass: HomeAssistant) -> None: await hass.services.async_call( NOTIFY_DOMAIN, "test3", {"message": message}, blocking=True ) - with open(filename, encoding="UTF-8") as handle: - # the echo command adds a line break - assert message == handle.read() + assert message == await hass.async_add_executor_job(Path(filename).read_text) @pytest.mark.parametrize( diff --git a/tests/components/command_line/test_sensor.py b/tests/components/command_line/test_sensor.py index 26f97e37543..f7879b334cd 100644 --- a/tests/components/command_line/test_sensor.py +++ b/tests/components/command_line/test_sensor.py @@ -27,6 +27,24 @@ from . import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "sensor", + { + "sensor": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + @pytest.mark.parametrize( "get_config", [ @@ -467,6 +485,46 @@ async def test_update_with_unnecessary_json_attrs( assert "key_three" not in entity_state.attributes +@pytest.mark.parametrize( + "get_config", + [ + { + "command_line": [ + { + "sensor": { + "name": "Test", + "command": 'echo \ + {\ + \\"top_level\\": {\ + \\"second_level\\": {\ + \\"key\\": \\"some_json_value\\",\ + \\"another_key\\": \\"another_json_value\\",\ + \\"key_three\\": \\"value_three\\"\ + }\ + }\ + }', + "json_attributes": ["key", "another_key", "key_three"], + "json_attributes_path": "$.top_level.second_level", + } + } + ] + } + ], +) +async def 
test_update_with_json_attrs_with_json_attrs_path( + hass: HomeAssistant, load_yaml_integration: None +) -> None: + """Test using json_attributes_path to select a different part of the json object as root.""" + + entity_state = hass.states.get("sensor.test") + assert entity_state + assert entity_state.attributes["key"] == "some_json_value" + assert entity_state.attributes["another_key"] == "another_json_value" + assert entity_state.attributes["key_three"] == "value_three" + assert "top_level" not in entity_state.attributes + assert "second_level" not in entity_state.attributes + + @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/command_line/test_switch.py b/tests/components/command_line/test_switch.py index c464ded34fb..549e729892c 100644 --- a/tests/components/command_line/test_switch.py +++ b/tests/components/command_line/test_switch.py @@ -37,6 +37,24 @@ from . import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "switch", + { + "switch": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + async def test_state_integration_yaml(hass: HomeAssistant) -> None: """Test with none state.""" with tempfile.TemporaryDirectory() as tempdirname: diff --git a/tests/components/config/conftest.py b/tests/components/config/conftest.py index ffd2f764922..55393a219b1 100644 --- a/tests/components/config/conftest.py +++ b/tests/components/config/conftest.py @@ -1,10 +1,12 @@ """Test fixtures for the config integration.""" +from collections.abc import Generator from contextlib import contextmanager from copy import deepcopy import json import logging from os.path import basename +from typing import Any from unittest.mock 
import patch import pytest @@ -17,7 +19,7 @@ _LOGGER = logging.getLogger(__name__) @contextmanager -def mock_config_store(data=None): +def mock_config_store(data: dict[str, Any] | None = None) -> Generator[dict[str, Any]]: """Mock config yaml store. Data is a dict {'key': {'version': version, 'data': data}} @@ -72,7 +74,7 @@ def mock_config_store(data=None): @pytest.fixture -def hass_config_store(): +def hass_config_store() -> Generator[dict[str, Any]]: """Fixture to mock config yaml store.""" with mock_config_store() as stored_data: yield stored_data diff --git a/tests/components/config/test_area_registry.py b/tests/components/config/test_area_registry.py index fb59725fd29..03a8272e586 100644 --- a/tests/components/config/test_area_registry.py +++ b/tests/components/config/test_area_registry.py @@ -1,11 +1,15 @@ """Test area_registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered from homeassistant.components.config import area_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import area_registry as ar +from homeassistant.util.dt import utcnow from tests.common import ANY from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -21,10 +25,17 @@ async def client_fixture( async def test_list_areas( - client: MockHAClientWebSocket, area_registry: ar.AreaRegistry + client: MockHAClientWebSocket, + area_registry: ar.AreaRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test list entries.""" + created_area1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_area1) area1 = area_registry.async_create("mock 1") + + created_area2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(created_area2) area2 = area_registry.async_create( "mock 2", aliases={"alias_1", "alias_2"}, @@ -46,6 +57,8 @@ async def test_list_areas( "labels": [], "name": "mock 1", "picture": None, + 
"created_at": created_area1.timestamp(), + "modified_at": created_area1.timestamp(), }, { "aliases": unordered(["alias_1", "alias_2"]), @@ -55,12 +68,16 @@ async def test_list_areas( "labels": unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", + "created_at": created_area2.timestamp(), + "modified_at": created_area2.timestamp(), }, ] async def test_create_area( - client: MockHAClientWebSocket, area_registry: ar.AreaRegistry + client: MockHAClientWebSocket, + area_registry: ar.AreaRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test create entry.""" # Create area with only mandatory parameters @@ -78,6 +95,8 @@ async def test_create_area( "labels": [], "name": "mock", "picture": None, + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), } assert len(area_registry.areas) == 1 @@ -104,6 +123,8 @@ async def test_create_area( "labels": unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), } assert len(area_registry.areas) == 2 @@ -161,10 +182,16 @@ async def test_delete_non_existing_area( async def test_update_area( - client: MockHAClientWebSocket, area_registry: ar.AreaRegistry + client: MockHAClientWebSocket, + area_registry: ar.AreaRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) area = area_registry.async_create("mock 1") + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -189,9 +216,14 @@ async def test_update_area( "labels": unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", + "created_at": created_at.timestamp(), + "modified_at": modified_at.timestamp(), } assert len(area_registry.areas) == 1 + modified_at = 
datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "aliases": ["alias_1", "alias_1"], @@ -214,6 +246,8 @@ async def test_update_area( "labels": [], "name": "mock 2", "picture": None, + "created_at": created_at.timestamp(), + "modified_at": modified_at.timestamp(), } assert len(area_registry.areas) == 1 diff --git a/tests/components/config/test_auth_provider_homeassistant.py b/tests/components/config/test_auth_provider_homeassistant.py index 5c5661376e2..6b580013968 100644 --- a/tests/components/config/test_auth_provider_homeassistant.py +++ b/tests/components/config/test_auth_provider_homeassistant.py @@ -38,7 +38,9 @@ async def owner_access_token(hass: HomeAssistant, hass_owner_user: MockUser) -> @pytest.fixture -async def hass_admin_credential(hass, auth_provider): +async def hass_admin_credential( + hass: HomeAssistant, auth_provider: prov_ha.HassAuthProvider +): """Overload credentials to admin user.""" await hass.async_add_executor_job( auth_provider.data.add_auth, "test-user", "test-pass" @@ -181,7 +183,13 @@ async def test_create_auth_duplicate_username( result = await client.receive_json() assert not result["success"], result - assert result["error"]["code"] == "username_exists" + assert result["error"] == { + "code": "home_assistant_error", + "message": "username_already_exists", + "translation_key": "username_already_exists", + "translation_placeholders": {"username": "test-user"}, + "translation_domain": "auth", + } async def test_delete_removes_just_auth( @@ -280,11 +288,19 @@ async def test_delete_unknown_auth( result = await client.receive_json() assert not result["success"], result - assert result["error"]["code"] == "auth_not_found" + assert result["error"] == { + "code": "home_assistant_error", + "message": "user_not_found", + "translation_key": "user_not_found", + "translation_placeholders": None, + "translation_domain": "auth", + } async def test_change_password( - hass: 
HomeAssistant, hass_ws_client: WebSocketGenerator, auth_provider + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + auth_provider: prov_ha.HassAuthProvider, ) -> None: """Test that change password succeeds with valid password.""" client = await hass_ws_client(hass) @@ -306,7 +322,7 @@ async def test_change_password_wrong_pw( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, hass_admin_user: MockUser, - auth_provider, + auth_provider: prov_ha.HassAuthProvider, ) -> None: """Test that change password fails with invalid password.""" @@ -349,7 +365,9 @@ async def test_change_password_no_creds( async def test_admin_change_password_not_owner( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, auth_provider + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + auth_provider: prov_ha.HassAuthProvider, ) -> None: """Test that change password fails when not owner.""" client = await hass_ws_client(hass) @@ -372,7 +390,7 @@ async def test_admin_change_password_not_owner( async def test_admin_change_password_no_user( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, owner_access_token + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, owner_access_token: str ) -> None: """Test that change password fails with unknown user.""" client = await hass_ws_client(hass, owner_access_token) @@ -394,7 +412,7 @@ async def test_admin_change_password_no_user( async def test_admin_change_password_no_cred( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - owner_access_token, + owner_access_token: str, hass_admin_user: MockUser, ) -> None: """Test that change password fails with unknown credential.""" @@ -419,8 +437,8 @@ async def test_admin_change_password_no_cred( async def test_admin_change_password( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - owner_access_token, - auth_provider, + owner_access_token: str, + auth_provider: prov_ha.HassAuthProvider, hass_admin_user: MockUser, ) -> None: """Test that owners can change 
any password.""" @@ -439,3 +457,170 @@ async def test_admin_change_password( assert result["success"], result await auth_provider.async_validate_login("test-user", "new-pass") + + +def _assert_username( + local_auth: prov_ha.HassAuthProvider, username: str, *, should_exist: bool +) -> None: + if any(user["username"] == username for user in local_auth.data.users): + if should_exist: + return # found + + pytest.fail(f"Found user with username {username} when not expected") + + if should_exist: + pytest.fail(f"Did not find user with username {username}") + + +async def _test_admin_change_username( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + local_auth: prov_ha.HassAuthProvider, + hass_admin_user: MockUser, + owner_access_token: str, + new_username: str, +) -> dict[str, Any]: + """Test admin change username ws endpoint.""" + client = await hass_ws_client(hass, owner_access_token) + current_username_user = hass_admin_user.credentials[0].data["username"] + _assert_username(local_auth, current_username_user, should_exist=True) + + await client.send_json_auto_id( + { + "type": "config/auth_provider/homeassistant/admin_change_username", + "user_id": hass_admin_user.id, + "username": new_username, + } + ) + return await client.receive_json() + + +async def test_admin_change_username_success( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + local_auth: prov_ha.HassAuthProvider, + hass_admin_user: MockUser, + owner_access_token: str, +) -> None: + """Test that change username succeeds.""" + current_username = hass_admin_user.credentials[0].data["username"] + new_username = "blabla" + + result = await _test_admin_change_username( + hass, + hass_ws_client, + local_auth, + hass_admin_user, + owner_access_token, + new_username, + ) + + assert result["success"], result + _assert_username(local_auth, current_username, should_exist=False) + _assert_username(local_auth, new_username, should_exist=True) + assert 
hass_admin_user.credentials[0].data["username"] == new_username + # Validate new login works + await local_auth.async_validate_login(new_username, "test-pass") + with pytest.raises(prov_ha.InvalidAuth): + # Verify old login does not work + await local_auth.async_validate_login(current_username, "test-pass") + + +@pytest.mark.parametrize("new_username", [" bla", "bla ", "BlA"]) +async def test_admin_change_username_error_not_normalized( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + local_auth: prov_ha.HassAuthProvider, + hass_admin_user: MockUser, + owner_access_token: str, + new_username: str, +) -> None: + """Test that change username raises error.""" + current_username = hass_admin_user.credentials[0].data["username"] + + result = await _test_admin_change_username( + hass, + hass_ws_client, + local_auth, + hass_admin_user, + owner_access_token, + new_username, + ) + assert not result["success"], result + assert result["error"] == { + "code": "home_assistant_error", + "message": "username_not_normalized", + "translation_key": "username_not_normalized", + "translation_placeholders": {"new_username": new_username}, + "translation_domain": "auth", + } + _assert_username(local_auth, current_username, should_exist=True) + _assert_username(local_auth, new_username, should_exist=False) + assert hass_admin_user.credentials[0].data["username"] == current_username + # Validate old login still works + await local_auth.async_validate_login(current_username, "test-pass") + + +async def test_admin_change_username_not_owner( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, auth_provider +) -> None: + """Test that change username fails when not owner.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "config/auth_provider/homeassistant/admin_change_username", + "user_id": "test-user", + "username": "new-user", + } + ) + + result = await client.receive_json() + assert not result["success"], result + assert 
result["error"]["code"] == "unauthorized" + + # Validate old login still works + await auth_provider.async_validate_login("test-user", "test-pass") + + +async def test_admin_change_username_no_user( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, owner_access_token +) -> None: + """Test that change username fails with unknown user.""" + client = await hass_ws_client(hass, owner_access_token) + + await client.send_json_auto_id( + { + "type": "config/auth_provider/homeassistant/admin_change_username", + "user_id": "non-existing", + "username": "new-username", + } + ) + + result = await client.receive_json() + assert not result["success"], result + assert result["error"]["code"] == "user_not_found" + + +async def test_admin_change_username_no_cred( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + owner_access_token, + hass_admin_user: MockUser, +) -> None: + """Test that change username fails with unknown credential.""" + + hass_admin_user.credentials.clear() + client = await hass_ws_client(hass, owner_access_token) + + await client.send_json_auto_id( + { + "type": "config/auth_provider/homeassistant/admin_change_username", + "user_id": hass_admin_user.id, + "username": "new-username", + } + ) + + result = await client.receive_json() + assert not result["success"], result + assert result["error"]["code"] == "credentials_not_found" diff --git a/tests/components/config/test_automation.py b/tests/components/config/test_automation.py index 9d9ee5d5649..89113070367 100644 --- a/tests/components/config/test_automation.py +++ b/tests/components/config/test_automation.py @@ -7,12 +7,12 @@ from unittest.mock import patch import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import automation from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from 
homeassistant.setup import async_setup_component from homeassistant.util import yaml from tests.typing import ClientSessionGenerator @@ -26,7 +26,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture async def setup_automation( hass: HomeAssistant, - automation_config, + automation_config: dict[str, Any], stub_blueprint_populate: None, ) -> None: """Set up automation integration.""" @@ -36,11 +36,11 @@ async def setup_automation( @pytest.mark.parametrize("automation_config", [{}]) +@pytest.mark.usefixtures("setup_automation") async def test_get_automation_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_automation, + hass_config_store: dict[str, Any], ) -> None: """Test getting automation config.""" with patch.object(config, "SECTIONS", [automation]): @@ -59,11 +59,11 @@ async def test_get_automation_config( @pytest.mark.parametrize("automation_config", [{}]) +@pytest.mark.usefixtures("setup_automation") async def test_update_automation_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_automation, + hass_config_store: dict[str, Any], ) -> None: """Test updating automation config.""" with patch.object(config, "SECTIONS", [automation]): @@ -143,11 +143,11 @@ async def test_update_automation_config( ), ], ) +@pytest.mark.usefixtures("setup_automation") async def test_update_automation_config_with_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_automation, + hass_config_store: dict[str, Any], caplog: pytest.LogCaptureFixture, updated_config: Any, validation_error: str, @@ -196,11 +196,11 @@ async def test_update_automation_config_with_error( ), ], ) +@pytest.mark.usefixtures("setup_automation") async def test_update_automation_config_with_blueprint_substitution_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_automation, + hass_config_store: 
dict[str, Any], caplog: pytest.LogCaptureFixture, updated_config: Any, validation_error: str, @@ -235,11 +235,11 @@ async def test_update_automation_config_with_blueprint_substitution_error( @pytest.mark.parametrize("automation_config", [{}]) +@pytest.mark.usefixtures("setup_automation") async def test_update_remove_key_automation_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_automation, + hass_config_store: dict[str, Any], ) -> None: """Test updating automation config while removing a key.""" with patch.object(config, "SECTIONS", [automation]): @@ -272,11 +272,11 @@ async def test_update_remove_key_automation_config( @pytest.mark.parametrize("automation_config", [{}]) +@pytest.mark.usefixtures("setup_automation") async def test_bad_formatted_automations( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_automation, + hass_config_store: dict[str, Any], ) -> None: """Test that we handle automations without ID.""" with patch.object(config, "SECTIONS", [automation]): @@ -332,12 +332,12 @@ async def test_bad_formatted_automations( ], ], ) +@pytest.mark.usefixtures("setup_automation") async def test_delete_automation( hass: HomeAssistant, hass_client: ClientSessionGenerator, entity_registry: er.EntityRegistry, - hass_config_store, - setup_automation, + hass_config_store: dict[str, Any], ) -> None: """Test deleting an automation.""" @@ -373,12 +373,12 @@ async def test_delete_automation( @pytest.mark.parametrize("automation_config", [{}]) +@pytest.mark.usefixtures("setup_automation") async def test_api_calls_require_admin( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_read_only_access_token: str, - hass_config_store, - setup_automation, + hass_config_store: dict[str, Any], ) -> None: """Test cloud APIs endpoints do not work as a normal user.""" with patch.object(config, "SECTIONS", [automation]): diff --git a/tests/components/config/test_category_registry.py 
b/tests/components/config/test_category_registry.py index b4d171535b6..d4fe6a0c9b9 100644 --- a/tests/components/config/test_category_registry.py +++ b/tests/components/config/test_category_registry.py @@ -1,10 +1,14 @@ """Test category registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.config import category_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import category_registry as cr +from homeassistant.util.dt import utcnow from tests.common import ANY from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -19,6 +23,7 @@ async def client_fixture( return await hass_ws_client(hass) +@pytest.mark.usefixtures("freezer") async def test_list_categories( client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, @@ -53,11 +58,15 @@ async def test_list_categories( assert len(msg["result"]) == 2 assert msg["result"][0] == { "category_id": category1.category_id, + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), "name": "Energy saving", "icon": "mdi:leaf", } assert msg["result"][1] == { "category_id": category2.category_id, + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), "name": "Something else", "icon": "mdi:home", } @@ -71,6 +80,8 @@ async def test_list_categories( assert len(msg["result"]) == 1 assert msg["result"][0] == { "category_id": category3.category_id, + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), "name": "Grocery stores", "icon": "mdi:store", } @@ -79,8 +90,11 @@ async def test_list_categories( async def test_create_category( client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test create entry.""" + created1 = datetime(2024, 2, 14, 12, 0, 0) + freezer.move_to(created1) await client.send_json_auto_id( { "type": "config/category_registry/create", @@ -98,9 
+112,14 @@ async def test_create_category( assert msg["result"] == { "icon": "mdi:leaf", "category_id": ANY, + "created_at": created1.timestamp(), + "modified_at": created1.timestamp(), "name": "Energy saving", } + created2 = datetime(2024, 3, 14, 12, 0, 0) + freezer.move_to(created2) + await client.send_json_auto_id( { "scope": "automation", @@ -117,9 +136,14 @@ async def test_create_category( assert msg["result"] == { "icon": None, "category_id": ANY, + "created_at": created2.timestamp(), + "modified_at": created2.timestamp(), "name": "Something else", } + created3 = datetime(2024, 4, 14, 12, 0, 0) + freezer.move_to(created3) + # Test adding the same one again in a different scope await client.send_json_auto_id( { @@ -139,6 +163,8 @@ async def test_create_category( assert msg["result"] == { "icon": "mdi:leaf", "category_id": ANY, + "created_at": created3.timestamp(), + "modified_at": created3.timestamp(), "name": "Energy saving", } @@ -249,8 +275,11 @@ async def test_delete_non_existing_category( async def test_update_category( client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" + created = datetime(2024, 2, 14, 12, 0, 0) + freezer.move_to(created) category = category_registry.async_create( scope="automation", name="Energy saving", @@ -258,6 +287,9 @@ async def test_update_category( assert len(category_registry.categories) == 1 assert len(category_registry.categories["automation"]) == 1 + modified = datetime(2024, 3, 14, 12, 0, 0) + freezer.move_to(modified) + await client.send_json_auto_id( { "scope": "automation", @@ -275,9 +307,14 @@ async def test_update_category( assert msg["result"] == { "icon": "mdi:left", "category_id": category.category_id, + "created_at": created.timestamp(), + "modified_at": modified.timestamp(), "name": "ENERGY SAVING", } + modified = datetime(2024, 4, 14, 12, 0, 0) + freezer.move_to(modified) + await client.send_json_auto_id( { "scope": 
"automation", @@ -295,6 +332,8 @@ async def test_update_category( assert msg["result"] == { "icon": None, "category_id": category.category_id, + "created_at": created.timestamp(), + "modified_at": modified.timestamp(), "name": "Energy saving", } diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 95ff87c2beb..a4dc91d5355 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -1,10 +1,12 @@ """Test config entries API.""" from collections import OrderedDict +from collections.abc import Generator from http import HTTPStatus from unittest.mock import ANY, AsyncMock, patch from aiohttp.test_utils import TestClient +from freezegun.api import FrozenDateTimeFactory import pytest import voluptuous as vol @@ -17,6 +19,7 @@ from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_flow, config_validation as cv from homeassistant.loader import IntegrationNotFound from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow from tests.common import ( MockConfigEntry, @@ -30,14 +33,14 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator @pytest.fixture -def clear_handlers(): +def clear_handlers() -> Generator[None]: """Clear config entry handlers.""" with patch.dict(HANDLERS, clear=True): yield @pytest.fixture(autouse=True) -def mock_test_component(hass): +def mock_test_component(hass: HomeAssistant) -> None: """Ensure a component called 'test' exists.""" mock_integration(hass, MockModule("test")) @@ -53,7 +56,7 @@ async def client( @pytest.fixture -async def mock_flow(): +def mock_flow() -> Generator[None]: """Mock a config flow.""" class Comp1ConfigFlow(ConfigFlow): @@ -68,9 +71,9 @@ async def mock_flow(): yield -async def test_get_entries( - hass: HomeAssistant, client, clear_handlers, mock_flow -) -> None: +@pytest.mark.usefixtures("freezer") 
+@pytest.mark.usefixtures("clear_handlers", "mock_flow") +async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: """Test get entries.""" mock_integration(hass, MockModule("comp1")) mock_integration( @@ -124,12 +127,15 @@ async def test_get_entries( data = await resp.json() for entry in data: entry.pop("entry_id") + timestamp = utcnow().timestamp() assert data == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -142,10 +148,12 @@ async def test_get_entries( "title": "Test 1", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp2", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -158,10 +166,12 @@ async def test_get_entries( "title": "Test 2", }, { + "created_at": timestamp, "disabled_by": core_ce.ConfigEntryDisabler.USER, "domain": "comp3", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -174,10 +184,12 @@ async def test_get_entries( "title": "Test 3", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp4", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -190,10 +202,12 @@ async def test_get_entries( "title": "Test 4", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp5", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, 
"pref_disable_polling": False, "reason": None, @@ -238,7 +252,7 @@ async def test_get_entries( assert data[0]["domain"] == "comp5" -async def test_remove_entry(hass: HomeAssistant, client) -> None: +async def test_remove_entry(hass: HomeAssistant, client: TestClient) -> None: """Test removing an entry via the API.""" entry = MockConfigEntry( domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED @@ -251,7 +265,7 @@ async def test_remove_entry(hass: HomeAssistant, client) -> None: assert len(hass.config_entries.async_entries()) == 0 -async def test_reload_entry(hass: HomeAssistant, client) -> None: +async def test_reload_entry(hass: HomeAssistant, client: TestClient) -> None: """Test reloading an entry via the API.""" entry = MockConfigEntry( domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED @@ -267,14 +281,14 @@ async def test_reload_entry(hass: HomeAssistant, client) -> None: assert len(hass.config_entries.async_entries()) == 1 -async def test_reload_invalid_entry(hass: HomeAssistant, client) -> None: +async def test_reload_invalid_entry(hass: HomeAssistant, client: TestClient) -> None: """Test reloading an invalid entry via the API.""" resp = await client.post("/api/config/config_entries/entry/invalid/reload") assert resp.status == HTTPStatus.NOT_FOUND async def test_remove_entry_unauth( - hass: HomeAssistant, client, hass_admin_user: MockUser + hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: """Test removing an entry via the API.""" hass_admin_user.groups = [] @@ -286,7 +300,7 @@ async def test_remove_entry_unauth( async def test_reload_entry_unauth( - hass: HomeAssistant, client, hass_admin_user: MockUser + hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: """Test reloading an entry via the API.""" hass_admin_user.groups = [] @@ -300,7 +314,7 @@ async def test_reload_entry_unauth( async def test_reload_entry_in_failed_state( - hass: HomeAssistant, client, hass_admin_user: MockUser + 
hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: """Test reloading an entry via the API that has already failed to unload.""" entry = MockConfigEntry(domain="demo", state=core_ce.ConfigEntryState.FAILED_UNLOAD) @@ -314,7 +328,7 @@ async def test_reload_entry_in_failed_state( async def test_reload_entry_in_setup_retry( - hass: HomeAssistant, client, hass_admin_user: MockUser + hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: """Test reloading an entry via the API that is in setup retry.""" mock_setup_entry = AsyncMock(return_value=True) @@ -356,7 +370,7 @@ async def test_reload_entry_in_setup_retry( ], ) async def test_available_flows( - hass: HomeAssistant, client, type_filter, result + hass: HomeAssistant, client: TestClient, type_filter: str | None, result: set[str] ) -> None: """Test querying the available flows.""" with patch.object( @@ -378,7 +392,7 @@ async def test_available_flows( ############################ -async def test_initialize_flow(hass: HomeAssistant, client) -> None: +async def test_initialize_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can initialize a flow.""" mock_platform(hass, "test.config_flow", None) @@ -427,7 +441,9 @@ async def test_initialize_flow(hass: HomeAssistant, client) -> None: } -async def test_initialize_flow_unmet_dependency(hass: HomeAssistant, client) -> None: +async def test_initialize_flow_unmet_dependency( + hass: HomeAssistant, client: TestClient +) -> None: """Test unmet dependencies are listed.""" mock_platform(hass, "test.config_flow", None) @@ -457,7 +473,7 @@ async def test_initialize_flow_unmet_dependency(hass: HomeAssistant, client) -> async def test_initialize_flow_unauth( - hass: HomeAssistant, client, hass_admin_user: MockUser + hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: """Test we can initialize a flow.""" hass_admin_user.groups = [] @@ -483,7 +499,7 @@ async def test_initialize_flow_unauth( 
assert resp.status == HTTPStatus.UNAUTHORIZED -async def test_abort(hass: HomeAssistant, client) -> None: +async def test_abort(hass: HomeAssistant, client: TestClient) -> None: """Test a flow that aborts.""" mock_platform(hass, "test.config_flow", None) @@ -507,8 +523,8 @@ async def test_abort(hass: HomeAssistant, client) -> None: } -@pytest.mark.usefixtures("enable_custom_integrations") -async def test_create_account(hass: HomeAssistant, client) -> None: +@pytest.mark.usefixtures("enable_custom_integrations", "freezer") +async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: """Test a flow that creates an account.""" mock_platform(hass, "test.config_flow", None) @@ -534,6 +550,7 @@ async def test_create_account(hass: HomeAssistant, client) -> None: entries = hass.config_entries.async_entries("test") assert len(entries) == 1 + timestamp = utcnow().timestamp() data = await resp.json() data.pop("flow_id") assert data == { @@ -542,11 +559,13 @@ async def test_create_account(hass: HomeAssistant, client) -> None: "type": "create_entry", "version": 1, "result": { + "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entries[0].entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -565,8 +584,8 @@ async def test_create_account(hass: HomeAssistant, client) -> None: } -@pytest.mark.usefixtures("enable_custom_integrations") -async def test_two_step_flow(hass: HomeAssistant, client) -> None: +@pytest.mark.usefixtures("enable_custom_integrations", "freezer") +async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can finish a two step flow.""" mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) @@ -614,6 +633,7 @@ async def test_two_step_flow(hass: HomeAssistant, client) -> None: entries = 
hass.config_entries.async_entries("test") assert len(entries) == 1 + timestamp = utcnow().timestamp() data = await resp.json() data.pop("flow_id") assert data == { @@ -622,11 +642,13 @@ async def test_two_step_flow(hass: HomeAssistant, client) -> None: "title": "user-title", "version": 1, "result": { + "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entries[0].entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -646,7 +668,7 @@ async def test_two_step_flow(hass: HomeAssistant, client) -> None: async def test_continue_flow_unauth( - hass: HomeAssistant, client, hass_admin_user: MockUser + hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: """Test we can't finish a two step flow.""" mock_integration( @@ -745,7 +767,7 @@ async def test_get_progress_index_unauth( assert response["error"]["code"] == "unauthorized" -async def test_get_progress_flow(hass: HomeAssistant, client) -> None: +async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can query the API for same result as we get from init a flow.""" mock_platform(hass, "test.config_flow", None) @@ -780,7 +802,7 @@ async def test_get_progress_flow(hass: HomeAssistant, client) -> None: async def test_get_progress_flow_unauth( - hass: HomeAssistant, client, hass_admin_user: MockUser + hass: HomeAssistant, client: TestClient, hass_admin_user: MockUser ) -> None: """Test we can can't query the API for result of flow.""" mock_platform(hass, "test.config_flow", None) @@ -814,7 +836,7 @@ async def test_get_progress_flow_unauth( assert resp2.status == HTTPStatus.UNAUTHORIZED -async def test_options_flow(hass: HomeAssistant, client) -> None: +async def test_options_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can change options.""" class 
TestFlow(core_ce.ConfigFlow): @@ -874,7 +896,11 @@ async def test_options_flow(hass: HomeAssistant, client) -> None: ], ) async def test_options_flow_unauth( - hass: HomeAssistant, client, hass_admin_user: MockUser, endpoint: str, method: str + hass: HomeAssistant, + client: TestClient, + hass_admin_user: MockUser, + endpoint: str, + method: str, ) -> None: """Test unauthorized on options flow.""" @@ -911,7 +937,7 @@ async def test_options_flow_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED -async def test_two_step_options_flow(hass: HomeAssistant, client) -> None: +async def test_two_step_options_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can finish a two step options flow.""" mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) @@ -977,7 +1003,9 @@ async def test_two_step_options_flow(hass: HomeAssistant, client) -> None: } -async def test_options_flow_with_invalid_data(hass: HomeAssistant, client) -> None: +async def test_options_flow_with_invalid_data( + hass: HomeAssistant, client: TestClient +) -> None: """Test an options flow with invalid_data.""" mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) @@ -1051,6 +1079,7 @@ async def test_options_flow_with_invalid_data(hass: HomeAssistant, client) -> No assert data == {"errors": {"choices": "invalid is not a valid option"}} +@pytest.mark.usefixtures("freezer") async def test_get_single( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1072,13 +1101,16 @@ async def test_get_single( ) response = await ws_client.receive_json() + timestamp = utcnow().timestamp() assert response["success"] assert response["result"]["config_entry"] == { + "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entry.entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, 
"pref_disable_polling": False, "reason": None, @@ -1358,8 +1390,9 @@ async def test_ignore_flow_nonexisting( assert response["error"]["code"] == "not_found" +@pytest.mark.usefixtures("clear_handlers", "freezer") async def test_get_matching_entries_ws( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, clear_handlers + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get entries with the websocket api.""" assert await async_setup_component(hass, "config", {}) @@ -1411,13 +1444,16 @@ async def test_get_matching_entries_ws( await ws_client.send_json_auto_id({"type": "config_entries/get"}) response = await ws_client.receive_json() + timestamp = utcnow().timestamp() assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1430,11 +1466,13 @@ async def test_get_matching_entries_ws( "title": "Test 1", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1447,11 +1485,13 @@ async def test_get_matching_entries_ws( "title": "Test 2", }, { + "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1464,11 +1504,13 @@ async def test_get_matching_entries_ws( "title": "Test 3", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, 
"error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1481,11 +1523,13 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp5", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1509,11 +1553,13 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1536,11 +1582,13 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1553,11 +1601,13 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp5", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1580,11 +1630,13 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, 
"error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1597,11 +1649,13 @@ async def test_get_matching_entries_ws( "title": "Test 1", }, { + "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1630,11 +1684,13 @@ async def test_get_matching_entries_ws( assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1647,11 +1703,13 @@ async def test_get_matching_entries_ws( "title": "Test 1", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1664,11 +1722,13 @@ async def test_get_matching_entries_ws( "title": "Test 2", }, { + "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1681,11 +1741,13 @@ async def test_get_matching_entries_ws( "title": "Test 3", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, 
"pref_disable_polling": False, "reason": None, @@ -1698,11 +1760,13 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp5", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1748,8 +1812,11 @@ async def test_get_matching_entries_ws( assert response["success"] is False +@pytest.mark.usefixtures("clear_handlers") async def test_subscribe_entries_ws( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, clear_handlers + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, ) -> None: """Test subscribe entries with the websocket api.""" assert await async_setup_component(hass, "config", {}) @@ -1795,15 +1862,18 @@ async def test_subscribe_entries_ws( assert response["type"] == "result" response = await ws_client.receive_json() assert response["id"] == 5 + created = utcnow().timestamp() assert response["event"] == [ { "type": None, "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1819,11 +1889,13 @@ async def test_subscribe_entries_ws( { "type": None, "entry": { + "created_at": created, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1839,11 +1911,13 @@ async def test_subscribe_entries_ws( { "type": None, "entry": { + "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": 
None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1857,17 +1931,21 @@ async def test_subscribe_entries_ws( }, }, ] + freezer.tick() + modified = utcnow().timestamp() assert hass.config_entries.async_update_entry(entry, title="changed") response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1882,17 +1960,21 @@ async def test_subscribe_entries_ws( "type": "updated", } ] + freezer.tick() + modified = utcnow().timestamp() await hass.config_entries.async_remove(entry.entry_id) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1907,17 +1989,20 @@ async def test_subscribe_entries_ws( "type": "removed", } ] + freezer.tick() await hass.config_entries.async_add(entry) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { + "created_at": entry.created_at.timestamp(), "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": entry.modified_at.timestamp(), "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1934,10 +2019,14 @@ async def test_subscribe_entries_ws( ] 
+@pytest.mark.usefixtures("clear_handlers") async def test_subscribe_entries_ws_filtered( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, clear_handlers + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, ) -> None: """Test subscribe entries with the websocket api with a type filter.""" + created = utcnow().timestamp() assert await async_setup_component(hass, "config", {}) mock_integration(hass, MockModule("comp1")) mock_integration( @@ -1997,11 +2086,13 @@ async def test_subscribe_entries_ws_filtered( { "type": None, "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2017,11 +2108,13 @@ async def test_subscribe_entries_ws_filtered( { "type": None, "entry": { + "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2035,6 +2128,8 @@ async def test_subscribe_entries_ws_filtered( }, }, ] + freezer.tick() + modified = utcnow().timestamp() assert hass.config_entries.async_update_entry(entry, title="changed") assert hass.config_entries.async_update_entry(entry3, title="changed too") assert hass.config_entries.async_update_entry(entry4, title="changed but ignored") @@ -2043,11 +2138,13 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2067,11 
+2164,13 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2086,6 +2185,8 @@ async def test_subscribe_entries_ws_filtered( "type": "updated", } ] + freezer.tick() + modified = utcnow().timestamp() await hass.config_entries.async_remove(entry.entry_id) await hass.config_entries.async_remove(entry2.entry_id) response = await ws_client.receive_json() @@ -2093,11 +2194,13 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2112,17 +2215,20 @@ async def test_subscribe_entries_ws_filtered( "type": "removed", } ] + freezer.tick() await hass.config_entries.async_add(entry) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { + "created_at": entry.created_at.timestamp(), "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": entry.modified_at.timestamp(), "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2139,7 +2245,9 @@ async def test_subscribe_entries_ws_filtered( ] -async def test_flow_with_multiple_schema_errors(hass: HomeAssistant, client) -> None: +async def test_flow_with_multiple_schema_errors( + hass: HomeAssistant, client: TestClient +) -> None: """Test an config flow with multiple schema errors.""" mock_integration( hass, 
MockModule("test", async_setup_entry=AsyncMock(return_value=True)) @@ -2182,7 +2290,7 @@ async def test_flow_with_multiple_schema_errors(hass: HomeAssistant, client) -> async def test_flow_with_multiple_schema_errors_base( - hass: HomeAssistant, client + hass: HomeAssistant, client: TestClient ) -> None: """Test an config flow with multiple schema errors where fields are not in the schema.""" mock_integration( @@ -2225,8 +2333,11 @@ async def test_flow_with_multiple_schema_errors_base( } -@pytest.mark.usefixtures("enable_custom_integrations") -async def test_supports_reconfigure(hass: HomeAssistant, client) -> None: +@pytest.mark.usefixtures("enable_custom_integrations", "freezer") +async def test_supports_reconfigure( + hass: HomeAssistant, + client: TestClient, +) -> None: """Test a flow that support reconfigure step.""" mock_platform(hass, "test.config_flow", None) @@ -2284,6 +2395,7 @@ async def test_supports_reconfigure(hass: HomeAssistant, client) -> None: assert len(entries) == 1 data = await resp.json() + timestamp = utcnow().timestamp() data.pop("flow_id") assert data == { "handler": "test", @@ -2291,11 +2403,13 @@ async def test_supports_reconfigure(hass: HomeAssistant, client) -> None: "type": "create_entry", "version": 1, "result": { + "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entries[0].entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, diff --git a/tests/components/config/test_core.py b/tests/components/config/test_core.py index 7d02063b2b9..4550f2e08e5 100644 --- a/tests/components/config/test_core.py +++ b/tests/components/config/test_core.py @@ -5,11 +5,11 @@ from unittest.mock import Mock, patch import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import core 
from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util, location from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM diff --git a/tests/components/config/test_device_registry.py b/tests/components/config/test_device_registry.py index 804cf29979e..aab898f5fd6 100644 --- a/tests/components/config/test_device_registry.py +++ b/tests/components/config/test_device_registry.py @@ -1,5 +1,8 @@ """Test device_registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered @@ -7,6 +10,7 @@ from homeassistant.components.config import device_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, MockModule, mock_integration from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -26,6 +30,7 @@ async def client_fixture( return await hass_ws_client(hass) +@pytest.mark.usefixtures("freezer") async def test_list_devices( hass: HomeAssistant, client: MockHAClientWebSocket, @@ -61,6 +66,7 @@ async def test_list_devices( "config_entries": [entry.entry_id], "configuration_url": None, "connections": [["ethernet", "12:34:56:78:90:AB:CD:EF"]], + "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": None, "hw_version": None, @@ -68,8 +74,11 @@ async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, + "primary_config_entry": entry.entry_id, "serial_number": None, "sw_version": None, "via_device_id": None, @@ -79,6 +88,7 @@ async def test_list_devices( "config_entries": [entry.entry_id], 
"configuration_url": None, "connections": [], + "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": dr.DeviceEntryType.SERVICE, "hw_version": None, @@ -86,8 +96,11 @@ async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, + "primary_config_entry": entry.entry_id, "serial_number": None, "sw_version": None, "via_device_id": dev1, @@ -109,6 +122,7 @@ async def test_list_devices( "config_entries": [entry.entry_id], "configuration_url": None, "connections": [["ethernet", "12:34:56:78:90:AB:CD:EF"]], + "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": None, "hw_version": None, @@ -117,8 +131,11 @@ async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, + "primary_config_entry": entry.entry_id, "serial_number": None, "sw_version": None, "via_device_id": None, @@ -145,12 +162,15 @@ async def test_update_device( hass: HomeAssistant, client: MockHAClientWebSocket, device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, payload_key: str, payload_value: str | dr.DeviceEntryDisabler | None, ) -> None: """Test update entry.""" entry = MockConfigEntry(title=None) entry.add_to_hass(hass) + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={("ethernet", "12:34:56:78:90:AB:CD:EF")}, @@ -161,6 +181,9 @@ async def test_update_device( assert not getattr(device, payload_key) + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "type": "config/device_registry/update", @@ -180,6 +203,12 @@ async def test_update_device( assert 
msg["result"][payload_key] == payload_value assert getattr(device, payload_key) == payload_value + for key, value in ( + ("created_at", created_at), + ("modified_at", modified_at if payload_value is not None else created_at), + ): + assert msg["result"][key] == value.timestamp() + assert getattr(device, key) == value assert isinstance(device.disabled_by, (dr.DeviceEntryDisabler, type(None))) @@ -188,10 +217,13 @@ async def test_update_device_labels( hass: HomeAssistant, client: MockHAClientWebSocket, device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry labels.""" entry = MockConfigEntry(title=None) entry.add_to_hass(hass) + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={("ethernet", "12:34:56:78:90:AB:CD:EF")}, @@ -201,6 +233,8 @@ async def test_update_device_labels( ) assert not device.labels + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -221,6 +255,12 @@ async def test_update_device_labels( assert msg["result"]["labels"] == unordered(["label1", "label2"]) assert device.labels == {"label1", "label2"} + for key, value in ( + ("created_at", created_at), + ("modified_at", modified_at), + ): + assert msg["result"][key] == value.timestamp() + assert getattr(device, key) == value async def test_remove_config_entry_from_device( @@ -274,7 +314,7 @@ async def test_remove_config_entry_from_device( config_entry_id=entry_2.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - assert device_entry.config_entries == [entry_1.entry_id, entry_2.entry_id] + assert device_entry.config_entries == {entry_1.entry_id, entry_2.entry_id} # Try removing a config entry from the device, it should fail because # async_remove_config_entry_device returns False @@ -293,9 +333,9 @@ 
async def test_remove_config_entry_from_device( assert response["result"]["config_entries"] == [entry_2.entry_id] # Check that the config entry was removed from the device - assert device_registry.async_get(device_entry.id).config_entries == [ + assert device_registry.async_get(device_entry.id).config_entries == { entry_2.entry_id - ] + } # Remove the 2nd config entry response = await ws_client.remove_device(device_entry.id, entry_2.entry_id) @@ -365,11 +405,11 @@ async def test_remove_config_entry_from_device_fails( config_entry_id=entry_3.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - assert device_entry.config_entries == [ + assert device_entry.config_entries == { entry_1.entry_id, entry_2.entry_id, entry_3.entry_id, - ] + } fake_entry_id = "abc123" assert entry_1.entry_id != fake_entry_id @@ -420,3 +460,91 @@ async def test_remove_config_entry_from_device_fails( assert not response["success"] assert response["error"]["code"] == "home_assistant_error" assert response["error"]["message"] == "Integration not found" + + +async def test_remove_config_entry_from_device_if_integration_remove( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, +) -> None: + """Test removing config entry from device doesn't lead to an error when the integration removes the entry.""" + assert await async_setup_component(hass, "config", {}) + ws_client = await hass_ws_client(hass) + + can_remove = False + + async def async_remove_config_entry_device(hass, config_entry, device_entry): + if can_remove: + device_registry.async_update_device( + device_entry.id, remove_config_entry_id=config_entry.entry_id + ) + return can_remove + + mock_integration( + hass, + MockModule( + "comp1", async_remove_config_entry_device=async_remove_config_entry_device + ), + ) + mock_integration( + hass, + MockModule( + "comp2", async_remove_config_entry_device=async_remove_config_entry_device + ), + ) + + entry_1 = MockConfigEntry( + 
domain="comp1", + title="Test 1", + source="bla", + ) + entry_1.supports_remove_device = True + entry_1.add_to_hass(hass) + + entry_2 = MockConfigEntry( + domain="comp1", + title="Test 1", + source="bla", + ) + entry_2.supports_remove_device = True + entry_2.add_to_hass(hass) + + device_registry.async_get_or_create( + config_entry_id=entry_1.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + device_entry = device_registry.async_get_or_create( + config_entry_id=entry_2.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + assert device_entry.config_entries == {entry_1.entry_id, entry_2.entry_id} + + # Try removing a config entry from the device, it should fail because + # async_remove_config_entry_device returns False + response = await ws_client.remove_device(device_entry.id, entry_1.entry_id) + + assert not response["success"] + assert response["error"]["code"] == "home_assistant_error" + + # Make async_remove_config_entry_device return True + can_remove = True + + # Remove the 1st config entry + response = await ws_client.remove_device(device_entry.id, entry_1.entry_id) + + assert response["success"] + assert response["result"]["config_entries"] == [entry_2.entry_id] + + # Check that the config entry was removed from the device + assert device_registry.async_get(device_entry.id).config_entries == { + entry_2.entry_id + } + + # Remove the 2nd config entry + response = await ws_client.remove_device(device_entry.id, entry_2.entry_id) + + assert response["success"] + assert response["result"] is None + + # This was the last config entry, the device is removed + assert not device_registry.async_get(device_entry.id) diff --git a/tests/components/config/test_entity_registry.py b/tests/components/config/test_entity_registry.py index 813ec654abb..60657d4a77b 100644 --- a/tests/components/config/test_entity_registry.py +++ b/tests/components/config/test_entity_registry.py @@ -1,5 +1,8 @@ """Test entity_registry 
API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered @@ -13,6 +16,7 @@ from homeassistant.helpers.entity_registry import ( RegistryEntryDisabler, RegistryEntryHider, ) +from homeassistant.util.dt import utcnow from tests.common import ( ANY, @@ -33,6 +37,7 @@ async def client( return await hass_ws_client(hass) +@pytest.mark.usefixtures("freezer") async def test_list_entities( hass: HomeAssistant, client: MockHAClientWebSocket ) -> None: @@ -62,6 +67,7 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, + "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -71,6 +77,7 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], + "modified_at": utcnow().timestamp(), "name": "Hello World", "options": {}, "original_name": None, @@ -82,6 +89,7 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, + "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -91,6 +99,7 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], + "modified_at": utcnow().timestamp(), "name": None, "options": {}, "original_name": None, @@ -129,6 +138,7 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, + "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -138,6 +148,7 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], + "modified_at": utcnow().timestamp(), "name": "Hello World", "options": {}, "original_name": None, @@ -325,6 +336,8 @@ async def test_list_entities_for_display( async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> None: """Test get entry.""" + name_created_at = datetime(1994, 2, 14, 12, 0, 0) + no_name_created_at = datetime(2024, 2, 14, 12, 0, 1) 
mock_registry( hass, { @@ -333,11 +346,15 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> unique_id="1234", platform="test_platform", name="Hello World", + created_at=name_created_at, + modified_at=name_created_at, ), "test_domain.no_name": RegistryEntry( entity_id="test_domain.no_name", unique_id="6789", platform="test_platform", + created_at=no_name_created_at, + modified_at=no_name_created_at, ), }, ) @@ -353,6 +370,7 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -363,6 +381,7 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "icon": None, "id": ANY, "labels": [], + "modified_at": name_created_at.timestamp(), "name": "Hello World", "options": {}, "original_device_class": None, @@ -387,6 +406,7 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": no_name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -397,6 +417,7 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "icon": None, "id": ANY, "labels": [], + "modified_at": no_name_created_at.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -410,6 +431,8 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) -> None: """Test get entry.""" + name_created_at = datetime(1994, 2, 14, 12, 0, 0) + no_name_created_at = datetime(2024, 2, 14, 12, 0, 1) mock_registry( hass, { @@ -418,11 +441,15 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) unique_id="1234", platform="test_platform", name="Hello 
World", + created_at=name_created_at, + modified_at=name_created_at, ), "test_domain.no_name": RegistryEntry( entity_id="test_domain.no_name", unique_id="6789", platform="test_platform", + created_at=no_name_created_at, + modified_at=no_name_created_at, ), }, ) @@ -446,6 +473,7 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -456,6 +484,7 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "icon": None, "id": ANY, "labels": [], + "modified_at": name_created_at.timestamp(), "name": "Hello World", "options": {}, "original_device_class": None, @@ -471,6 +500,7 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": no_name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -481,6 +511,7 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "icon": None, "id": ANY, "labels": [], + "modified_at": no_name_created_at.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -495,9 +526,11 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) async def test_update_entity( - hass: HomeAssistant, client: MockHAClientWebSocket + hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory ) -> None: """Test updating entity.""" + created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") + freezer.move_to(created) registry = mock_registry( hass, { @@ -520,6 +553,9 @@ async def test_update_entity( assert state.name == "before update" assert state.attributes[ATTR_ICON] == "icon:before update" + modified = datetime.fromisoformat("2024-07-17T13:30:00.900075+00:00") + freezer.move_to(modified) + 
# Update area, categories, device_class, hidden_by, icon, labels & name await client.send_json_auto_id( { @@ -544,6 +580,7 @@ async def test_update_entity( "area_id": "mock-area-id", "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, + "created_at": created.timestamp(), "config_entry_id": None, "device_class": "custom_device_class", "device_id": None, @@ -555,6 +592,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {}, "original_device_class": None, @@ -570,6 +608,9 @@ async def test_update_entity( assert state.name == "after update" assert state.attributes[ATTR_ICON] == "icon:after update" + modified = datetime.fromisoformat("2024-07-20T00:00:00.900075+00:00") + freezer.move_to(modified) + # Update hidden_by to illegal value await client.send_json_auto_id( { @@ -597,9 +638,13 @@ async def test_update_entity( assert msg["success"] assert hass.states.get("test_domain.world") is None - assert ( - registry.entities["test_domain.world"].disabled_by is RegistryEntryDisabler.USER - ) + entry = registry.entities["test_domain.world"] + assert entry.disabled_by is RegistryEntryDisabler.USER + assert entry.created_at == created + assert entry.modified_at == modified + + modified = datetime.fromisoformat("2024-07-21T00:00:00.900075+00:00") + freezer.move_to(modified) # Update disabled_by to None await client.send_json_auto_id( @@ -619,6 +664,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -629,6 +675,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {}, "original_device_class": 
None, @@ -641,6 +688,9 @@ async def test_update_entity( "require_restart": True, } + modified = datetime.fromisoformat("2024-07-22T00:00:00.900075+00:00") + freezer.move_to(modified) + # Update entity option await client.send_json_auto_id( { @@ -660,6 +710,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -670,6 +721,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -681,6 +733,9 @@ async def test_update_entity( }, } + modified = datetime.fromisoformat("2024-07-23T00:00:00.900075+00:00") + freezer.move_to(modified) + # Add a category to the entity await client.send_json_auto_id( { @@ -700,6 +755,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id", "scope3": "id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -710,6 +766,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -721,6 +778,9 @@ async def test_update_entity( }, } + modified = datetime.fromisoformat("2024-07-24T00:00:00.900075+00:00") + freezer.move_to(modified) + # Move the entity to a different category await client.send_json_auto_id( { @@ -740,6 +800,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id", "scope3": "other_id"}, "config_entry_id": None, + "created_at": 
created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -750,6 +811,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -761,6 +823,9 @@ async def test_update_entity( }, } + modified = datetime.fromisoformat("2024-07-23T10:00:00.900075+00:00") + freezer.move_to(modified) + # Move the entity to a different category await client.send_json_auto_id( { @@ -780,6 +845,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope3": "other_id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -790,6 +856,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -803,9 +870,11 @@ async def test_update_entity( async def test_update_entity_require_restart( - hass: HomeAssistant, client: MockHAClientWebSocket + hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory ) -> None: """Test updating entity.""" + created = datetime.fromisoformat("2024-02-14T12:00:00+00:00") + freezer.move_to(created) entity_id = "test_domain.test_platform_1234" config_entry = MockConfigEntry(domain="test_platform") config_entry.add_to_hass(hass) @@ -817,6 +886,9 @@ async def test_update_entity_require_restart( state = hass.states.get(entity_id) assert state is not None + modified = datetime.fromisoformat("2024-07-20T13:30:00+00:00") + freezer.move_to(modified) + # UPDATE DISABLED_BY TO NONE await client.send_json_auto_id( { @@ -835,6 +907,7 @@ async def 
test_update_entity_require_restart( "capabilities": None, "categories": {}, "config_entry_id": config_entry.entry_id, + "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -845,6 +918,7 @@ async def test_update_entity_require_restart( "icon": None, "id": ANY, "labels": [], + "modified_at": created.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -909,9 +983,11 @@ async def test_enable_entity_disabled_device( async def test_update_entity_no_changes( - hass: HomeAssistant, client: MockHAClientWebSocket + hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory ) -> None: """Test update entity with no changes.""" + created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") + freezer.move_to(created) mock_registry( hass, { @@ -932,6 +1008,9 @@ async def test_update_entity_no_changes( assert state is not None assert state.name == "name of entity" + modified = datetime.fromisoformat("2024-07-20T13:30:00.900075+00:00") + freezer.move_to(modified) + await client.send_json_auto_id( { "type": "config/entity_registry/update", @@ -949,6 +1028,7 @@ async def test_update_entity_no_changes( "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -959,6 +1039,7 @@ async def test_update_entity_no_changes( "icon": None, "id": ANY, "labels": [], + "modified_at": created.timestamp(), "name": "name of entity", "options": {}, "original_device_class": None, @@ -1002,9 +1083,11 @@ async def test_update_nonexisting_entity(client: MockHAClientWebSocket) -> None: async def test_update_entity_id( - hass: HomeAssistant, client: MockHAClientWebSocket + hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory ) -> None: """Test update entity id.""" + created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") + freezer.move_to(created) 
mock_registry( hass, { @@ -1022,6 +1105,9 @@ async def test_update_entity_id( assert hass.states.get("test_domain.world") is not None + modified = datetime.fromisoformat("2024-07-20T13:30:00.900075+00:00") + freezer.move_to(modified) + await client.send_json_auto_id( { "type": "config/entity_registry/update", @@ -1039,6 +1125,7 @@ async def test_update_entity_id( "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -1049,6 +1136,7 @@ async def test_update_entity_id( "icon": None, "id": ANY, "labels": [], + "modified_at": modified.timestamp(), "name": None, "options": {}, "original_device_class": None, diff --git a/tests/components/config/test_floor_registry.py b/tests/components/config/test_floor_registry.py index b4e3907bc4d..da6e550b1f6 100644 --- a/tests/components/config/test_floor_registry.py +++ b/tests/components/config/test_floor_registry.py @@ -1,11 +1,15 @@ """Test floor registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered from homeassistant.components.config import floor_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import floor_registry as fr +from homeassistant.util.dt import utcnow from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -22,9 +26,15 @@ async def client_fixture( async def test_list_floors( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test list entries.""" + created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_1) floor_registry.async_create("First floor") + + created_2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(created_2) floor_registry.async_create( name="Second floor", aliases={"top floor", "attic"}, @@ -34,6 +44,12 @@ async def 
test_list_floors( assert len(floor_registry.floors) == 2 + # update first floor to change modified_at + floor_registry.async_update( + "first_floor", + name="First floor...", + ) + await client.send_json_auto_id({"type": "config/floor_registry/list"}) msg = await client.receive_json() @@ -41,20 +57,25 @@ async def test_list_floors( assert len(msg["result"]) == len(floor_registry.floors) assert msg["result"][0] == { "aliases": [], + "created_at": created_1.timestamp(), "icon": None, "floor_id": "first_floor", - "name": "First floor", + "modified_at": created_2.timestamp(), + "name": "First floor...", "level": None, } assert msg["result"][1] == { "aliases": unordered(["top floor", "attic"]), + "created_at": created_2.timestamp(), "icon": "mdi:home-floor-2", "floor_id": "second_floor", + "modified_at": created_2.timestamp(), "name": "Second floor", "level": 2, } +@pytest.mark.usefixtures("freezer") async def test_create_floor( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, @@ -69,8 +90,10 @@ async def test_create_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": [], + "created_at": utcnow().timestamp(), "icon": None, "floor_id": "first_floor", + "modified_at": utcnow().timestamp(), "name": "First floor", "level": None, } @@ -90,8 +113,10 @@ async def test_create_floor( assert len(floor_registry.floors) == 2 assert msg["result"] == { "aliases": unordered(["top floor", "attic"]), + "created_at": utcnow().timestamp(), "icon": "mdi:home-floor-2", "floor_id": "second_floor", + "modified_at": utcnow().timestamp(), "name": "Second floor", "level": 2, } @@ -163,10 +188,15 @@ async def test_delete_non_existing_floor( async def test_update_floor( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) floor = floor_registry.async_create("First 
floor") assert len(floor_registry.floors) == 1 + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -184,12 +214,16 @@ async def test_update_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": unordered(["top floor", "attic"]), + "created_at": created_at.timestamp(), "icon": "mdi:home-floor-2", "floor_id": floor.floor_id, + "modified_at": modified_at.timestamp(), "name": "Second floor", "level": 2, } + modified_at = datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") + freezer.move_to(modified_at) await client.send_json_auto_id( { "floor_id": floor.floor_id, @@ -206,8 +240,10 @@ async def test_update_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": [], + "created_at": created_at.timestamp(), "icon": None, "floor_id": floor.floor_id, + "modified_at": modified_at.timestamp(), "name": "First floor", "level": None, } diff --git a/tests/components/config/test_label_registry.py b/tests/components/config/test_label_registry.py index 040b3bfe28a..3eff759132f 100644 --- a/tests/components/config/test_label_registry.py +++ b/tests/components/config/test_label_registry.py @@ -1,5 +1,8 @@ """Test label registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.config import label_registry @@ -21,9 +24,15 @@ async def client_fixture( async def test_list_labels( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test list entries.""" + created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_1) label_registry.async_create("mock 1") + + created_2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(created_2) label_registry.async_create( name="mock 2", color="#00FF00", @@ -33,6 +42,12 @@ async def 
test_list_labels( assert len(label_registry.labels) == 2 + # update mock 1 to change modified_at + label_registry.async_update( + "mock_1", + name="Mock 1...", + ) + await client.send_json_auto_id({"type": "config/label_registry/list"}) msg = await client.receive_json() @@ -40,16 +55,20 @@ async def test_list_labels( assert len(msg["result"]) == len(label_registry.labels) assert msg["result"][0] == { "color": None, + "created_at": created_1.timestamp(), "description": None, "icon": None, "label_id": "mock_1", - "name": "mock 1", + "modified_at": created_2.timestamp(), + "name": "Mock 1...", } assert msg["result"][1] == { "color": "#00FF00", + "created_at": created_2.timestamp(), "description": "This is the second label", "icon": "mdi:two", "label_id": "mock_2", + "modified_at": created_2.timestamp(), "name": "mock 2", } @@ -57,8 +76,11 @@ async def test_list_labels( async def test_create_label( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test create entry.""" + created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_1) await client.send_json_auto_id( { "name": "MOCK", @@ -71,12 +93,16 @@ async def test_create_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": None, + "created_at": created_1.timestamp(), "description": None, "icon": None, "label_id": "mock", "name": "MOCK", + "modified_at": created_1.timestamp(), } + created_2 = datetime.fromisoformat("2024-07-17T13:30:00.900075+00:00") + freezer.move_to(created_2) await client.send_json_auto_id( { "id": 2, @@ -93,12 +119,16 @@ async def test_create_label( assert len(label_registry.labels) == 2 assert msg["result"] == { "color": "#00FF00", + "created_at": created_2.timestamp(), "description": "This is the second label", "icon": "mdi:two", "label_id": "mockery", + "modified_at": created_2.timestamp(), "name": "MOCKERY", } + created_3 = 
datetime.fromisoformat("2024-07-18T13:30:00.900075+00:00") + freezer.move_to(created_3) await client.send_json_auto_id( { "name": "MAGIC", @@ -114,9 +144,11 @@ async def test_create_label( assert len(label_registry.labels) == 3 assert msg["result"] == { "color": "indigo", + "created_at": created_3.timestamp(), "description": "This is the third label", "icon": "mdi:three", "label_id": "magic", + "modified_at": created_3.timestamp(), "name": "MAGIC", } @@ -182,11 +214,17 @@ async def test_delete_non_existing_label( async def test_update_label( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) label = label_registry.async_create("mock") assert len(label_registry.labels) == 1 + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "label_id": label.label_id, @@ -203,12 +241,17 @@ async def test_update_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": "#00FF00", + "created_at": created_at.timestamp(), "description": "This is a label description", "icon": "mdi:test", "label_id": "mock", + "modified_at": modified_at.timestamp(), "name": "UPDATED", } + modified_at = datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "label_id": label.label_id, @@ -225,12 +268,17 @@ async def test_update_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": None, + "created_at": created_at.timestamp(), "description": None, "icon": None, "label_id": "mock", + "modified_at": modified_at.timestamp(), "name": "UPDATED AGAIN", } + modified_at = datetime.fromisoformat("2024-07-16T13:55:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "label_id": label.label_id, 
@@ -247,9 +295,11 @@ async def test_update_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": "primary", + "created_at": created_at.timestamp(), "description": None, "icon": None, "label_id": "mock", + "modified_at": modified_at.timestamp(), "name": "UPDATED YET AGAIN", } diff --git a/tests/components/config/test_scene.py b/tests/components/config/test_scene.py index 6ca42e7f56d..c4c207f33f9 100644 --- a/tests/components/config/test_scene.py +++ b/tests/components/config/test_scene.py @@ -2,32 +2,33 @@ from http import HTTPStatus import json +from typing import Any from unittest.mock import ANY, patch import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import scene from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator @pytest.fixture -async def setup_scene(hass, scene_config): +async def setup_scene(hass: HomeAssistant, scene_config: dict[str, Any]) -> None: """Set up scene integration.""" assert await async_setup_component(hass, "scene", {"scene": scene_config}) await hass.async_block_till_done() @pytest.mark.parametrize("scene_config", [{}]) +@pytest.mark.usefixtures("setup_scene") async def test_create_scene( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_scene, + hass_config_store: dict[str, Any], ) -> None: """Test creating a scene.""" with patch.object(config, "SECTIONS", [scene]): @@ -70,11 +71,11 @@ async def test_create_scene( @pytest.mark.parametrize("scene_config", [{}]) +@pytest.mark.usefixtures("setup_scene") async def test_update_scene( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_scene, + hass_config_store: dict[str, Any], ) -> None: """Test updating a scene.""" with patch.object(config, 
"SECTIONS", [scene]): @@ -118,11 +119,11 @@ async def test_update_scene( @pytest.mark.parametrize("scene_config", [{}]) +@pytest.mark.usefixtures("setup_scene") async def test_bad_formatted_scene( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - setup_scene, + hass_config_store: dict[str, Any], ) -> None: """Test that we handle scene without ID.""" with patch.object(config, "SECTIONS", [scene]): @@ -184,12 +185,12 @@ async def test_bad_formatted_scene( ], ], ) +@pytest.mark.usefixtures("setup_scene") async def test_delete_scene( hass: HomeAssistant, hass_client: ClientSessionGenerator, entity_registry: er.EntityRegistry, - hass_config_store, - setup_scene, + hass_config_store: dict[str, Any], ) -> None: """Test deleting a scene.""" @@ -227,12 +228,12 @@ async def test_delete_scene( @pytest.mark.parametrize("scene_config", [{}]) +@pytest.mark.usefixtures("setup_scene") async def test_api_calls_require_admin( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_read_only_access_token: str, - hass_config_store, - setup_scene, + hass_config_store: dict[str, Any], ) -> None: """Test scene APIs endpoints do not work as a normal user.""" with patch.object(config, "SECTIONS", [scene]): diff --git a/tests/components/config/test_script.py b/tests/components/config/test_script.py index 3ee45aec26a..88245eb567f 100644 --- a/tests/components/config/test_script.py +++ b/tests/components/config/test_script.py @@ -7,12 +7,12 @@ from unittest.mock import patch import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import script from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from homeassistant.util import yaml from tests.typing import ClientSessionGenerator @@ -31,7 +31,9 @@ 
async def setup_script(hass: HomeAssistant, script_config: dict[str, Any]) -> No @pytest.mark.parametrize("script_config", [{}]) async def test_get_script_config( - hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_config_store + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_config_store: dict[str, Any], ) -> None: """Test getting script config.""" with patch.object(config, "SECTIONS", [script]): @@ -54,7 +56,9 @@ async def test_get_script_config( @pytest.mark.parametrize("script_config", [{}]) async def test_update_script_config( - hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_config_store + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_config_store: dict[str, Any], ) -> None: """Test updating script config.""" with patch.object(config, "SECTIONS", [script]): @@ -90,7 +94,9 @@ async def test_update_script_config( @pytest.mark.parametrize("script_config", [{}]) async def test_invalid_object_id( - hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_config_store + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_config_store: dict[str, Any], ) -> None: """Test creating a script with an invalid object_id.""" with patch.object(config, "SECTIONS", [script]): @@ -152,7 +158,7 @@ async def test_invalid_object_id( async def test_update_script_config_with_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, + hass_config_store: dict[str, Any], caplog: pytest.LogCaptureFixture, updated_config: Any, validation_error: str, @@ -202,8 +208,7 @@ async def test_update_script_config_with_error( async def test_update_script_config_with_blueprint_substitution_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, - hass_config_store, - # setup_automation, + hass_config_store: dict[str, Any], caplog: pytest.LogCaptureFixture, updated_config: Any, validation_error: str, @@ -239,7 +244,9 @@ async def 
test_update_script_config_with_blueprint_substitution_error( @pytest.mark.parametrize("script_config", [{}]) async def test_update_remove_key_script_config( - hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_config_store + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_config_store: dict[str, Any], ) -> None: """Test updating script config while removing a key.""" with patch.object(config, "SECTIONS", [script]): @@ -286,7 +293,7 @@ async def test_delete_script( hass: HomeAssistant, hass_client: ClientSessionGenerator, entity_registry: er.EntityRegistry, - hass_config_store, + hass_config_store: dict[str, Any], ) -> None: """Test deleting a script.""" with patch.object(config, "SECTIONS", [script]): @@ -325,7 +332,7 @@ async def test_api_calls_require_admin( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_read_only_access_token: str, - hass_config_store, + hass_config_store: dict[str, Any], ) -> None: """Test script APIs endpoints do not work as a normal user.""" with patch.object(config, "SECTIONS", [script]): diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 42746525a0d..7d15bde88c0 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Generator +from importlib.util import find_spec from pathlib import Path from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant @@ -21,9 +21,9 @@ if TYPE_CHECKING: from .switch.common import MockSwitch -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="session", autouse=find_spec("zeroconf") is not None) def patch_zeroconf_multiple_catcher() -> Generator[None]: - """Patch zeroconf wrapper that detects if 
multiple instances are used.""" + """If installed, patch zeroconf wrapper that detects if multiple instances are used.""" with patch( "homeassistant.components.zeroconf.install_multiple_zeroconf_catcher", side_effect=lambda zc: None, @@ -124,9 +124,9 @@ def mock_conversation_agent_fixture(hass: HomeAssistant) -> MockAgent: return mock_conversation_agent_fixture_helper(hass) -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="session", autouse=find_spec("ffmpeg") is not None) def prevent_ffmpeg_subprocess() -> Generator[None]: - """Prevent ffmpeg from creating a subprocess.""" + """If installed, prevent ffmpeg from creating a subprocess.""" with patch( "homeassistant.components.ffmpeg.FFVersion.get_version", return_value="6.0" ): diff --git a/tests/components/conversation/__init__.py b/tests/components/conversation/__init__.py index fb9bcab7498..1ae3372968e 100644 --- a/tests/components/conversation/__init__.py +++ b/tests/components/conversation/__init__.py @@ -11,7 +11,6 @@ from homeassistant.components.conversation.models import ( ) from homeassistant.components.homeassistant.exposed_entities import ( DATA_EXPOSED_ENTITIES, - ExposedEntities, async_expose_entity, ) from homeassistant.core import HomeAssistant @@ -45,12 +44,12 @@ class MockAgent(conversation.AbstractConversationAgent): ) -def expose_new(hass: HomeAssistant, expose_new: bool): +def expose_new(hass: HomeAssistant, expose_new: bool) -> None: """Enable exposing new entities to the default agent.""" - exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] exposed_entities.async_set_expose_new_entities(conversation.DOMAIN, expose_new) -def expose_entity(hass: HomeAssistant, entity_id: str, should_expose: bool): +def expose_entity(hass: HomeAssistant, entity_id: str, should_expose: bool) -> None: """Expose an entity to the default agent.""" async_expose_entity(hass, conversation.DOMAIN, entity_id, should_expose) diff 
--git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr new file mode 100644 index 00000000000..051613f0300 --- /dev/null +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -0,0 +1,496 @@ +# serializer version: 1 +# name: test_custom_sentences + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en-us', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'You ordered a stout', + }), + }), + }), + }) +# --- +# name: test_custom_sentences.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en-us', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'You ordered a lager', + }), + }), + }), + }) +# --- +# name: test_custom_sentences_config + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Stealth mode engaged', + }), + }), + }), + }) +# --- +# name: test_intent_alias_added_removed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: 
test_intent_alias_added_removed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_alias_added_removed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called late added alias', + }), + }), + }), + }) +# --- +# name: test_intent_conversion_not_expose_new + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_intent_conversion_not_expose_new.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_added_removed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': , + }), + ]), + 
'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_added_removed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.late', + 'name': 'friendly light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_added_removed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.late', + 'name': 'friendly light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_added_removed.3 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called late added light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_exposed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + 
}), + }), + }), + }) +# --- +# name: test_intent_entity_fail_if_unexposed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_remove_custom_name + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_remove_custom_name.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_remove_custom_name.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called renamed light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_renamed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 
'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_renamed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'renamed light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr new file mode 100644 index 00000000000..fd02646df48 --- /dev/null +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -0,0 +1,711 @@ +# serializer version: 1 +# name: test_get_agent_list + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'af', + 'ar', + 'bg', + 'bn', + 'ca', + 'cs', + 'da', + 'de', + 'de-CH', + 'el', + 'en', + 'es', + 'et', + 'eu', + 'fa', + 'fi', + 'fr', + 'fr-CA', + 'gl', + 'gu', + 'he', + 'hi', + 'hr', + 'hu', + 'id', + 'is', + 'it', + 'ka', + 'kn', + 'ko', + 'lb', + 'lt', + 'lv', + 'ml', + 'mn', + 'ms', + 'nb', + 'nl', + 'pl', + 'pt', + 'pt-br', + 'ro', + 'ru', + 'sk', + 'sl', + 'sr', + 'sv', + 'sw', + 'te', + 'tr', + 'uk', + 'ur', + 'vi', + 'zh-cn', + 'zh-hk', + 'zh-tw', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + 'smurfish', + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.1 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 
'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + 'smurfish', + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.2 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'en', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.3 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'en', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.4 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'de', + 'de-CH', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.5 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'de-CH', + 'de', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_http_api_handle_failure + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': 
dict({ + 'code': 'failed_to_handle', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'An unexpected error occurred', + }), + }), + }), + }) +# --- +# name: test_http_api_no_match + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_http_api_unexpected_failure + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'unknown', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'An unexpected error occurred', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent[None] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent[conversation.home_assistant] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent[homeassistant] + dict({ + 'conversation_id': None, + 
'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_ws_api[payload0] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload1] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'test-language', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload2] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload3] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload4] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 
'test-language', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload5] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_hass_agent_debug + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'my cool light', + 'value': 'my cool light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOn', + }), + 'match': True, + 'sentence_template': ' on ( | [in ])', + 'slots': dict({ + 'name': 'my cool light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'my cool light', + 'value': 'my cool light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOff', + }), + 'match': True, + 'sentence_template': '[] ( | [in ]) [to] off', + 'slots': dict({ + 'name': 'my cool light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': dict({ + 'area': dict({ + 'name': 'area', + 'text': 'kitchen', + 'value': 'kitchen', + }), + 'domain': dict({ + 'name': 'domain', + 'text': '', + 'value': 'light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOn', + }), + 'match': True, + 'sentence_template': ' on [all] in ', + 'slots': dict({ + 'area': 'kitchen', + 'domain': 'light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': 
dict({ + 'area': dict({ + 'name': 'area', + 'text': 'kitchen', + 'value': 'kitchen', + }), + 'domain': dict({ + 'name': 'domain', + 'text': 'lights', + 'value': 'light', + }), + 'state': dict({ + 'name': 'state', + 'text': 'on', + 'value': 'on', + }), + }), + 'intent': dict({ + 'name': 'HassGetState', + }), + 'match': True, + 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', + 'slots': dict({ + 'area': 'kitchen', + 'domain': 'lights', + 'state': 'on', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': False, + }), + }), + 'unmatched_slots': dict({ + }), + }), + None, + ]), + }) +# --- +# name: test_ws_hass_agent_debug_custom_sentence + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'beer_style': dict({ + 'name': 'beer_style', + 'text': 'lager', + 'value': 'lager', + }), + }), + 'file': 'en/beer.yaml', + 'intent': dict({ + 'name': 'OrderBeer', + }), + 'match': True, + 'sentence_template': "I'd like to order a {beer_style} [please]", + 'slots': dict({ + 'beer_style': 'lager', + }), + 'source': 'custom', + 'targets': dict({ + }), + 'unmatched_slots': dict({ + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_null_result + dict({ + 'results': list([ + None, + ]), + }) +# --- +# name: test_ws_hass_agent_debug_out_of_range + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'brightness': dict({ + 'name': 'brightness', + 'text': '100%', + 'value': 100, + }), + 'name': dict({ + 'name': 'name', + 'text': 'test light', + 'value': 'test light', + }), + }), + 'intent': dict({ + 'name': 'HassLightSet', + }), + 'match': True, + 'sentence_template': '[] brightness [to] ', + 'slots': dict({ + 'brightness': '100%', + 'name': 'test light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.demo_1234': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_out_of_range.1 + dict({ + 
'results': list([ + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'test light', + 'value': 'test light', + }), + }), + 'intent': dict({ + 'name': 'HassLightSet', + }), + 'match': False, + 'sentence_template': '[] brightness [to] ', + 'slots': dict({ + 'name': 'test light', + }), + 'source': 'builtin', + 'targets': dict({ + }), + 'unmatched_slots': dict({ + 'brightness': 1001, + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_sentence_trigger + dict({ + 'results': list([ + dict({ + 'match': True, + 'sentence_template': 'hello[ world]', + 'source': 'trigger', + }), + ]), + }) +# --- diff --git a/tests/components/conversation/snapshots/test_init.ambr b/tests/components/conversation/snapshots/test_init.ambr index 403c72aaa10..0327be064d4 100644 --- a/tests/components/conversation/snapshots/test_init.ambr +++ b/tests/components/conversation/snapshots/test_init.ambr @@ -24,81 +24,6 @@ }), }) # --- -# name: test_custom_sentences - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en-us', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'You ordered a stout', - }), - }), - }), - }) -# --- -# name: test_custom_sentences.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en-us', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'You ordered a lager', - }), - }), - }), - }) -# --- -# name: test_custom_sentences_config - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 
'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Stealth mode engaged', - }), - }), - }), - }) -# --- # name: test_get_agent_info dict({ 'id': 'conversation.home_assistant', @@ -117,918 +42,6 @@ 'name': 'Home Assistant', }) # --- -# name: test_get_agent_list - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'af', - 'ar', - 'bg', - 'bn', - 'ca', - 'cs', - 'da', - 'de', - 'de-CH', - 'el', - 'en', - 'es', - 'et', - 'eu', - 'fa', - 'fi', - 'fr', - 'fr-CA', - 'gl', - 'gu', - 'he', - 'hi', - 'hr', - 'hu', - 'id', - 'is', - 'it', - 'ka', - 'kn', - 'ko', - 'lb', - 'lt', - 'lv', - 'ml', - 'mn', - 'ms', - 'nb', - 'nl', - 'pl', - 'pt', - 'pt-br', - 'ro', - 'ru', - 'sk', - 'sl', - 'sr', - 'sv', - 'sw', - 'te', - 'tr', - 'uk', - 'ur', - 'vi', - 'zh-cn', - 'zh-hk', - 'zh-tw', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - 'smurfish', - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.1 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - 'smurfish', - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.2 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'en', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.3 - 
dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'en', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.4 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'de', - 'de-CH', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.5 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'de-CH', - 'de', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_http_api_handle_failure - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'failed_to_handle', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'An unexpected error occurred', - }), - }), - }), - }) -# --- -# name: test_http_api_no_match - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_http_api_unexpected_failure - dict({ - 'conversation_id': None, - 
'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'unknown', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'An unexpected error occurred', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[None] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[conversation.home_assistant] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[homeassistant] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_alias_added_removed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen 
light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_alias_added_removed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_alias_added_removed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added alias', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_conversion_not_expose_new - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_conversion_not_expose_new.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': 
None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_added_removed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_added_removed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.late', - 'name': 'friendly light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_added_removed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.late', - 'name': 'friendly light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_added_removed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added', - }), - }), - }), - }) -# --- -# name: 
test_http_processing_intent_entity_exposed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called my cool', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.4 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 
'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.5 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'renamed light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 
'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed.4 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_target_ha_agent - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- # name: test_turn_on_intent[None-turn kitchen on-None] dict({ 'conversation_id': None, @@ -1389,361 +402,3 @@ }), }) # --- -# name: test_ws_api[payload0] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) 
-# --- -# name: test_ws_api[payload1] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'test-language', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload2] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload3] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload4] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'test-language', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload5] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_hass_agent_debug - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'my cool light', - 'value': 'my cool light', - }), - }), - 'intent': dict({ - 'name': 
'HassTurnOn', - }), - 'match': True, - 'sentence_template': ' on ( | [in ])', - 'slots': dict({ - 'name': 'my cool light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'my cool light', - 'value': 'my cool light', - }), - }), - 'intent': dict({ - 'name': 'HassTurnOff', - }), - 'match': True, - 'sentence_template': '[] ( | [in ]) [to] off', - 'slots': dict({ - 'name': 'my cool light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'area': dict({ - 'name': 'area', - 'text': 'kitchen', - 'value': 'kitchen', - }), - 'domain': dict({ - 'name': 'domain', - 'text': '', - 'value': 'light', - }), - }), - 'intent': dict({ - 'name': 'HassTurnOn', - }), - 'match': True, - 'sentence_template': ' on [all] in ', - 'slots': dict({ - 'area': 'kitchen', - 'domain': 'light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'area': dict({ - 'name': 'area', - 'text': 'kitchen', - 'value': 'kitchen', - }), - 'domain': dict({ - 'name': 'domain', - 'text': 'lights', - 'value': 'light', - }), - 'state': dict({ - 'name': 'state', - 'text': 'on', - 'value': 'on', - }), - }), - 'intent': dict({ - 'name': 'HassGetState', - }), - 'match': True, - 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', - 'slots': dict({ - 'area': 'kitchen', - 'domain': 'lights', - 'state': 'on', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': False, - }), - }), - 'unmatched_slots': dict({ - }), - }), - None, - ]), - }) -# --- -# name: test_ws_hass_agent_debug_custom_sentence - dict({ - 'results': 
list([ - dict({ - 'details': dict({ - 'beer_style': dict({ - 'name': 'beer_style', - 'text': 'lager', - 'value': 'lager', - }), - }), - 'file': 'en/beer.yaml', - 'intent': dict({ - 'name': 'OrderBeer', - }), - 'match': True, - 'sentence_template': "I'd like to order a {beer_style} [please]", - 'slots': dict({ - 'beer_style': 'lager', - }), - 'source': 'custom', - 'targets': dict({ - }), - 'unmatched_slots': dict({ - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_null_result - dict({ - 'results': list([ - None, - ]), - }) -# --- -# name: test_ws_hass_agent_debug_out_of_range - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'brightness': dict({ - 'name': 'brightness', - 'text': '100%', - 'value': 100, - }), - 'name': dict({ - 'name': 'name', - 'text': 'test light', - 'value': 'test light', - }), - }), - 'intent': dict({ - 'name': 'HassLightSet', - }), - 'match': True, - 'sentence_template': '[] brightness [to] ', - 'slots': dict({ - 'brightness': '100%', - 'name': 'test light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.demo_1234': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_out_of_range.1 - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'test light', - 'value': 'test light', - }), - }), - 'intent': dict({ - 'name': 'HassLightSet', - }), - 'match': False, - 'sentence_template': '[] brightness [to] ', - 'slots': dict({ - 'name': 'test light', - }), - 'source': 'builtin', - 'targets': dict({ - }), - 'unmatched_slots': dict({ - 'brightness': 1001, - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_sentence_trigger - dict({ - 'results': list([ - dict({ - 'match': True, - 'sentence_template': 'hello[ world]', - 'source': 'trigger', - }), - ]), - }) -# --- diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 
511967e3a9c..315b73bacfd 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -1,13 +1,20 @@ """Test for the default agent.""" from collections import defaultdict +import os +import tempfile +from typing import Any from unittest.mock import AsyncMock, patch from hassil.recognize import Intent, IntentData, MatchEntity, RecognizeResult import pytest +from syrupy import SnapshotAssertion +import yaml from homeassistant.components import conversation, cover, media_player from homeassistant.components.conversation import default_agent +from homeassistant.components.conversation.models import ConversationInput +from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.components.homeassistant.exposed_entities import ( async_get_assistant_settings, ) @@ -16,25 +23,56 @@ from homeassistant.components.intent import ( TimerInfo, async_register_timer_handler, ) -from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_CLOSED -from homeassistant.core import DOMAIN as HASS_DOMAIN, Context, HomeAssistant, callback +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_FRIENDLY_NAME, + STATE_CLOSED, + STATE_ON, + STATE_UNKNOWN, + EntityCategory, +) +from homeassistant.core import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + Context, + HomeAssistant, + callback, +) from homeassistant.helpers import ( area_registry as ar, device_registry as dr, - entity, entity_registry as er, floor_registry as fr, intent, ) from homeassistant.setup import async_setup_component -from . import expose_entity +from . 
import expose_entity, expose_new -from tests.common import MockConfigEntry, async_mock_service +from tests.common import ( + MockConfigEntry, + MockUser, + async_mock_service, + setup_test_component_platform, +) +from tests.components.light.common import MockLight + + +class OrderBeerIntentHandler(intent.IntentHandler): + """Handle OrderBeer intent.""" + + intent_type = "OrderBeer" + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Return speech response.""" + beer_style = intent_obj.slots["beer_style"]["value"] + response = intent_obj.create_response() + response.async_set_speech(f"You ordered a {beer_style}") + return response @pytest.fixture -async def init_components(hass): +async def init_components(hass: HomeAssistant) -> None: """Initialize relevant components with empty configs.""" assert await async_setup_component(hass, "homeassistant", {}) assert await async_setup_component(hass, "conversation", {}) @@ -46,12 +84,13 @@ async def init_components(hass): [ {"hidden_by": er.RegistryEntryHider.USER}, {"hidden_by": er.RegistryEntryHider.INTEGRATION}, - {"entity_category": entity.EntityCategory.CONFIG}, - {"entity_category": entity.EntityCategory.DIAGNOSTIC}, + {"entity_category": EntityCategory.CONFIG}, + {"entity_category": EntityCategory.DIAGNOSTIC}, ], ) +@pytest.mark.usefixtures("init_components") async def test_hidden_entities_skipped( - hass: HomeAssistant, init_components, er_kwargs, entity_registry: er.EntityRegistry + hass: HomeAssistant, er_kwargs: dict[str, Any], entity_registry: er.EntityRegistry ) -> None: """Test we skip hidden entities.""" @@ -59,7 +98,7 @@ async def test_hidden_entities_skipped( "light", "demo", "1234", suggested_object_id="Test light", **er_kwargs ) hass.states.async_set("light.test_light", "off") - calls = async_mock_service(hass, HASS_DOMAIN, "turn_on") + calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, "turn_on") result = await conversation.async_converse( hass, "turn on test 
light", None, Context(), None ) @@ -69,7 +108,8 @@ async def test_hidden_entities_skipped( assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS -async def test_exposed_domains(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_exposed_domains(hass: HomeAssistant) -> None: """Test that we can't interact with entities that aren't exposed.""" hass.states.async_set( "lock.front_door", "off", attributes={ATTR_FRIENDLY_NAME: "Front Door"} @@ -93,9 +133,9 @@ async def test_exposed_domains(hass: HomeAssistant, init_components) -> None: assert result.response.error_code == intent.IntentResponseErrorCode.NO_VALID_TARGETS +@pytest.mark.usefixtures("init_components") async def test_exposed_areas( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, @@ -160,10 +200,8 @@ async def test_exposed_areas( assert result.response.response_type == intent.IntentResponseType.QUERY_ANSWER -async def test_conversation_agent( - hass: HomeAssistant, - init_components, -) -> None: +@pytest.mark.usefixtures("init_components") +async def test_conversation_agent(hass: HomeAssistant) -> None: """Test DefaultAgent.""" agent = default_agent.async_get_default_agent(hass) with patch( @@ -172,6 +210,14 @@ async def test_conversation_agent( ): assert agent.supported_languages == ["dwarvish", "elvish", "entish"] + state = hass.states.get(agent.entity_id) + assert state + assert state.state == STATE_UNKNOWN + assert ( + state.attributes["supported_features"] + == conversation.ConversationEntityFeature.CONTROL + ) + async def test_expose_flag_automatically_set( hass: HomeAssistant, @@ -209,9 +255,9 @@ async def test_expose_flag_automatically_set( } +@pytest.mark.usefixtures("init_components") async def test_unexposed_entities_skipped( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, entity_registry: 
er.EntityRegistry, ) -> None: @@ -262,7 +308,8 @@ async def test_unexposed_entities_skipped( assert result.response.matched_states[0].entity_id == exposed_light.entity_id -async def test_trigger_sentences(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_trigger_sentences(hass: HomeAssistant) -> None: """Test registering/unregistering/matching a few trigger sentences.""" trigger_sentences = ["It's party time", "It is time to party"] trigger_response = "Cowabunga!" @@ -303,9 +350,8 @@ async def test_trigger_sentences(hass: HomeAssistant, init_components) -> None: assert len(callback.mock_calls) == 0 -async def test_shopping_list_add_item( - hass: HomeAssistant, init_components, sl_setup -) -> None: +@pytest.mark.usefixtures("init_components", "sl_setup") +async def test_shopping_list_add_item(hass: HomeAssistant) -> None: """Test adding an item to the shopping list through the default agent.""" result = await conversation.async_converse( hass, "add apples to my shopping list", None, Context() @@ -316,7 +362,8 @@ async def test_shopping_list_add_item( } -async def test_nevermind_item(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_nevermind_item(hass: HomeAssistant) -> None: """Test HassNevermind intent through the default agent.""" result = await conversation.async_converse(hass, "nevermind", None, Context()) assert result.response.intent is not None @@ -326,9 +373,9 @@ async def test_nevermind_item(hass: HomeAssistant, init_components) -> None: assert not result.response.speech +@pytest.mark.usefixtures("init_components") async def test_device_area_context( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, @@ -465,7 +512,8 @@ async def test_device_area_context( } -async def test_error_no_device(hass: HomeAssistant, init_components) -> None: 
+@pytest.mark.usefixtures("init_components") +async def test_error_no_device(hass: HomeAssistant) -> None: """Test error message when device/entity is missing.""" result = await conversation.async_converse( hass, "turn on missing entity", None, Context(), None @@ -479,7 +527,8 @@ async def test_error_no_device(hass: HomeAssistant, init_components) -> None: ) -async def test_error_no_area(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_error_no_area(hass: HomeAssistant) -> None: """Test error message when area is missing.""" result = await conversation.async_converse( hass, "turn on the lights in missing area", None, Context(), None @@ -493,7 +542,8 @@ async def test_error_no_area(hass: HomeAssistant, init_components) -> None: ) -async def test_error_no_floor(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_error_no_floor(hass: HomeAssistant) -> None: """Test error message when floor is missing.""" result = await conversation.async_converse( hass, "turn on all the lights on missing floor", None, Context(), None @@ -507,8 +557,9 @@ async def test_error_no_floor(hass: HomeAssistant, init_components) -> None: ) +@pytest.mark.usefixtures("init_components") async def test_error_no_device_in_area( - hass: HomeAssistant, init_components, area_registry: ar.AreaRegistry + hass: HomeAssistant, area_registry: ar.AreaRegistry ) -> None: """Test error message when area is missing a device/entity.""" area_kitchen = area_registry.async_get_or_create("kitchen_id") @@ -525,9 +576,8 @@ async def test_error_no_device_in_area( ) -async def test_error_no_domain( - hass: HomeAssistant, init_components, area_registry: ar.AreaRegistry -) -> None: +@pytest.mark.usefixtures("init_components") +async def test_error_no_domain(hass: HomeAssistant) -> None: """Test error message when no devices/entities exist for a domain.""" # We don't have a sentence for turning on all fans @@ 
-558,8 +608,9 @@ async def test_error_no_domain( ) +@pytest.mark.usefixtures("init_components") async def test_error_no_domain_in_area( - hass: HomeAssistant, init_components, area_registry: ar.AreaRegistry + hass: HomeAssistant, area_registry: ar.AreaRegistry ) -> None: """Test error message when no devices/entities for a domain exist in an area.""" area_kitchen = area_registry.async_get_or_create("kitchen_id") @@ -576,9 +627,9 @@ async def test_error_no_domain_in_area( ) +@pytest.mark.usefixtures("init_components") async def test_error_no_domain_in_floor( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, ) -> None: @@ -618,7 +669,8 @@ async def test_error_no_domain_in_floor( ) -async def test_error_no_device_class(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_error_no_device_class(hass: HomeAssistant) -> None: """Test error message when no entities of a device class exist.""" # Create a cover entity that is not a window. 
# This ensures that the filtering below won't exit early because there are @@ -658,8 +710,9 @@ async def test_error_no_device_class(hass: HomeAssistant, init_components) -> No ) +@pytest.mark.usefixtures("init_components") async def test_error_no_device_class_in_area( - hass: HomeAssistant, init_components, area_registry: ar.AreaRegistry + hass: HomeAssistant, area_registry: ar.AreaRegistry ) -> None: """Test error message when no entities of a device class exist in an area.""" area_bedroom = area_registry.async_get_or_create("bedroom_id") @@ -676,7 +729,8 @@ async def test_error_no_device_class_in_area( ) -async def test_error_no_intent(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_error_no_intent(hass: HomeAssistant) -> None: """Test response with an intent match failure.""" with patch( "homeassistant.components.conversation.default_agent.recognize_all", @@ -696,8 +750,9 @@ async def test_error_no_intent(hass: HomeAssistant, init_components) -> None: ) +@pytest.mark.usefixtures("init_components") async def test_error_duplicate_names( - hass: HomeAssistant, init_components, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test error message when multiple devices have the same name (or alias).""" kitchen_light_1 = entity_registry.async_get_or_create("light", "demo", "1234") @@ -747,9 +802,9 @@ async def test_error_duplicate_names( ) +@pytest.mark.usefixtures("init_components") async def test_error_duplicate_names_in_area( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -805,7 +860,8 @@ async def test_error_duplicate_names_in_area( ) -async def test_error_wrong_state(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_error_wrong_state(hass: HomeAssistant) -> None: """Test error message when no entities are in the 
correct state.""" assert await async_setup_component(hass, media_player.DOMAIN, {}) @@ -824,9 +880,8 @@ async def test_error_wrong_state(hass: HomeAssistant, init_components) -> None: assert result.response.speech["plain"]["speech"] == "Sorry, no device is playing" -async def test_error_feature_not_supported( - hass: HomeAssistant, init_components -) -> None: +@pytest.mark.usefixtures("init_components") +async def test_error_feature_not_supported(hass: HomeAssistant) -> None: """Test error message when no devices support a required feature.""" assert await async_setup_component(hass, media_player.DOMAIN, {}) @@ -849,13 +904,28 @@ async def test_error_feature_not_supported( ) -async def test_error_no_timer_support(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_error_no_timer_support( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, +) -> None: """Test error message when a device does not support timers (no handler is registered).""" - device_id = "test_device" + area_kitchen = area_registry.async_create("kitchen") + + entry = MockConfigEntry() + entry.add_to_hass(hass) + device_kitchen = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "device-kitchen")}, + ) + device_registry.async_update_device(device_kitchen.id, area_id=area_kitchen.id) + device_id = device_kitchen.id # No timer handler is registered for the device result = await conversation.async_converse( - hass, "pause timer", None, Context(), None, device_id=device_id + hass, "set a 5 minute timer", None, Context(), None, device_id=device_id ) assert result.response.response_type == intent.IntentResponseType.ERROR @@ -866,7 +936,8 @@ async def test_error_no_timer_support(hass: HomeAssistant, init_components) -> N ) -async def test_error_timer_not_found(hass: HomeAssistant, init_components) -> None: 
+@pytest.mark.usefixtures("init_components") +async def test_error_timer_not_found(hass: HomeAssistant) -> None: """Test error message when a timer cannot be matched.""" device_id = "test_device" @@ -888,9 +959,9 @@ async def test_error_timer_not_found(hass: HomeAssistant, init_components) -> No ) +@pytest.mark.usefixtures("init_components") async def test_error_multiple_timers_matched( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, device_registry: dr.DeviceRegistry, ) -> None: @@ -938,8 +1009,9 @@ async def test_error_multiple_timers_matched( ) +@pytest.mark.usefixtures("init_components") async def test_no_states_matched_default_error( - hass: HomeAssistant, init_components, area_registry: ar.AreaRegistry + hass: HomeAssistant, area_registry: ar.AreaRegistry ) -> None: """Test default response when no states match and slots are missing.""" area_kitchen = area_registry.async_get_or_create("kitchen_id") @@ -966,9 +1038,9 @@ async def test_no_states_matched_default_error( ) +@pytest.mark.usefixtures("init_components") async def test_empty_aliases( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, @@ -1031,7 +1103,8 @@ async def test_empty_aliases( assert floors.values[0].text_in.text == floor_1.name -async def test_all_domains_loaded(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_all_domains_loaded(hass: HomeAssistant) -> None: """Test that sentences for all domains are always loaded.""" # light domain is not loaded @@ -1050,9 +1123,9 @@ async def test_all_domains_loaded(hass: HomeAssistant, init_components) -> None: ) +@pytest.mark.usefixtures("init_components") async def test_same_named_entities_in_different_areas( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -1147,9 +1220,9 @@ async def 
test_same_named_entities_in_different_areas( assert result.response.response_type == intent.IntentResponseType.QUERY_ANSWER +@pytest.mark.usefixtures("init_components") async def test_same_aliased_entities_in_different_areas( hass: HomeAssistant, - init_components, area_registry: ar.AreaRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -1238,7 +1311,8 @@ async def test_same_aliased_entities_in_different_areas( assert result.response.response_type == intent.IntentResponseType.QUERY_ANSWER -async def test_device_id_in_handler(hass: HomeAssistant, init_components) -> None: +@pytest.mark.usefixtures("init_components") +async def test_device_id_in_handler(hass: HomeAssistant) -> None: """Test that the default agent passes device_id to intent handler.""" device_id = "test_device" @@ -1270,9 +1344,8 @@ async def test_device_id_in_handler(hass: HomeAssistant, init_components) -> Non assert handler.device_id == device_id -async def test_name_wildcard_lower_priority( - hass: HomeAssistant, init_components -) -> None: +@pytest.mark.usefixtures("init_components") +async def test_name_wildcard_lower_priority(hass: HomeAssistant) -> None: """Test that the default agent does not prioritize a {name} slot when it's a wildcard.""" class OrderBeerIntentHandler(intent.IntentHandler): @@ -1322,3 +1395,685 @@ async def test_name_wildcard_lower_priority( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert not beer_handler.triggered assert food_handler.triggered + + +async def test_intent_entity_added_removed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with entities added later. + + We want to ensure that adding an entity later busts the cache + so that the new entity is available as well as any aliases. 
+ """ + context = Context() + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) + await hass.async_block_till_done() + hass.states.async_set("light.kitchen", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + result = await conversation.async_converse( + hass, "turn on my cool light", None, context + ) + + assert len(calls) == 1 + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Add an entity + entity_registry.async_get_or_create( + "light", "demo", "5678", suggested_object_id="late" + ) + hass.states.async_set("light.late", "off", {"friendly_name": "friendly light"}) + + result = await conversation.async_converse( + hass, "turn on friendly light", None, context + ) + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now add an alias + entity_registry.async_update_entity("light.late", aliases={"late added light"}) + + result = await conversation.async_converse( + hass, "turn on late added light", None, context + ) + + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now delete the entity + hass.states.async_remove("light.late") + + result = await conversation.async_converse( + hass, "turn on late added light", None, context + ) + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_intent_alias_added_removed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with aliases added later. + + We want to ensure that adding an alias later busts the cache + so that the new alias is available. 
+ """ + context = Context() + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + hass.states.async_set("light.kitchen", "off", {"friendly_name": "kitchen light"}) + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + assert len(calls) == 1 + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Add an alias + entity_registry.async_update_entity("light.kitchen", aliases={"late added alias"}) + + result = await conversation.async_converse( + hass, "turn on late added alias", None, context + ) + + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now remove the alieas + entity_registry.async_update_entity("light.kitchen", aliases={}) + + result = await conversation.async_converse( + hass, "turn on late added alias", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_intent_entity_renamed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with entities renamed later. + + We want to ensure that renaming an entity later busts the cache + so that the new name is used. 
+ """ + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + assert len(calls) == 1 + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Rename the entity + entity_registry.async_update_entity("light.kitchen", name="renamed light") + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on renamed light", None, context + ) + + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + +async def test_intent_entity_remove_custom_name( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that removing a custom name allows targeting the entity by its auto-generated name again.""" + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Should fail with auto-generated name + entity_registry.async_update_entity("light.kitchen", name="renamed light") + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert 
data["response"]["response_type"] == "error" + + # Now clear the custom name + entity_registry.async_update_entity("light.kitchen", name=None) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert len(calls) == 1 + + result = await conversation.async_converse( + hass, "turn on renamed light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_intent_entity_fail_if_unexposed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that an entity is not usable if unexposed.""" + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Unexpose the entity + expose_entity(hass, "light.kitchen", False) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert len(calls) == 0 + + +async def test_intent_entity_exposed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with manual expose. + + We want to ensure that manually exposing an entity later busts the cache + so that the new setting is used. 
+ """ + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Unexpose, then expose the entity + expose_entity(hass, "light.kitchen", False) + await hass.async_block_till_done() + expose_entity(hass, "light.kitchen", True) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert len(calls) == 1 + + +async def test_intent_conversion_not_expose_new( + hass: HomeAssistant, + init_components, + hass_admin_user: MockUser, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API when not exposing new entities.""" + # Disable exposing new entities to the default agent + expose_new(hass, False) + + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + # Expose the entity + expose_entity(hass, "light.kitchen", True) + await hass.async_block_till_done() + + result = await conversation.async_converse( + 
hass, "turn on kitchen light", None, context + ) + + assert len(calls) == 1 + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + +async def test_custom_sentences( + hass: HomeAssistant, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test custom sentences with a custom intent.""" + # Expecting testing_config/custom_sentences/en/beer.yaml + intent.async_register(hass, OrderBeerIntentHandler()) + + # Don't use "en" to test loading custom sentences with language variants. + language = "en-us" + + # Invoke intent via HTTP API + for beer_style in ("stout", "lager"): + result = await conversation.async_converse( + hass, + f"I'd like to order a {beer_style}, please", + None, + Context(), + language=language, + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert ( + data["response"]["speech"]["plain"]["speech"] + == f"You ordered a {beer_style}" + ) + + +async def test_custom_sentences_config( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test custom sentences with a custom intent in config.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component( + hass, + "conversation", + {"conversation": {"intents": {"StealthMode": ["engage stealth mode"]}}}, + ) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "intent_script", + { + "intent_script": { + "StealthMode": {"speech": {"text": "Stealth mode engaged"}} + } + }, + ) + + # Invoke intent via HTTP API + result = await conversation.async_converse( + hass, "engage stealth mode", None, Context(), None + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "Stealth mode engaged" + + +async def test_language_region(hass: 
HomeAssistant, init_components) -> None: + """Test regional languages.""" + hass.states.async_set("light.kitchen", "off") + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Add fake region + language = f"{hass.config.language}-YZ" + await hass.services.async_call( + "conversation", + "process", + { + conversation.ATTR_TEXT: "turn on the kitchen", + conversation.ATTR_LANGUAGE: language, + }, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": ["light.kitchen"]} + + +async def test_non_default_response(hass: HomeAssistant, init_components) -> None: + """Test intent response that is not the default.""" + hass.states.async_set("cover.front_door", "closed") + calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) + + agent = default_agent.async_get_default_agent(hass) + assert isinstance(agent, default_agent.DefaultAgent) + + result = await agent.async_process( + ConversationInput( + text="open the front door", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + ) + assert len(calls) == 1 + assert result.response.speech["plain"]["speech"] == "Opened" + + +async def test_turn_on_area( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test turning on an area.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + kitchen_area = area_registry.async_create("kitchen") + device_registry.async_update_device(device.id, area_id=kitchen_area.id) + + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="stove" + ) + 
entity_registry.async_update_entity( + "light.stove", aliases={"my stove light"}, area_id=kitchen_area.id + ) + hass.states.async_set("light.stove", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": ["light.stove"]} + + basement_area = area_registry.async_create("basement") + device_registry.async_update_device(device.id, area_id=basement_area.id) + entity_registry.async_update_entity("light.stove", area_id=basement_area.id) + calls.clear() + + # Test that the area is updated + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 0 + + # Test the new area works + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the basement"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": ["light.stove"]} + + +async def test_light_area_same_name( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test turning on a light with the same name as an area.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + kitchen_area = area_registry.async_create("kitchen") + device_registry.async_update_device(device.id, 
area_id=kitchen_area.id) + + kitchen_light = entity_registry.async_get_or_create( + "light", "demo", "1234", original_name="kitchen light" + ) + entity_registry.async_update_entity( + kitchen_light.entity_id, area_id=kitchen_area.id + ) + hass.states.async_set( + kitchen_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} + ) + + ceiling_light = entity_registry.async_get_or_create( + "light", "demo", "5678", original_name="ceiling light" + ) + entity_registry.async_update_entity( + ceiling_light.entity_id, area_id=kitchen_area.id + ) + hass.states.async_set( + ceiling_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "ceiling light"} + ) + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on kitchen light"}, + ) + await hass.async_block_till_done() + + # Should only turn on one light instead of all lights in the kitchen + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": [kitchen_light.entity_id]} + + +async def test_custom_sentences_priority( + hass: HomeAssistant, + hass_admin_user: MockUser, + snapshot: SnapshotAssertion, +) -> None: + """Test that user intents from custom_sentences have priority over builtin intents/sentences.""" + with tempfile.NamedTemporaryFile( + mode="w+", + encoding="utf-8", + suffix=".yaml", + dir=os.path.join(hass.config.config_dir, "custom_sentences", "en"), + ) as custom_sentences_file: + # Add a custom sentence that would match a builtin sentence. + # Custom sentences have priority. 
+ yaml.dump( + { + "language": "en", + "intents": { + "CustomIntent": {"data": [{"sentences": ["turn on the lamp"]}]} + }, + }, + custom_sentences_file, + ) + custom_sentences_file.flush() + custom_sentences_file.seek(0) + + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + assert await async_setup_component(hass, "light", {}) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "intent_script", + { + "intent_script": { + "CustomIntent": {"speech": {"text": "custom response"}} + } + }, + ) + + # Ensure that a "lamp" exists so that we can verify the custom intent + # overrides the builtin sentence. + hass.states.async_set("light.lamp", "off") + + result = await conversation.async_converse( + hass, + "turn on the lamp", + None, + Context(), + language=hass.config.language, + ) + + data = result.as_dict() + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "custom response" + + +async def test_config_sentences_priority( + hass: HomeAssistant, + hass_admin_user: MockUser, + snapshot: SnapshotAssertion, +) -> None: + """Test that user intents from configuration.yaml have priority over builtin intents/sentences.""" + # Add a custom sentence that would match a builtin sentence. + # Custom sentences have priority. + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "conversation", + {"conversation": {"intents": {"CustomIntent": ["turn on the lamp"]}}}, + ) + assert await async_setup_component(hass, "light", {}) + assert await async_setup_component( + hass, + "intent_script", + {"intent_script": {"CustomIntent": {"speech": {"text": "custom response"}}}}, + ) + + # Ensure that a "lamp" exists so that we can verify the custom intent + # overrides the builtin sentence. 
+ hass.states.async_set("light.lamp", "off") + + result = await conversation.async_converse( + hass, + "turn on the lamp", + None, + Context(), + language=hass.config.language, + ) + data = result.as_dict() + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "custom response" diff --git a/tests/components/conversation/test_default_agent_intents.py b/tests/components/conversation/test_default_agent_intents.py index b1c4a6d51af..7bae9c43f70 100644 --- a/tests/components/conversation/test_default_agent_intents.py +++ b/tests/components/conversation/test_default_agent_intents.py @@ -1,7 +1,9 @@ """Test intents for the default agent.""" +from datetime import datetime from unittest.mock import patch +from freezegun import freeze_time import pytest from homeassistant.components import ( @@ -121,6 +123,34 @@ async def test_cover_set_position( assert call.data == {"entity_id": entity_id, cover.ATTR_POSITION: 50} +async def test_cover_device_class( + hass: HomeAssistant, + init_components, +) -> None: + """Test the open position for covers by device class.""" + await cover_intent.async_setup_intents(hass) + + entity_id = f"{cover.DOMAIN}.front" + hass.states.async_set( + entity_id, STATE_CLOSED, attributes={"device_class": "garage"} + ) + async_expose_entity(hass, conversation.DOMAIN, entity_id, True) + + # Open service + calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) + result = await conversation.async_converse( + hass, "open the garage door", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "Opened the garage" + assert len(calls) == 1 + call = calls[0] + assert call.data == {"entity_id": entity_id} + + async def test_valve_intents( hass: HomeAssistant, init_components, @@ -413,3 +443,28 @@ async def 
test_todo_add_item_fr( assert mock_handle.call_args.args intent_obj = mock_handle.call_args.args[0] assert intent_obj.slots.get("item", {}).get("value", "").strip() == "farine" + + +@freeze_time(datetime(year=2013, month=9, day=17, hour=1, minute=2)) +async def test_date_time( + hass: HomeAssistant, + init_components, +) -> None: + """Test the date and time intents.""" + result = await conversation.async_converse( + hass, "what is the date", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "September 17th, 2013" + + result = await conversation.async_converse( + hass, "what time is it", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "1:02 AM" diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py new file mode 100644 index 00000000000..1431fd6c17b --- /dev/null +++ b/tests/components/conversation/test_http.py @@ -0,0 +1,524 @@ +"""The tests for the HTTP API of the Conversation component.""" + +from http import HTTPStatus +from typing import Any +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.conversation import default_agent +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ATTR_FRIENDLY_NAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers import area_registry as ar, entity_registry as er, intent +from homeassistant.setup import async_setup_component + +from . 
import MockAgent + +from tests.common import async_mock_service +from tests.typing import ClientSessionGenerator, WebSocketGenerator + +AGENT_ID_OPTIONS = [ + None, + # Old value of conversation.HOME_ASSISTANT_AGENT, + "homeassistant", + # Current value of conversation.HOME_ASSISTANT_AGENT, + "conversation.home_assistant", +] + + +class OrderBeerIntentHandler(intent.IntentHandler): + """Handle OrderBeer intent.""" + + intent_type = "OrderBeer" + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Return speech response.""" + beer_style = intent_obj.slots["beer_style"]["value"] + response = intent_obj.create_response() + response.async_set_speech(f"You ordered a {beer_style}") + return response + + +@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) +async def test_http_processing_intent( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + agent_id, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API.""" + # Add an alias + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) + hass.states.async_set("light.kitchen", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + data: dict[str, Any] = {"text": "turn on my cool light"} + if agent_id: + data["agent_id"] = agent_id + resp = await client.post("/api/conversation/process", json=data) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + + +async def test_http_api_no_match( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the HTTP conversation API with an intent match failure.""" + client = await hass_client() + + # Shouldn't match any intents + resp = 
await client.post("/api/conversation/process", json={"text": "do something"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert data["response"]["data"]["code"] == "no_intent_match" + + +async def test_http_api_handle_failure( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the HTTP conversation API with an error during handling.""" + client = await hass_client() + + hass.states.async_set("light.kitchen", "off") + + # Raise an error during intent handling + def async_handle_error(*args, **kwargs): + raise intent.IntentHandleError + + with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): + resp = await client.post( + "/api/conversation/process", json={"text": "turn on the kitchen"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert data["response"]["data"]["code"] == "failed_to_handle" + + +async def test_http_api_unexpected_failure( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the HTTP conversation API with an unexpected error during handling.""" + client = await hass_client() + + hass.states.async_set("light.kitchen", "off") + + # Raise an "unexpected" error during intent handling + def async_handle_error(*args, **kwargs): + raise intent.IntentUnexpectedError + + with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): + resp = await client.post( + "/api/conversation/process", json={"text": "turn on the kitchen"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert data["response"]["data"]["code"] == "unknown" + + +async 
def test_http_api_wrong_data( + hass: HomeAssistant, init_components, hass_client: ClientSessionGenerator +) -> None: + """Test the HTTP conversation API.""" + client = await hass_client() + + resp = await client.post("/api/conversation/process", json={"text": 123}) + assert resp.status == HTTPStatus.BAD_REQUEST + + resp = await client.post("/api/conversation/process", json={}) + assert resp.status == HTTPStatus.BAD_REQUEST + + +@pytest.mark.parametrize( + "payload", + [ + { + "text": "Test Text", + }, + { + "text": "Test Text", + "language": "test-language", + }, + { + "text": "Test Text", + "conversation_id": "test-conv-id", + }, + { + "text": "Test Text", + "conversation_id": None, + }, + { + "text": "Test Text", + "conversation_id": "test-conv-id", + "language": "test-language", + }, + { + "text": "Test Text", + "agent_id": "homeassistant", + }, + ], +) +async def test_ws_api( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + payload, + snapshot: SnapshotAssertion, +) -> None: + """Test the Websocket conversation API.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "conversation/process", **payload}) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + assert msg["result"]["response"]["data"]["code"] == "no_intent_match" + + +@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) +async def test_ws_prepare( + hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator, agent_id +) -> None: + """Test the Websocket prepare conversation API.""" + agent = default_agent.async_get_default_agent(hass) + assert isinstance(agent, default_agent.DefaultAgent) + + # No intents should be loaded yet + assert not agent._lang_intents.get(hass.config.language) + + client = await hass_ws_client(hass) + + msg = {"type": "conversation/prepare"} + if agent_id is not None: + msg["agent_id"] = agent_id + await client.send_json_auto_id(msg) + + msg = await 
client.receive_json() + + assert msg["success"] + + # Intents should now be load + assert agent._lang_intents.get(hass.config.language) + + +async def test_get_agent_list( + hass: HomeAssistant, + init_components, + mock_conversation_agent: MockAgent, + mock_agent_support_all: MockAgent, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test getting agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "conversation/agent/list"}) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "smurfish"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "en"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "en-UK"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "de"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "de", "country": "ch"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + +async def test_ws_hass_agent_debug( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, + snapshot: 
SnapshotAssertion, +) -> None: + """Test homeassistant agent debug websocket command.""" + client = await hass_ws_client(hass) + + kitchen_area = area_registry.async_create("kitchen") + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity( + "light.kitchen", + aliases={"my cool light"}, + area_id=kitchen_area.id, + ) + await hass.async_block_till_done() + hass.states.async_set("light.kitchen", "off") + + on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "turn on my cool light", + "turn my cool light off", + "turn on all lights in the kitchen", + "how many lights are on in the kitchen?", + "this will not match anything", # None in results + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + # Last sentence should be a failed match + assert msg["result"]["results"][-1] is None + + # Light state should not have been changed + assert len(on_calls) == 0 + assert len(off_calls) == 0 + + +async def test_ws_hass_agent_debug_null_result( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test homeassistant agent debug websocket command with a null result.""" + client = await hass_ws_client(hass) + + async def async_recognize(self, user_input, *args, **kwargs): + if user_input.text == "bad sentence": + return None + + return await self.async_recognize(user_input, *args, **kwargs) + + with patch( + "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize", + async_recognize, + ): + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "bad sentence", + ], + } + ) + + msg = await 
client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + assert msg["result"]["results"] == [None] + + +async def test_ws_hass_agent_debug_out_of_range( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test homeassistant agent debug websocket command with an out of range entity.""" + test_light = entity_registry.async_get_or_create("light", "demo", "1234") + hass.states.async_set( + test_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "test light"} + ) + + client = await hass_ws_client(hass) + + # Brightness is in range (0-100) + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "set test light brightness to 100%", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + results = msg["result"]["results"] + assert len(results) == 1 + assert results[0]["match"] + + # Brightness is out of range + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "set test light brightness to 1001%", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + results = msg["result"]["results"] + assert len(results) == 1 + assert not results[0]["match"] + + # Name matched, but brightness didn't + assert results[0]["slots"] == {"name": "test light"} + assert results[0]["unmatched_slots"] == {"brightness": 1001} + + +async def test_ws_hass_agent_debug_custom_sentence( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test homeassistant agent debug websocket command with a custom sentence.""" + # Expecting testing_config/custom_sentences/en/beer.yaml + intent.async_register(hass, 
OrderBeerIntentHandler()) + + client = await hass_ws_client(hass) + + # Brightness is in range (0-100) + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "I'd like to order a lager, please.", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + debug_results = msg["result"].get("results", []) + assert len(debug_results) == 1 + assert debug_results[0].get("match") + assert debug_results[0].get("source") == "custom" + assert debug_results[0].get("file") == "en/beer.yaml" + + +async def test_ws_hass_agent_debug_sentence_trigger( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test homeassistant agent debug websocket command with a sentence trigger.""" + calls = async_mock_service(hass, "test", "automation") + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": { + "platform": "conversation", + "command": ["hello", "hello[ world]"], + }, + "action": { + "service": "test.automation", + "data_template": {"data": "{{ trigger }}"}, + }, + } + }, + ) + + client = await hass_ws_client(hass) + + # Use trigger sentence + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": ["hello world"], + } + ) + await hass.async_block_till_done() + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + debug_results = msg["result"].get("results", []) + assert len(debug_results) == 1 + assert debug_results[0].get("match") + assert debug_results[0].get("source") == "trigger" + assert debug_results[0].get("sentence_template") == "hello[ world]" + + # Trigger should not have been executed + assert len(calls) == 0 diff --git a/tests/components/conversation/test_init.py b/tests/components/conversation/test_init.py index 48f227e9497..34a8fce636d 100644 --- 
a/tests/components/conversation/test_init.py +++ b/tests/components/conversation/test_init.py @@ -1,7 +1,6 @@ """The tests for the Conversation component.""" from http import HTTPStatus -from typing import Any from unittest.mock import patch import pytest @@ -10,30 +9,16 @@ import voluptuous as vol from homeassistant.components import conversation from homeassistant.components.conversation import default_agent -from homeassistant.components.conversation.models import ConversationInput -from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_ON -from homeassistant.core import Context, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import ( - area_registry as ar, - device_registry as dr, - entity_registry as er, - intent, -) +from homeassistant.helpers import intent from homeassistant.setup import async_setup_component -from . import MockAgent, expose_entity, expose_new +from . 
import MockAgent -from tests.common import ( - MockConfigEntry, - MockUser, - async_mock_service, - setup_test_component_platform, -) -from tests.components.light.common import MockLight -from tests.typing import ClientSessionGenerator, WebSocketGenerator +from tests.common import MockUser, async_mock_service +from tests.typing import ClientSessionGenerator AGENT_ID_OPTIONS = [ None, @@ -44,460 +29,6 @@ AGENT_ID_OPTIONS = [ ] -class OrderBeerIntentHandler(intent.IntentHandler): - """Handle OrderBeer intent.""" - - intent_type = "OrderBeer" - - async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: - """Return speech response.""" - beer_style = intent_obj.slots["beer_style"]["value"] - response = intent_obj.create_response() - response.async_set_speech(f"You ordered a {beer_style}") - return response - - -@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) -async def test_http_processing_intent( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - agent_id, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API.""" - # Add an alias - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - hass.states.async_set("light.kitchen", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - data: dict[str, Any] = {"text": "turn on my cool light"} - if agent_id: - data["agent_id"] = agent_id - resp = await client.post("/api/conversation/process", json=data) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - - -async def test_http_processing_intent_target_ha_agent( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - 
mock_conversation_agent: MockAgent, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent can be processed via HTTP API with picking agent.""" - # Add an alias - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - hass.states.async_set("light.kitchen", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", - json={"text": "turn on my cool light", "agent_id": "homeassistant"}, - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - -async def test_http_processing_intent_entity_added_removed( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with entities added later. - - We want to ensure that adding an entity later busts the cache - so that the new entity is available as well as any aliases. 
- """ - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - hass.states.async_set("light.kitchen", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on my cool light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Add an entity - entity_registry.async_get_or_create( - "light", "demo", "5678", suggested_object_id="late" - ) - hass.states.async_set("light.late", "off", {"friendly_name": "friendly light"}) - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on friendly light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now add an alias - entity_registry.async_update_entity("light.late", aliases={"late added light"}) - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on late added light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now delete the entity - hass.states.async_remove("light.late") - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on late added light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_http_processing_intent_alias_added_removed( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - 
hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with aliases added later. - - We want to ensure that adding an alias later busts the cache - so that the new alias is available. - """ - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - hass.states.async_set("light.kitchen", "off", {"friendly_name": "kitchen light"}) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Add an alias - entity_registry.async_update_entity("light.kitchen", aliases={"late added alias"}) - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on late added alias"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now remove the alieas - entity_registry.async_update_entity("light.kitchen", aliases={}) - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on late added alias"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_http_processing_intent_entity_renamed( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with entities renamed later. 
- - We want to ensure that renaming an entity later busts the cache - so that the new name is used. - """ - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Rename the entity - entity_registry.async_update_entity("light.kitchen", name="renamed light") - await hass.async_block_till_done() - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on renamed light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - # Now clear the custom name - entity_registry.async_update_entity("light.kitchen", name=None) - await hass.async_block_till_done() - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", 
json={"text": "turn on renamed light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_http_processing_intent_entity_exposed( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with manual expose. - - We want to ensure that manually exposing an entity later busts the cache - so that the new setting is used. - """ - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on my cool light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Unexpose the entity - expose_entity(hass, "light.kitchen", False) - await hass.async_block_till_done() - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn 
on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on my cool light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - # Now expose the entity - expose_entity(hass, "light.kitchen", True) - await hass.async_block_till_done() - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on my cool light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - -async def test_http_processing_intent_conversion_not_expose_new( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API when not exposing new entities.""" - # Disable exposing new entities to the default agent - expose_new(hass, False) - - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - - resp = await 
client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - # Expose the entity - expose_entity(hass, "light.kitchen", True) - await hass.async_block_till_done() - - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - @pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) @pytest.mark.parametrize("sentence", ["turn on kitchen", "turn kitchen on"]) @pytest.mark.parametrize("conversation_id", ["my_new_conversation", None]) @@ -570,95 +101,7 @@ async def test_turn_off_intent(hass: HomeAssistant, init_components, sentence) - assert call.data == {"entity_id": ["light.kitchen"]} -async def test_http_api_no_match( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an intent match failure.""" - client = await hass_client() - - # Shouldn't match any intents - resp = await client.post("/api/conversation/process", json={"text": "do something"}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "no_intent_match" - - -async def test_http_api_handle_failure( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an error during handling.""" - client = await hass_client() - - hass.states.async_set("light.kitchen", "off") - - # Raise an error during intent handling - def async_handle_error(*args, **kwargs): - raise 
intent.IntentHandleError - - with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): - resp = await client.post( - "/api/conversation/process", json={"text": "turn on the kitchen"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "failed_to_handle" - - -async def test_http_api_unexpected_failure( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an unexpected error during handling.""" - client = await hass_client() - - hass.states.async_set("light.kitchen", "off") - - # Raise an "unexpected" error during intent handling - def async_handle_error(*args, **kwargs): - raise intent.IntentUnexpectedError - - with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): - resp = await client.post( - "/api/conversation/process", json={"text": "turn on the kitchen"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "unknown" - - -async def test_http_api_wrong_data( - hass: HomeAssistant, init_components, hass_client: ClientSessionGenerator -) -> None: - """Test the HTTP conversation API.""" - client = await hass_client() - - resp = await client.post("/api/conversation/process", json={"text": 123}) - assert resp.status == HTTPStatus.BAD_REQUEST - - resp = await client.post("/api/conversation/process", json={}) - assert resp.status == HTTPStatus.BAD_REQUEST - - +@pytest.mark.usefixtures("init_components") async def test_custom_agent( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -667,10 +110,6 @@ async def test_custom_agent( snapshot: SnapshotAssertion, ) -> None: """Test a custom conversation agent.""" - assert 
await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - assert await async_setup_component(hass, "intent", {}) - client = await hass_client() data = { @@ -699,162 +138,9 @@ async def test_custom_agent( ) -@pytest.mark.parametrize( - "payload", - [ - { - "text": "Test Text", - }, - { - "text": "Test Text", - "language": "test-language", - }, - { - "text": "Test Text", - "conversation_id": "test-conv-id", - }, - { - "text": "Test Text", - "conversation_id": None, - }, - { - "text": "Test Text", - "conversation_id": "test-conv-id", - "language": "test-language", - }, - { - "text": "Test Text", - "agent_id": "homeassistant", - }, - ], -) -async def test_ws_api( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - payload, - snapshot: SnapshotAssertion, -) -> None: - """Test the Websocket conversation API.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - client = await hass_ws_client(hass) - - await client.send_json_auto_id({"type": "conversation/process", **payload}) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - assert msg["result"]["response"]["data"]["code"] == "no_intent_match" - - -@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) -async def test_ws_prepare( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, agent_id -) -> None: - """Test the Websocket prepare conversation API.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - agent = default_agent.async_get_default_agent(hass) - assert isinstance(agent, default_agent.DefaultAgent) - - # No intents should be loaded yet - assert not agent._lang_intents.get(hass.config.language) - - client = await hass_ws_client(hass) - - msg = {"type": "conversation/prepare"} - if agent_id is not None: - msg["agent_id"] = agent_id 
- await client.send_json_auto_id(msg) - - msg = await client.receive_json() - - assert msg["success"] - - # Intents should now be load - assert agent._lang_intents.get(hass.config.language) - - -async def test_custom_sentences( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - snapshot: SnapshotAssertion, -) -> None: - """Test custom sentences with a custom intent.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - assert await async_setup_component(hass, "intent", {}) - - # Expecting testing_config/custom_sentences/en/beer.yaml - intent.async_register(hass, OrderBeerIntentHandler()) - - # Don't use "en" to test loading custom sentences with language variants. - language = "en-us" - - # Invoke intent via HTTP API - client = await hass_client() - for beer_style in ("stout", "lager"): - resp = await client.post( - "/api/conversation/process", - json={ - "text": f"I'd like to order a {beer_style}, please", - "language": language, - }, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - assert ( - data["response"]["speech"]["plain"]["speech"] - == f"You ordered a {beer_style}" - ) - - -async def test_custom_sentences_config( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - snapshot: SnapshotAssertion, -) -> None: - """Test custom sentences with a custom intent in config.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component( - hass, - "conversation", - {"conversation": {"intents": {"StealthMode": ["engage stealth mode"]}}}, - ) - assert await async_setup_component(hass, "intent", {}) - assert await async_setup_component( - hass, - "intent_script", - { - "intent_script": { - "StealthMode": {"speech": {"text": "Stealth mode engaged"}} - } - }, - ) - - # 
Invoke intent via HTTP API - client = await hass_client() - resp = await client.post( - "/api/conversation/process", - json={"text": "engage stealth mode"}, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - assert data["response"]["speech"]["plain"]["speech"] == "Stealth mode engaged" - - -async def test_prepare_reload(hass: HomeAssistant) -> None: +async def test_prepare_reload(hass: HomeAssistant, init_components) -> None: """Test calling the reload service.""" language = hass.config.language - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) # Load intents agent = default_agent.async_get_default_agent(hass) @@ -890,181 +176,7 @@ async def test_prepare_fail(hass: HomeAssistant) -> None: await agent.async_prepare("not-a-language") # Confirm no intents were loaded - assert not agent._lang_intents.get("not-a-language") - - -async def test_language_region(hass: HomeAssistant, init_components) -> None: - """Test calling the turn on intent.""" - hass.states.async_set("light.kitchen", "off") - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - # Add fake region - language = f"{hass.config.language}-YZ" - await hass.services.async_call( - "conversation", - "process", - { - conversation.ATTR_TEXT: "turn on the kitchen", - conversation.ATTR_LANGUAGE: language, - }, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.kitchen"]} - - -async def test_non_default_response(hass: HomeAssistant, init_components) -> None: - """Test intent response that is not the default.""" - hass.states.async_set("cover.front_door", "closed") - calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) - - agent = 
default_agent.async_get_default_agent(hass) - assert isinstance(agent, default_agent.DefaultAgent) - - result = await agent.async_process( - ConversationInput( - text="open the front door", - context=Context(), - conversation_id=None, - device_id=None, - language=hass.config.language, - agent_id=None, - ) - ) - assert len(calls) == 1 - assert result.response.speech["plain"]["speech"] == "Opened" - - -async def test_turn_on_area( - hass: HomeAssistant, - init_components, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test turning on an area.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - - device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - - kitchen_area = area_registry.async_create("kitchen") - device_registry.async_update_device(device.id, area_id=kitchen_area.id) - - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="stove" - ) - entity_registry.async_update_entity( - "light.stove", aliases={"my stove light"}, area_id=kitchen_area.id - ) - hass.states.async_set("light.stove", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.stove"]} - - basement_area = area_registry.async_create("basement") - device_registry.async_update_device(device.id, area_id=basement_area.id) - entity_registry.async_update_entity("light.stove", area_id=basement_area.id) - calls.clear() - - # Test that the area is updated - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: 
"turn on lights in the kitchen"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 0 - - # Test the new area works - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on lights in the basement"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.stove"]} - - -async def test_light_area_same_name( - hass: HomeAssistant, - init_components, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test turning on a light with the same name as an area.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - - device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - - kitchen_area = area_registry.async_create("kitchen") - device_registry.async_update_device(device.id, area_id=kitchen_area.id) - - kitchen_light = entity_registry.async_get_or_create( - "light", "demo", "1234", original_name="kitchen light" - ) - entity_registry.async_update_entity( - kitchen_light.entity_id, area_id=kitchen_area.id - ) - hass.states.async_set( - kitchen_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} - ) - - ceiling_light = entity_registry.async_get_or_create( - "light", "demo", "5678", original_name="ceiling light" - ) - entity_registry.async_update_entity( - ceiling_light.entity_id, area_id=kitchen_area.id - ) - hass.states.async_set( - ceiling_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "ceiling light"} - ) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on kitchen light"}, - ) - await hass.async_block_till_done() - - # Should only turn on one light 
instead of all lights in the kitchen - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": [kitchen_light.entity_id]} + assert agent._lang_intents.get("not-a-language") is default_agent.ERROR_SENTINEL async def test_agent_id_validator_invalid_agent( @@ -1078,64 +190,6 @@ async def test_agent_id_validator_invalid_agent( conversation.agent_id_validator("conversation.home_assistant") -async def test_get_agent_list( - hass: HomeAssistant, - init_components, - mock_conversation_agent: MockAgent, - mock_agent_support_all: MockAgent, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test getting agent info.""" - client = await hass_ws_client(hass) - - await client.send_json_auto_id({"type": "conversation/agent/list"}) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "smurfish"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "en"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "en-UK"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "de"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "de", "country": "ch"} - ) 
- msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - async def test_get_agent_info( hass: HomeAssistant, init_components, @@ -1161,231 +215,16 @@ async def test_get_agent_info( assert agent_info == snapshot -async def test_ws_hass_agent_debug( +@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) +async def test_prepare_agent( hass: HomeAssistant, init_components, - hass_ws_client: WebSocketGenerator, - area_registry: ar.AreaRegistry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, + agent_id: str, ) -> None: - """Test homeassistant agent debug websocket command.""" - client = await hass_ws_client(hass) - - kitchen_area = area_registry.async_create("kitchen") - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity( - "light.kitchen", - aliases={"my cool light"}, - area_id=kitchen_area.id, - ) - hass.states.async_set("light.kitchen", "off") - - on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") - - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "turn on my cool light", - "turn my cool light off", - "turn on all lights in the kitchen", - "how many lights are on in the kitchen?", - "this will not match anything", # None in results - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - # Last sentence should be a failed match - assert msg["result"]["results"][-1] is None - - # Light state should not have been changed - assert len(on_calls) == 0 - assert len(off_calls) == 0 - - -async def test_ws_hass_agent_debug_null_result( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test homeassistant agent debug websocket 
command with a null result.""" - client = await hass_ws_client(hass) - - async def async_recognize(self, user_input, *args, **kwargs): - if user_input.text == "bad sentence": - return None - - return await self.async_recognize(user_input, *args, **kwargs) - + """Test prepare agent.""" with patch( - "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize", - async_recognize, - ): - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "bad sentence", - ], - } - ) + "homeassistant.components.conversation.default_agent.DefaultAgent.async_prepare" + ) as mock_prepare: + await conversation.async_prepare_agent(hass, agent_id, "en") - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - assert msg["result"]["results"] == [None] - - -async def test_ws_hass_agent_debug_out_of_range( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test homeassistant agent debug websocket command with an out of range entity.""" - test_light = entity_registry.async_get_or_create("light", "demo", "1234") - hass.states.async_set( - test_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "test light"} - ) - - client = await hass_ws_client(hass) - - # Brightness is in range (0-100) - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "set test light brightness to 100%", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - results = msg["result"]["results"] - assert len(results) == 1 - assert results[0]["match"] - - # Brightness is out of range - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "set test light brightness to 1001%", - ], - } - ) - - msg = await client.receive_json() - 
- assert msg["success"] - assert msg["result"] == snapshot - - results = msg["result"]["results"] - assert len(results) == 1 - assert not results[0]["match"] - - # Name matched, but brightness didn't - assert results[0]["slots"] == {"name": "test light"} - assert results[0]["unmatched_slots"] == {"brightness": 1001} - - -async def test_ws_hass_agent_debug_custom_sentence( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test homeassistant agent debug websocket command with a custom sentence.""" - # Expecting testing_config/custom_sentences/en/beer.yaml - intent.async_register(hass, OrderBeerIntentHandler()) - - client = await hass_ws_client(hass) - - # Brightness is in range (0-100) - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "I'd like to order a lager, please.", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - debug_results = msg["result"].get("results", []) - assert len(debug_results) == 1 - assert debug_results[0].get("match") - assert debug_results[0].get("source") == "custom" - assert debug_results[0].get("file") == "en/beer.yaml" - - -async def test_ws_hass_agent_debug_sentence_trigger( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test homeassistant agent debug websocket command with a sentence trigger.""" - calls = async_mock_service(hass, "test", "automation") - assert await async_setup_component( - hass, - "automation", - { - "automation": { - "trigger": { - "platform": "conversation", - "command": ["hello", "hello[ world]"], - }, - "action": { - "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, - }, - } - }, - ) - - client = await hass_ws_client(hass) - - # Use trigger sentence - await 
client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": ["hello world"], - } - ) - await hass.async_block_till_done() - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - debug_results = msg["result"].get("results", []) - assert len(debug_results) == 1 - assert debug_results[0].get("match") - assert debug_results[0].get("source") == "trigger" - assert debug_results[0].get("sentence_template") == "hello[ world]" - - # Trigger should not have been executed - assert len(calls) == 0 + assert len(mock_prepare.mock_calls) == 1 diff --git a/tests/components/conversation/test_trace.py b/tests/components/conversation/test_trace.py index c586eb8865d..59cd10d2510 100644 --- a/tests/components/conversation/test_trace.py +++ b/tests/components/conversation/test_trace.py @@ -33,7 +33,7 @@ async def test_converation_trace( assert traces last_trace = traces[-1].as_dict() assert last_trace.get("events") - assert len(last_trace.get("events")) == 1 + assert len(last_trace.get("events")) == 2 trace_event = last_trace["events"][0] assert ( trace_event.get("event_type") == trace.ConversationTraceEventType.ASYNC_PROCESS @@ -50,6 +50,16 @@ async def test_converation_trace( == "Added apples" ) + trace_event = last_trace["events"][1] + assert trace_event.get("event_type") == trace.ConversationTraceEventType.TOOL_CALL + assert trace_event.get("data") == { + "intent_name": "HassListAddItem", + "slots": { + "name": "Shopping List", + "item": "apples ", + }, + } + async def test_converation_trace_error( hass: HomeAssistant, diff --git a/tests/components/conversation/test_trigger.py b/tests/components/conversation/test_trigger.py index c5d4382e917..3c3e58e7136 100644 --- a/tests/components/conversation/test_trigger.py +++ b/tests/components/conversation/test_trigger.py @@ -11,16 +11,9 @@ from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.helpers import trigger from 
homeassistant.setup import async_setup_component -from tests.common import async_mock_service from tests.typing import WebSocketGenerator -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) async def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" @@ -29,7 +22,7 @@ async def setup_comp(hass: HomeAssistant) -> None: async def test_if_fires_on_event( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events.""" assert await async_setup_component( @@ -62,8 +55,10 @@ async def test_if_fires_on_event( assert service_response["response"]["speech"]["plain"]["speech"] == "Done" await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["data"] == { + assert len(service_calls) == 2 + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -75,7 +70,7 @@ async def test_if_fires_on_event( } -async def test_response(hass: HomeAssistant, setup_comp) -> None: +async def test_response(hass: HomeAssistant) -> None: """Test the conversation response action.""" response = "I'm sorry, Dave. 
I'm afraid I can't do that" assert await async_setup_component( @@ -106,7 +101,7 @@ async def test_response(hass: HomeAssistant, setup_comp) -> None: assert service_response["response"]["speech"]["plain"]["speech"] == response -async def test_empty_response(hass: HomeAssistant, setup_comp) -> None: +async def test_empty_response(hass: HomeAssistant) -> None: """Test the conversation response action with an empty response.""" assert await async_setup_component( hass, @@ -137,7 +132,7 @@ async def test_empty_response(hass: HomeAssistant, setup_comp) -> None: async def test_response_same_sentence( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the conversation response action with multiple triggers using the same sentence.""" assert await async_setup_component( @@ -186,8 +181,10 @@ async def test_response_same_sentence( assert service_response["response"]["speech"]["plain"]["speech"] == "response 1" # Service should still have been called - assert len(calls) == 1 - assert calls[0].data["data"] == { + assert len(service_calls) == 2 + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["data"] == { "alias": None, "id": "trigger1", "idx": "0", @@ -201,8 +198,6 @@ async def test_response_same_sentence( async def test_response_same_sentence_with_error( hass: HomeAssistant, - calls: list[ServiceCall], - setup_comp: None, caplog: pytest.LogCaptureFixture, ) -> None: """Test the conversation response action with multiple triggers using the same sentence and an error.""" @@ -253,7 +248,7 @@ async def test_response_same_sentence_with_error( async def test_subscribe_trigger_does_not_interfere_with_responses( - hass: HomeAssistant, setup_comp, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test that subscribing to a trigger from the websocket API does not 
interfere with responses.""" websocket_client = await hass_ws_client() @@ -310,7 +305,7 @@ async def test_subscribe_trigger_does_not_interfere_with_responses( async def test_same_trigger_multiple_sentences( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test matching of multiple sentences from the same trigger.""" assert await async_setup_component( @@ -341,8 +336,10 @@ async def test_same_trigger_multiple_sentences( # Only triggers once await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["data"] == { + assert len(service_calls) == 2 + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -355,7 +352,7 @@ async def test_same_trigger_multiple_sentences( async def test_same_sentence_multiple_triggers( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test use of the same sentence in multiple triggers.""" assert await async_setup_component( @@ -403,11 +400,12 @@ async def test_same_sentence_multiple_triggers( ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 3 # The calls may come in any order call_datas: set[tuple[str, str, str]] = set() - for call in calls: + service_calls.pop(0) # First call is the call to conversation.process + for call in service_calls: call_data = call.data["data"] call_datas.add((call_data["id"], call_data["platform"], call_data["sentence"])) @@ -474,9 +472,7 @@ async def test_fails_on_no_sentences(hass: HomeAssistant) -> None: ) -async def test_wildcards( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None -) -> None: +async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) -> None: """Test wildcards in trigger sentences.""" assert await 
async_setup_component( hass, @@ -507,8 +503,10 @@ async def test_wildcards( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["data"] == { + assert len(service_calls) == 2 + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -536,8 +534,6 @@ async def test_wildcards( async def test_trigger_with_device_id(hass: HomeAssistant) -> None: """Test that a trigger receives a device_id.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) assert await async_setup_component( hass, "automation", diff --git a/tests/components/coolmaster/conftest.py b/tests/components/coolmaster/conftest.py index 15670af4bc8..27a801288b0 100644 --- a/tests/components/coolmaster/conftest.py +++ b/tests/components/coolmaster/conftest.py @@ -18,7 +18,7 @@ DEFAULT_INFO: dict[str, str] = { "version": "1", } -TEST_UNITS: dict[dict[str, Any]] = { +TEST_UNITS: dict[str, dict[str, Any]] = { "L1.100": { "is_on": False, "thermostat": 20, diff --git a/tests/components/cover/test_device_condition.py b/tests/components/cover/test_device_condition.py index 545bdd6587e..8c1d2d1c9a7 100644 --- a/tests/components/cover/test_device_condition.py +++ b/tests/components/cover/test_device_condition.py @@ -26,7 +26,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -36,12 +35,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", 
"expected_condition_types"), [ @@ -359,7 +352,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -473,36 +466,36 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_open - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_open - event - test_event1" hass.states.async_set(entry.entity_id, STATE_CLOSED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_closed - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_closed - event - test_event2" hass.states.async_set(entry.entity_id, STATE_OPENING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_opening - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_opening - event - test_event3" hass.states.async_set(entry.entity_id, STATE_CLOSING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "is_closing - event - test_event4" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "is_closing - event - test_event4" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for 
turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -550,15 +543,15 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_open - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_open - event - test_event1" async def test_if_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, mock_cover_entities: list[MockCover], ) -> None: @@ -676,10 +669,10 @@ async def test_if_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" - assert calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" - assert calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" + assert service_calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" + assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} @@ -690,9 +683,9 @@ async def test_if_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 5 - assert calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" - assert calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" + assert len(service_calls) == 5 + assert service_calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" 
hass.states.async_set( ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} @@ -701,14 +694,14 @@ async def test_if_position( hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 6 - assert calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" + assert len(service_calls) == 6 + assert service_calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" hass.states.async_set(ent.entity_id, STATE_UNAVAILABLE, attributes={}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 7 - assert calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert len(service_calls) == 7 + assert service_calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" for record in caplog.records: assert record.levelname in ("DEBUG", "INFO") @@ -718,7 +711,7 @@ async def test_if_tilt_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, mock_cover_entities: list[MockCover], ) -> None: @@ -836,10 +829,10 @@ async def test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" - assert calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" - assert calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" + assert service_calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" + assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} @@ -850,9 +843,9 @@ async def 
test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 5 - assert calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" - assert calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" + assert len(service_calls) == 5 + assert service_calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} @@ -863,14 +856,14 @@ async def test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 6 - assert calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" + assert len(service_calls) == 6 + assert service_calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" hass.states.async_set(ent.entity_id, STATE_UNAVAILABLE, attributes={}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 7 - assert calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert len(service_calls) == 7 + assert service_calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" for record in caplog.records: assert record.levelname in ("DEBUG", "INFO") diff --git a/tests/components/cover/test_device_trigger.py b/tests/components/cover/test_device_trigger.py index 419eea05f9f..5eb8cd484b2 100644 --- a/tests/components/cover/test_device_trigger.py +++ b/tests/components/cover/test_device_trigger.py @@ -29,7 +29,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -39,12 +38,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" 
-@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_trigger_types"), [ @@ -381,7 +374,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for state triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -496,36 +489,36 @@ async def test_if_fires_on_state_change( # Fake that the entity is opened. hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"opened - device - {entry.entity_id} - closed - open - None" ) # Fake that the entity is closed. hass.states.async_set(entry.entity_id, STATE_CLOSED) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"closed - device - {entry.entity_id} - open - closed - None" ) # Fake that the entity is opening. hass.states.async_set(entry.entity_id, STATE_OPENING) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert ( - calls[2].data["some"] + service_calls[2].data["some"] == f"opening - device - {entry.entity_id} - closed - opening - None" ) # Fake that the entity is closing. 
hass.states.async_set(entry.entity_id, STATE_CLOSING) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 assert ( - calls[3].data["some"] + service_calls[3].data["some"] == f"closing - device - {entry.entity_id} - opening - closing - None" ) @@ -534,7 +527,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for state triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -583,9 +576,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is opened. hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"opened - device - {entry.entity_id} - closed - open - None" ) @@ -594,7 +587,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -640,17 +633,17 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - closed - open - 0:00:05" ) @@ -660,7 +653,7 @@ 
async def test_if_fires_on_position( device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mock_cover_entities: list[MockCover], - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for position triggers.""" setup_test_component_platform(hass, DOMAIN, mock_cover_entities) @@ -769,9 +762,13 @@ async def test_if_fires_on_position( ent.entity_id, STATE_OPEN, attributes={"current_position": 50} ) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert sorted( - [calls[0].data["some"], calls[1].data["some"], calls[2].data["some"]] + [ + service_calls[0].data["some"], + service_calls[1].data["some"], + service_calls[2].data["some"], + ] ) == sorted( [ ( @@ -791,9 +788,9 @@ async def test_if_fires_on_position( ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} ) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 assert ( - calls[3].data["some"] + service_calls[3].data["some"] == f"is_pos_lt_90 - device - {entry.entity_id} - closed - closed - None" ) @@ -801,9 +798,9 @@ async def test_if_fires_on_position( ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} ) await hass.async_block_till_done() - assert len(calls) == 5 + assert len(service_calls) == 5 assert ( - calls[4].data["some"] + service_calls[4].data["some"] == f"is_pos_gt_45 - device - {entry.entity_id} - closed - closed - None" ) @@ -812,7 +809,7 @@ async def test_if_fires_on_tilt_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_cover_entities: list[MockCover], ) -> None: """Test for tilt position triggers.""" @@ -924,9 +921,13 @@ async def test_if_fires_on_tilt_position( ent.entity_id, STATE_OPEN, attributes={"current_tilt_position": 50} ) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 
assert sorted( - [calls[0].data["some"], calls[1].data["some"], calls[2].data["some"]] + [ + service_calls[0].data["some"], + service_calls[1].data["some"], + service_calls[2].data["some"], + ] ) == sorted( [ ( @@ -946,9 +947,9 @@ async def test_if_fires_on_tilt_position( ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} ) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 assert ( - calls[3].data["some"] + service_calls[3].data["some"] == f"is_pos_lt_90 - device - {entry.entity_id} - closed - closed - None" ) @@ -956,8 +957,8 @@ async def test_if_fires_on_tilt_position( ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} ) await hass.async_block_till_done() - assert len(calls) == 5 + assert len(service_calls) == 5 assert ( - calls[4].data["some"] + service_calls[4].data["some"] == f"is_pos_gt_45 - device - {entry.entity_id} - closed - closed - None" ) diff --git a/tests/components/cover/test_init.py b/tests/components/cover/test_init.py index 7da6c6efe21..37740260c2f 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -156,7 +156,7 @@ def is_closing(hass, ent): return hass.states.is_state(ent.entity_id, STATE_CLOSING) -def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, str]]: +def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: return [(enum_field, constant_prefix) for enum_field in enum] diff --git a/tests/components/cover/test_intent.py b/tests/components/cover/test_intent.py index 8ee621596db..1cf23c4c3df 100644 --- a/tests/components/cover/test_intent.py +++ b/tests/components/cover/test_intent.py @@ -1,5 +1,9 @@ """The tests for the cover platform.""" +from typing import Any + +import pytest + from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, DOMAIN, @@ -16,15 +20,24 @@ from homeassistant.setup import async_setup_component from tests.common import async_mock_service 
-async def test_open_cover_intent(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "garage door"}}), + ({"device_class": {"value": "garage"}}), + ], +) +async def test_open_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassOpenCover intent.""" await cover_intent.async_setup_intents(hass) - hass.states.async_set(f"{DOMAIN}.garage_door", STATE_CLOSED) + hass.states.async_set( + f"{DOMAIN}.garage_door", STATE_CLOSED, attributes={"device_class": "garage"} + ) calls = async_mock_service(hass, DOMAIN, SERVICE_OPEN_COVER) response = await intent.async_handle( - hass, "test", cover_intent.INTENT_OPEN_COVER, {"name": {"value": "garage door"}} + hass, "test", cover_intent.INTENT_OPEN_COVER, slots ) await hass.async_block_till_done() @@ -36,18 +49,27 @@ async def test_open_cover_intent(hass: HomeAssistant) -> None: assert call.data == {"entity_id": f"{DOMAIN}.garage_door"} -async def test_close_cover_intent(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "garage door"}}), + ({"device_class": {"value": "garage"}}), + ], +) +async def test_close_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassCloseCover intent.""" await cover_intent.async_setup_intents(hass) - hass.states.async_set(f"{DOMAIN}.garage_door", STATE_OPEN) + hass.states.async_set( + f"{DOMAIN}.garage_door", STATE_OPEN, attributes={"device_class": "garage"} + ) calls = async_mock_service(hass, DOMAIN, SERVICE_CLOSE_COVER) response = await intent.async_handle( hass, "test", cover_intent.INTENT_CLOSE_COVER, - {"name": {"value": "garage door"}}, + slots, ) await hass.async_block_till_done() @@ -59,13 +81,22 @@ async def test_close_cover_intent(hass: HomeAssistant) -> None: assert call.data == {"entity_id": f"{DOMAIN}.garage_door"} -async def test_set_cover_position(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "test 
cover"}, "position": {"value": 50}}), + ({"device_class": {"value": "shade"}, "position": {"value": 50}}), + ], +) +async def test_set_cover_position(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassSetPosition intent for covers.""" assert await async_setup_component(hass, "intent", {}) entity_id = f"{DOMAIN}.test_cover" hass.states.async_set( - entity_id, STATE_CLOSED, attributes={ATTR_CURRENT_POSITION: 0} + entity_id, + STATE_CLOSED, + attributes={ATTR_CURRENT_POSITION: 0, "device_class": "shade"}, ) calls = async_mock_service(hass, DOMAIN, SERVICE_SET_COVER_POSITION) @@ -73,7 +104,7 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: hass, "test", intent.INTENT_SET_POSITION, - {"name": {"value": "test cover"}, "position": {"value": 50}}, + slots, ) await hass.async_block_till_done() diff --git a/tests/components/cpuspeed/conftest.py b/tests/components/cpuspeed/conftest.py index e3ea1432659..d9079079ba2 100644 --- a/tests/components/cpuspeed/conftest.py +++ b/tests/components/cpuspeed/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.cpuspeed.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/crownstone/test_config_flow.py b/tests/components/crownstone/test_config_flow.py index be9086e02da..5dd00e7baff 100644 --- a/tests/components/crownstone/test_config_flow.py +++ b/tests/components/crownstone/test_config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from crownstone_cloud.cloud_models.spheres import Spheres @@ -11,7 +12,6 @@ from crownstone_cloud.exceptions import ( ) import pytest from serial.tools.list_ports_common import ListPortInfo -from typing_extensions import Generator from 
homeassistant.components import usb from homeassistant.components.crownstone.const import ( diff --git a/tests/components/daikin/test_config_flow.py b/tests/components/daikin/test_config_flow.py index 6d957384d4d..5c432e111dd 100644 --- a/tests/components/daikin/test_config_flow.py +++ b/tests/components/daikin/test_config_flow.py @@ -28,9 +28,11 @@ def mock_daikin(): """Mock the init function in pydaikin.""" return Appliance - with patch("homeassistant.components.daikin.config_flow.Appliance") as Appliance: + with patch( + "homeassistant.components.daikin.config_flow.DaikinFactory" + ) as Appliance: type(Appliance).mac = PropertyMock(return_value="AABBCCDDEEFF") - Appliance.factory.side_effect = mock_daikin_factory + Appliance.side_effect = mock_daikin_factory yield Appliance @@ -90,7 +92,7 @@ async def test_abort_if_already_setup(hass: HomeAssistant, mock_daikin) -> None: ) async def test_device_abort(hass: HomeAssistant, mock_daikin, s_effect, reason) -> None: """Test device abort.""" - mock_daikin.factory.side_effect = s_effect + mock_daikin.side_effect = s_effect result = await hass.config_entries.flow.async_init( "daikin", diff --git a/tests/components/daikin/test_init.py b/tests/components/daikin/test_init.py index d7d754dacd2..b3d18467d33 100644 --- a/tests/components/daikin/test_init.py +++ b/tests/components/daikin/test_init.py @@ -27,8 +27,8 @@ def mock_daikin(): """Mock the init function in pydaikin.""" return Appliance - with patch("homeassistant.components.daikin.Appliance") as Appliance: - Appliance.factory.side_effect = mock_daikin_factory + with patch("homeassistant.components.daikin.DaikinFactory") as Appliance: + Appliance.side_effect = mock_daikin_factory type(Appliance).update_status = AsyncMock() type(Appliance).device_ip = PropertyMock(return_value=HOST) type(Appliance).inside_temperature = PropertyMock(return_value=22) @@ -208,7 +208,7 @@ async def test_client_connection_error(hass: HomeAssistant, mock_daikin) -> None ) 
config_entry.add_to_hass(hass) - mock_daikin.factory.side_effect = ClientConnectionError + mock_daikin.side_effect = ClientConnectionError await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -224,7 +224,7 @@ async def test_timeout_error(hass: HomeAssistant, mock_daikin) -> None: ) config_entry.add_to_hass(hass) - mock_daikin.factory.side_effect = TimeoutError + mock_daikin.side_effect = TimeoutError await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/deconz/conftest.py b/tests/components/deconz/conftest.py index d0f0f11c99b..fd3003b96ef 100644 --- a/tests/components/deconz/conftest.py +++ b/tests/components/deconz/conftest.py @@ -2,30 +2,304 @@ from __future__ import annotations +from collections.abc import Callable, Coroutine, Generator +from types import MappingProxyType +from typing import Any, Protocol from unittest.mock import patch from pydeconz.websocket import Signal import pytest +from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 +from tests.test_util.aiohttp import AiohttpClientMocker + +type ConfigEntryFactoryType = Callable[ + [MockConfigEntry], Coroutine[Any, Any, MockConfigEntry] +] +type WebsocketDataType = Callable[[dict[str, Any]], Coroutine[Any, Any, None]] +type WebsocketStateType = Callable[[str], Coroutine[Any, Any, None]] -@pytest.fixture(autouse=True) -def mock_deconz_websocket(): +class _WebsocketMock(Protocol): + async def __call__( + self, data: dict[str, Any] | None = None, state: str = "" + ) -> None: ... 
+ + +# Config entry fixtures + +API_KEY = "1234567890ABCDEF" +BRIDGE_ID = "01234E56789A" +HOST = "1.2.3.4" +PORT = 80 + + +@pytest.fixture(name="config_entry") +def fixture_config_entry( + config_entry_data: MappingProxyType[str, Any], + config_entry_options: MappingProxyType[str, Any], + config_entry_source: str, +) -> MockConfigEntry: + """Define a config entry fixture.""" + return MockConfigEntry( + domain=DECONZ_DOMAIN, + entry_id="1", + unique_id=BRIDGE_ID, + data=config_entry_data, + options=config_entry_options, + source=config_entry_source, + ) + + +@pytest.fixture(name="config_entry_data") +def fixture_config_entry_data() -> MappingProxyType[str, Any]: + """Define a config entry data fixture.""" + return { + CONF_API_KEY: API_KEY, + CONF_HOST: HOST, + CONF_PORT: PORT, + } + + +@pytest.fixture(name="config_entry_options") +def fixture_config_entry_options() -> MappingProxyType[str, Any]: + """Define a config entry options fixture.""" + return {} + + +@pytest.fixture(name="config_entry_source") +def fixture_config_entry_source() -> str: + """Define a config entry source fixture.""" + return SOURCE_USER + + +# Request mocks + + +@pytest.fixture(name="mock_put_request") +def fixture_put_request( + aioclient_mock: AiohttpClientMocker, config_entry_data: MappingProxyType[str, Any] +) -> Callable[[str, str], AiohttpClientMocker]: + """Mock a deCONZ put request.""" + _host = config_entry_data[CONF_HOST] + _port = config_entry_data[CONF_PORT] + _api_key = config_entry_data[CONF_API_KEY] + + def __mock_requests(path: str, host: str = "") -> AiohttpClientMocker: + url = f"http://{host or _host}:{_port}/api/{_api_key}{path}" + aioclient_mock.put(url, json={}, headers={"content-type": CONTENT_TYPE_JSON}) + return aioclient_mock + + return __mock_requests + + +@pytest.fixture(name="mock_requests") +def fixture_get_request( + aioclient_mock: AiohttpClientMocker, + config_entry_data: MappingProxyType[str, Any], + config_payload: dict[str, Any], + alarm_system_payload: 
dict[str, Any], + group_payload: dict[str, Any], + light_payload: dict[str, Any], + sensor_payload: dict[str, Any], + deconz_payload: dict[str, Any], +) -> Callable[[str], None]: + """Mock default deCONZ requests responses.""" + _host = config_entry_data[CONF_HOST] + _port = config_entry_data[CONF_PORT] + _api_key = config_entry_data[CONF_API_KEY] + + data = deconz_payload + data.setdefault("alarmsystems", alarm_system_payload) + data.setdefault("config", config_payload) + data.setdefault("groups", group_payload) + if "state" in light_payload: + light_payload = {"0": light_payload} + data.setdefault("lights", light_payload) + if "state" in sensor_payload or "config" in sensor_payload: + sensor_payload = {"0": sensor_payload} + data.setdefault("sensors", sensor_payload) + + def __mock_requests(host: str = "") -> None: + url = f"http://{host or _host}:{_port}/api/{_api_key}" + aioclient_mock.get( + url, + json=deconz_payload | {"config": config_payload}, + headers={ + "content-type": CONTENT_TYPE_JSON, + }, + ) + + return __mock_requests + + +# Request payload fixtures + + +@pytest.fixture(name="deconz_payload") +def fixture_data() -> dict[str, Any]: + """Combine multiple payloads with one fixture.""" + return {} + + +@pytest.fixture(name="alarm_system_payload") +def fixture_alarm_system_data() -> dict[str, Any]: + """Alarm system data.""" + return {} + + +@pytest.fixture(name="config_payload") +def fixture_config_data() -> dict[str, Any]: + """Config data.""" + return { + "bridgeid": BRIDGE_ID, + "ipaddress": HOST, + "mac": "00:11:22:33:44:55", + "modelid": "deCONZ", + "name": "deCONZ mock gateway", + "sw_version": "2.05.69", + "uuid": "1234", + "websocketport": 1234, + } + + +@pytest.fixture(name="group_payload") +def fixture_group_data() -> dict[str, Any]: + """Group data.""" + return {} + + +@pytest.fixture(name="light_payload") +def fixture_light_data() -> dict[str, Any]: + """Light data. 
+ + Should be + - one light data payload {"state": ...} + - multiple lights {"1": ..., "2": ...} + """ + return {} + + +@pytest.fixture(name="sensor_payload") +def fixture_sensor_data() -> dict[str, Any]: + """Sensor data. + + Should be + - one sensor data payload {"config": ..., "state": ...} ("0") + - multiple sensors {"1": ..., "2": ...} + """ + return {} + + +@pytest.fixture(name="config_entry_factory") +async def fixture_config_entry_factory( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_requests: Callable[[str], None], +) -> ConfigEntryFactoryType: + """Fixture factory that can set up UniFi network integration.""" + + async def __mock_setup_config_entry( + entry: MockConfigEntry = config_entry, + ) -> MockConfigEntry: + entry.add_to_hass(hass) + mock_requests(entry.data[CONF_HOST]) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return entry + + return __mock_setup_config_entry + + +@pytest.fixture(name="config_entry_setup") +async def fixture_config_entry_setup( + config_entry_factory: ConfigEntryFactoryType, +) -> MockConfigEntry: + """Fixture providing a set up instance of deCONZ integration.""" + return await config_entry_factory() + + +# Websocket fixtures + + +@pytest.fixture(autouse=True, name="_mock_websocket") +def fixture_websocket() -> Generator[_WebsocketMock]: """No real websocket allowed.""" with patch("pydeconz.gateway.WSClient") as mock: - async def make_websocket_call(data: dict | None = None, state: str = ""): + async def make_websocket_call( + data: dict[str, Any] | None = None, state: str = "" + ) -> None: """Generate a websocket call.""" pydeconz_gateway_session_handler = mock.call_args[0][3] + signal: Signal if data: mock.return_value.data = data - await pydeconz_gateway_session_handler(signal=Signal.DATA) + signal = Signal.DATA elif state: mock.return_value.state = state - await pydeconz_gateway_session_handler(signal=Signal.CONNECTION_STATE) - else: - raise 
NotImplementedError + signal = Signal.CONNECTION_STATE + await pydeconz_gateway_session_handler(signal) yield make_websocket_call + + +@pytest.fixture(name="mock_websocket_data") +def fixture_websocket_data(_mock_websocket: _WebsocketMock) -> WebsocketDataType: + """Fixture to send websocket data.""" + + async def change_websocket_data(data: dict[str, Any]) -> None: + """Provide new data on the websocket.""" + if "t" not in data: + data["t"] = "event" + if "e" not in data: + data["e"] = "changed" + if "id" not in data: + data["id"] = "0" + await _mock_websocket(data=data) + + return change_websocket_data + + +@pytest.fixture(name="light_ws_data") +def fixture_light_websocket_data( + mock_websocket_data: WebsocketDataType, +) -> WebsocketDataType: + """Fixture to send light data over websocket.""" + + async def send_light_data(data: dict[str, Any]) -> None: + """Send light data on the websocket.""" + await mock_websocket_data({"r": "lights"} | data) + + return send_light_data + + +@pytest.fixture(name="sensor_ws_data") +def fixture_sensor_websocket_data( + mock_websocket_data: WebsocketDataType, +) -> WebsocketDataType: + """Fixture to send sensor data over websocket.""" + + async def send_sensor_data(data: dict[str, Any]) -> None: + """Send sensor data on the websocket.""" + await mock_websocket_data({"r": "sensors"} | data) + + return send_sensor_data + + +@pytest.fixture(name="mock_websocket_state") +def fixture_websocket_state(_mock_websocket: _WebsocketMock) -> WebsocketStateType: + """Fixture to set websocket state.""" + + async def change_websocket_state(state: str) -> None: + """Simulate a change to the websocket connection state.""" + await _mock_websocket(state=state) + + return change_websocket_state diff --git a/tests/components/deconz/snapshots/test_alarm_control_panel.ambr b/tests/components/deconz/snapshots/test_alarm_control_panel.ambr new file mode 100644 index 00000000000..86b97a62dfe --- /dev/null +++ 
b/tests/components/deconz/snapshots/test_alarm_control_panel.ambr @@ -0,0 +1,51 @@ +# serializer version: 1 +# name: test_alarm_control_panel[sensor_payload0-alarm_system_payload0][alarm_control_panel.keypad-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.keypad', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keypad', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[sensor_payload0-alarm_system_payload0][alarm_control_panel.keypad-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': True, + 'code_format': , + 'friendly_name': 'Keypad', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.keypad', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_binary_sensor.ambr b/tests/components/deconz/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..584575c23af --- /dev/null +++ b/tests/components/deconz/snapshots/test_binary_sensor.ambr @@ -0,0 +1,1014 @@ +# serializer version: 1 +# name: test_binary_sensors[sensor_payload0-expected0-config_entry_options0][binary_sensor.alarm_10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.alarm_10', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm 10', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload0-expected0-config_entry_options0][binary_sensor.alarm_10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'safety', + 'friendly_name': 'Alarm 10', + 'on': True, + 'temperature': 26.0, + }), + 'context': , + 'entity_id': 'binary_sensor.alarm_10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.cave_co', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cave CO', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-carbon_monoxide', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_monoxide', + 'friendly_name': 'Cave CO', + 'on': True, + }), 
+ 'context': , + 'entity_id': 'binary_sensor.cave_co', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.cave_co_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cave CO Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Cave CO Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.cave_co_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_tampered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.cave_co_tampered', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Cave CO Tampered', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-tampered', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_tampered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Cave CO Tampered', + }), + 'context': , + 'entity_id': 'binary_sensor.cave_co_tampered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.presence_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'motion', + 'friendly_name': 'Presence sensor', + 'on': True, + 'temperature': 0.1, + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.presence_sensor_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Presence sensor Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_tampered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.presence_sensor_tampered', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor Tampered', + 'platform': 'deconz', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-tampered', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_tampered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Presence sensor Tampered', + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor_tampered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.sensor_kitchen_smoke', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'sensor_kitchen_smoke', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-fire', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'sensor_kitchen_smoke', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'sensor_kitchen_smoke Test Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'sensor_kitchen_smoke Test Mode', + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.sensor_kitchen_smoke', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'sensor_kitchen_smoke', + 'platform': 'deconz', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-fire', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'sensor_kitchen_smoke', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'sensor_kitchen_smoke Test Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'sensor_kitchen_smoke Test Mode', + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_binary_sensors[sensor_payload4-expected4-config_entry_options0][binary_sensor.kitchen_switch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.kitchen_switch', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Kitchen Switch', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'kitchen-switch-flag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload4-expected4-config_entry_options0][binary_sensor.kitchen_switch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kitchen Switch', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_switch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[sensor_payload5-expected5-config_entry_options0][binary_sensor.back_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.back_door', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Back Door', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:2b:96:b4-01-0006-open', + 
'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload5-expected5-config_entry_options0][binary_sensor.back_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Back Door', + 'on': True, + 'temperature': 33.0, + }), + 'context': , + 'entity_id': 'binary_sensor.back_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload6-expected6-config_entry_options0][binary_sensor.motion_sensor_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.motion_sensor_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion sensor 4', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:17:88:01:03:28:8c:9b-02-0406-presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload6-expected6-config_entry_options0][binary_sensor.motion_sensor_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'motion', + 'friendly_name': 'Motion sensor 4', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.motion_sensor_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.water2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'water2', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-water', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'water2', + 'on': True, + 'temperature': 25.0, + }), + 'context': , + 'entity_id': 'binary_sensor.water2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.water2_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'water2 Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'battery', + 'friendly_name': 'water2 Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.water2_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_tampered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.water2_tampered', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'water2 Tampered', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-tampered', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_tampered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'water2 Tampered', + }), + 'context': , + 'entity_id': 'binary_sensor.water2_tampered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload8-expected8-config_entry_options0][binary_sensor.vibration_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.vibration_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Vibration 1', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-vibration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload8-expected8-config_entry_options0][binary_sensor.vibration_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'vibration', + 'friendly_name': 'Vibration 1', + 'on': True, + 'orientation': list([ + 10, + 1059, + 0, + ]), + 'temperature': 32.0, + 'tiltangle': 83, + 'vibrationstrength': 114, + }), + 'context': , + 'entity_id': 'binary_sensor.vibration_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.presence_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'motion', + 'friendly_name': 'Presence sensor', + 'on': True, + 'temperature': 0.1, + }), + 'context': , + 'entity_id': 
'binary_sensor.presence_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.presence_sensor_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Presence sensor Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_tampered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.presence_sensor_tampered', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor Tampered', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-tampered', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_tampered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Presence sensor Tampered', + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor_tampered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_button.ambr b/tests/components/deconz/snapshots/test_button.ambr new file mode 100644 index 00000000000..1ef5248ebc3 --- /dev/null +++ b/tests/components/deconz/snapshots/test_button.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_button[deconz_payload0-expected0][button.light_group_scene_store_current_scene-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.light_group_scene_store_current_scene', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:inbox-arrow-down', + 'original_name': 'Scene Store Current Scene', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01234E56789A/groups/1/scenes/1-store', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[deconz_payload0-expected0][button.light_group_scene_store_current_scene-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'Light group Scene Store Current Scene', + 'icon': 'mdi:inbox-arrow-down', + }), + 'context': , + 'entity_id': 'button.light_group_scene_store_current_scene', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[deconz_payload1-expected1][button.aqara_fp1_reset_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.aqara_fp1_reset_presence', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Aqara FP1 Reset Presence', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-reset_presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[deconz_payload1-expected1][button.aqara_fp1_reset_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Aqara FP1 Reset Presence', + }), + 'context': , + 'entity_id': 'button.aqara_fp1_reset_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_climate.ambr b/tests/components/deconz/snapshots/test_climate.ambr new file mode 100644 index 00000000000..4e33e11534e --- /dev/null +++ b/tests/components/deconz/snapshots/test_climate.ambr @@ -0,0 +1,545 @@ +# serializer version: 1 +# name: test_climate_device_with_cooling_support[sensor_payload0][climate.zen_01-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': 
list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.zen_01', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Zen-01', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:24:46:00:00:11:6f:56-01-0201', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_device_with_cooling_support[sensor_payload0][climate.zen_01-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 23.2, + 'fan_mode': 'off', + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'friendly_name': 'Zen-01', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 0, + 'supported_features': , + 'temperature': 22.2, + }), + 'context': , + 'entity_id': 'climate.zen_01', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_device_with_fan_support[sensor_payload0][climate.zen_01-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.zen_01', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Zen-01', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:24:46:00:00:11:6f:56-01-0201', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_device_with_fan_support[sensor_payload0][climate.zen_01-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 23.2, + 'fan_mode': 'auto', + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'friendly_name': 'Zen-01', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 0, + 'supported_features': , + 'temperature': 22.2, + }), + 'context': , + 'entity_id': 'climate.zen_01', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_device_with_preset[sensor_payload0][climate.zen_01-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'preset_modes': list([ + 'auto', + 'boost', + 'comfort', + 'complex', + 'eco', + 'holiday', + 'manual', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.zen_01', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Zen-01', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 
'00:24:46:00:00:11:6f:56-01-0201', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_device_with_preset[sensor_payload0][climate.zen_01-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 23.2, + 'fan_mode': 'off', + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'friendly_name': 'Zen-01', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 0, + 'preset_mode': 'auto', + 'preset_modes': list([ + 'auto', + 'boost', + 'comfort', + 'complex', + 'eco', + 'holiday', + 'manual', + ]), + 'supported_features': , + 'temperature': 22.2, + }), + 'context': , + 'entity_id': 'climate.zen_01', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_device_without_cooling_support[sensor_payload0][climate.thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.thermostat', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_device_without_cooling_support[sensor_payload0][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.6, + 'friendly_name': 'Thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 35, + 
'min_temp': 7, + 'offset': 10, + 'supported_features': , + 'temperature': 22.0, + 'valve': 30, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'auto', + }) +# --- +# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.clip_thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.clip_thermostat', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CLIP thermostat', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.clip_thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.6, + 'friendly_name': 'CLIP thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'supported_features': , + 'temperature': None, + 'valve': 30, + }), + 'context': , + 'entity_id': 'climate.clip_thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, 
+ 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.thermostat', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.6, + 'friendly_name': 'Thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 10, + 'supported_features': , + 'temperature': 22.0, + 'valve': 30, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'auto', + }) +# --- +# name: test_simple_climate_device[sensor_payload0][climate.thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.thermostat', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'thermostat', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '14:b4:57:ff:fe:d5:4e:77-01-0201', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_simple_climate_device[sensor_payload0][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.0, + 'friendly_name': 'thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'locked': True, + 'max_temp': 35, + 'min_temp': 7, + 'offset': 0, + 'supported_features': , + 'temperature': 21.0, + 'valve': 24, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_cover.ambr b/tests/components/deconz/snapshots/test_cover.ambr new file mode 100644 index 00000000000..5c50923453c --- /dev/null +++ b/tests/components/deconz/snapshots/test_cover.ambr @@ -0,0 +1,150 @@ +# serializer version: 1 +# name: test_cover[light_payload0][cover.window_covering_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.window_covering_device', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Window covering device', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[light_payload0][cover.window_covering_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 0, + 'device_class': 'shade', + 'friendly_name': 'Window covering device', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.window_covering_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# 
name: test_level_controllable_output_cover[light_payload0][cover.vent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.vent', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Vent', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:22:a3:00:00:00:00:00-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_level_controllable_output_cover[light_payload0][cover.vent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 5, + 'current_tilt_position': 97, + 'device_class': 'damper', + 'friendly_name': 'Vent', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.vent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_tilt_cover[light_payload0][cover.covering_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.covering_device', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Covering device', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:24:46:00:00:12:34:56-01', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_tilt_cover[light_payload0][cover.covering_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 100, + 'current_tilt_position': 100, + 'device_class': 'shade', + 'friendly_name': 'Covering device', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.covering_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_fan.ambr b/tests/components/deconz/snapshots/test_fan.ambr new file mode 100644 index 00000000000..8b7dbba64e4 --- /dev/null +++ b/tests/components/deconz/snapshots/test_fan.ambr @@ -0,0 +1,54 @@ +# serializer version: 1 +# name: test_fans[light_payload0][fan.ceiling_fan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': None, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.ceiling_fan', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ceiling fan', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:22:a3:00:00:27:8b:81-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[light_payload0][fan.ceiling_fan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ceiling fan', + 'percentage': 100, + 'percentage_step': 1.0, + 'preset_mode': None, + 'preset_modes': None, + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.ceiling_fan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_hub.ambr b/tests/components/deconz/snapshots/test_hub.ambr new file 
mode 100644 index 00000000000..f3aa9a5e65d --- /dev/null +++ b/tests/components/deconz/snapshots/test_hub.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_registry_entry + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://1.2.3.4:80', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'deconz', + '01234E56789A', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Dresden Elektronik', + 'model': 'deCONZ', + 'model_id': None, + 'name': 'deCONZ mock gateway', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/deconz/snapshots/test_light.ambr b/tests/components/deconz/snapshots/test_light.ambr new file mode 100644 index 00000000000..b5a9f7b5543 --- /dev/null +++ b/tests/components/deconz/snapshots/test_light.ambr @@ -0,0 +1,1486 @@ +# serializer version: 1 +# name: test_groups[input0-light_payload0][light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-light_payload0][light.dimmable_light-state] + StateSnapshot({ 
+ 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 255, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 
'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-light_payload0][light.tunable_white_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-light_payload0][light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 
'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-light_payload0][light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 50, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 
2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-light_payload0][light.tunable_white_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-light_payload0][light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-light_payload0][light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 50, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 
'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , 
+ 'state': 'on', + }) +# --- +# name: test_groups[input2-light_payload0][light.tunable_white_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload0][light.hue_go-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 
'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_go', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Go', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload0][light.hue_go-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': 375, + 'color_temp_kelvin': 2666, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Hue Go', + 'hs_color': tuple( + 28.47, + 66.821, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 165, + 84, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.53, + 0.388, + ), + }), + 'context': , + 'entity_id': 'light.hue_go', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload1][light.hue_ensis-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 7142, + 'max_mireds': 650, + 'min_color_temp_kelvin': 1538, + 'min_mireds': 140, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 
'light.hue_ensis', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Ensis', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload1][light.hue_ensis-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Hue Ensis', + 'hs_color': tuple( + 29.691, + 38.039, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 7142, + 'max_mireds': 650, + 'min_color_temp_kelvin': 1538, + 'min_mireds': 140, + 'rgb_color': tuple( + 255, + 206, + 158, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.427, + 0.373, + ), + }), + 'context': , + 'entity_id': 'light.hue_ensis', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload2][light.lidl_xmas_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'carnival', + 'collide', + 'fading', + 'fireworks', + 'flag', + 'glow', + 'rainbow', + 'snake', + 'snow', + 'sparkles', + 'steady', + 'strobe', + 'twinkle', + 'updown', + 'vintage', + 'waves', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.lidl_xmas_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'LIDL xmas light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '58:8e:81:ff:fe:db:7b:be-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload2][light.lidl_xmas_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 25, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'carnival', + 'collide', + 'fading', + 'fireworks', + 'flag', + 'glow', + 'rainbow', + 'snake', + 'snow', + 'sparkles', + 'steady', + 'strobe', + 'twinkle', + 'updown', + 'vintage', + 'waves', + ]), + 'friendly_name': 'LIDL xmas light', + 'hs_color': tuple( + 294.938, + 55.294, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 243, + 113, + 255, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.357, + 0.188, + ), + }), + 'context': , + 'entity_id': 'light.lidl_xmas_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload3][light.hue_white_ambiance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_white_ambiance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue White Ambiance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 
'00:17:88:01:01:23:45:67-02', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload3][light.hue_white_ambiance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': 396, + 'color_temp_kelvin': 2525, + 'friendly_name': 'Hue White Ambiance', + 'hs_color': tuple( + 28.809, + 71.624, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 160, + 72, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.544, + 0.389, + ), + }), + 'context': , + 'entity_id': 'light.hue_white_ambiance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload4][light.hue_filament-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_filament', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Filament', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-03', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload4][light.hue_filament-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'friendly_name': 'Hue Filament', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.hue_filament', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload5][light.simple_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.simple_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Simple Light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:23:45:67-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload5][light.simple_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'Simple Light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.simple_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload6][light.gradient_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.gradient_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gradient light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:0b:0c:0d:0e-0f', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload6][light.gradient_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 184, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Gradient light', + 'hs_color': tuple( + 98.095, + 74.118, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 135, + 255, + 66, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.2727, + 0.6226, + ), + }), + 'context': , + 'entity_id': 'light.gradient_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_number.ambr b/tests/components/deconz/snapshots/test_number.ambr new file mode 100644 index 00000000000..26e044e1d31 --- /dev/null +++ b/tests/components/deconz/snapshots/test_number.ambr @@ -0,0 +1,111 @@ +# serializer version: 1 +# name: test_number_entities[sensor_payload0-expected0][number.presence_sensor_delay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65535, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.presence_sensor_delay', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Presence sensor Delay', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-delay', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[sensor_payload0-expected0][number.presence_sensor_delay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Presence sensor Delay', + 'max': 65535, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.presence_sensor_delay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_number_entities[sensor_payload1-expected1][number.presence_sensor_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65535, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.presence_sensor_duration', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Presence sensor Duration', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-duration', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[sensor_payload1-expected1][number.presence_sensor_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Presence sensor Duration', + 'max': 65535, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.presence_sensor_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- diff --git 
a/tests/components/deconz/snapshots/test_scene.ambr b/tests/components/deconz/snapshots/test_scene.ambr new file mode 100644 index 00000000000..85a5ab92c5c --- /dev/null +++ b/tests/components/deconz/snapshots/test_scene.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_scenes[group_payload0-expected0][scene.light_group_scene-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'scene', + 'entity_category': None, + 'entity_id': 'scene.light_group_scene', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Scene', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01234E56789A/groups/1/scenes/1', + 'unit_of_measurement': None, + }) +# --- +# name: test_scenes[group_payload0-expected0][scene.light_group_scene-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Light group Scene', + }), + 'context': , + 'entity_id': 'scene.light_group_scene', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_select.ambr b/tests/components/deconz/snapshots/test_select.ambr new file mode 100644 index 00000000000..12966709947 --- /dev/null +++ b/tests/components/deconz/snapshots/test_select.ambr @@ -0,0 +1,508 @@ +# serializer version: 1 +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_device_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Device Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_device_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Device Mode', + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'undirected', + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Sensitivity', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 
Sensitivity', + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'High', + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_trigger_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Trigger Distance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_trigger_distance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Trigger Distance', + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_device_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'has_entity_name': False, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Device Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_device_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Device Mode', + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'undirected', + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Sensitivity', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Sensitivity', + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'context': , + 'entity_id': 
'select.aqara_fp1_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'High', + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_trigger_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Trigger Distance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_trigger_distance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Trigger Distance', + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_device_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Device Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_device_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Device Mode', + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'undirected', + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Sensitivity', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Sensitivity', + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'High', + }) +# --- 
+# name: test_select[sensor_payload2-expected2][select.aqara_fp1_trigger_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Trigger Distance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_trigger_distance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Trigger Distance', + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_sensor.ambr b/tests/components/deconz/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..dd097ea1c9a --- /dev/null +++ b/tests/components/deconz/snapshots/test_sensor.ambr @@ -0,0 +1,2201 @@ +# serializer version: 1 +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_flur-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.clip_flur', + 'has_entity_name': False, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CLIP Flur', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '/sensors/3-status', + 'unit_of_measurement': None, + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_flur-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'CLIP Flur', + 'on': True, + }), + 'context': , + 'entity_id': 'sensor.clip_flur', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_light_level_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.clip_light_level_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CLIP light level sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00-light_level', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_light_level_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 'friendly_name': 'CLIP light level sensor', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.clip_light_level_sensor', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '999.8', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.light_level_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Light level sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-light_level', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'illuminance', + 'friendly_name': 'Light level sensor', + 'on': True, + 'state_class': , + 'temperature': 0.1, + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.light_level_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '999.8', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.light_level_sensor_temperature', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , 
+ 'original_icon': None, + 'original_name': 'Light level sensor Temperature', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-internal_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Light level sensor Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.light_level_sensor_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bosch_air_quality_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'BOSCH Air quality sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BOSCH Air quality sensor', + }), + 'context': , + 'entity_id': 'sensor.bosch_air_quality_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'poor', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'BOSCH Air quality sensor PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BOSCH Air quality sensor PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '809', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bosch_air_quality_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'BOSCH Air quality sensor', + 'platform': 'deconz', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BOSCH Air quality sensor', + }), + 'context': , + 'entity_id': 'sensor.bosch_air_quality_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'poor', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'BOSCH Air quality sensor PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BOSCH Air quality sensor PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '809', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload10-expected10][sensor.fsm_state_motion_stair-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.fsm_state_motion_stair', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'FSM_STATE Motion stair', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'fsm-state-1520195376277-status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload10-expected10][sensor.fsm_state_motion_stair-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'FSM_STATE Motion stair', + 'on': True, + }), + 'context': , + 'entity_id': 'sensor.fsm_state_motion_stair', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mi_temperature_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0405-humidity', + 'unit_of_measurement': 
'%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Mi temperature 1', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.55', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0405-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Mi temperature 1 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.soil_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soil Sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-moisture', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Soil Sensor', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.soil_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '72.13', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.soil_sensor_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soil Sensor Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Soil Sensor Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.soil_sensor_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.motion_sensor_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion sensor 4', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:17:88:01:03:28:8c:9b-02-0400-light_level', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': True, + 'daylight': False, + 'device_class': 'illuminance', + 'friendly_name': 'Motion sensor 4', + 'on': True, + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.motion_sensor_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.0', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + 
}), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.motion_sensor_4_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion sensor 4 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:17:88:01:03:28:8c:9b-02-0400-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': True, + 'daylight': False, + 'device_class': 'battery', + 'friendly_name': 'Motion sensor 4 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.motion_sensor_4_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload14-expected14][sensor.starkvind_airpurifier_pm25-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.starkvind_airpurifier_pm25', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'STARKVIND AirPurifier PM25', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-042a-particulate_matter_pm2_5', + 
'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload14-expected14][sensor.starkvind_airpurifier_pm25-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'STARKVIND AirPurifier PM25', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.starkvind_airpurifier_pm25', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload15-expected15][sensor.power_16-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.power_16', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power 16', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:0d:6f:00:0b:7a:64:29-01-0b04-power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload15-expected15][sensor.power_16-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current': 34, + 'device_class': 'power', + 'friendly_name': 'Power 16', + 'on': True, + 'state_class': , + 'unit_of_measurement': , + 'voltage': 231, + }), + 'context': , + 'entity_id': 'sensor.power_16', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '64', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mi_temperature_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0403-pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Mi temperature 1', + 'on': True, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1010', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0403-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Mi temperature 1 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mi_temperature_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0402-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mi temperature 1', + 'on': True, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.82', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0402-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Mi temperature 1 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.etrv_sejour', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'eTRV Séjour', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cc:cc:cc:ff:fe:38:4d:b3-01-000a-last_set', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'eTRV Séjour', + }), + 'context': , + 'entity_id': 'sensor.etrv_sejour', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020-11-19T08:07:08+00:00', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.etrv_sejour_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'eTRV Séjour Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cc:cc:cc:ff:fe:38:4d:b3-01-000a-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'eTRV Séjour Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.etrv_sejour_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.alarm_10_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm 10 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Alarm 10 Battery', + 'on': True, + 'state_class': , + 'temperature': 26.0, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.alarm_10_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.alarm_10_temperature', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm 10 Temperature', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-internal_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_temperature-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Alarm 10 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.alarm_10_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26.0', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ch2o-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ch2o', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CH2O', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ch2o-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds', + 'friendly_name': 'AirQuality 1 CH2O', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ch2o', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_co2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.airquality_1_co2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CO2', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_co2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'AirQuality 1 CO2', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_co2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '359', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_pm25-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_pm25', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 PM25', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_pm25-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'AirQuality 1 
PM25', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_pm25', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AirQuality 1 PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirQuality 1 PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload20-expected20][sensor.dimmer_switch_3_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dimmer_switch_3_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Dimmer switch 3 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:17:88:01:02:0e:32:a3-02-fc00-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload20-expected20][sensor.dimmer_switch_3_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'event_id': 'dimmer_switch_3', + 'friendly_name': 'Dimmer switch 3 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dimmer_switch_3_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '90', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ch2o-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ch2o', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CH2O', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ch2o-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds', + 'friendly_name': 'AirQuality 1 CH2O', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + 
}), + 'context': , + 'entity_id': 'sensor.airquality_1_ch2o', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_co2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_co2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CO2', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_co2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'AirQuality 1 CO2', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_co2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '359', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_pm25-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_pm25', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 PM25', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_pm25-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'AirQuality 1 PM25', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_pm25', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AirQuality 1 PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirQuality 1 PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# 
name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ch2o-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ch2o', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CH2O', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ch2o-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds', + 'friendly_name': 'AirQuality 1 CH2O', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ch2o', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_co2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_co2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CO2', + 'platform': 'deconz', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_co2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'AirQuality 1 CO2', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_co2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '359', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_pm25-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_pm25', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 PM25', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_pm25-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'AirQuality 1 PM25', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_pm25', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AirQuality 1 PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirQuality 1 PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload5-expected5][sensor.fyrtur_block_out_roller_blind_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fyrtur_block_out_roller_blind_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'FYRTUR block-out roller blind Battery', + 'platform': 'deconz', + 'previous_unique_id': None, 
+ 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:0d:6f:ff:fe:01:23:45-01-0001-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload5-expected5][sensor.fyrtur_block_out_roller_blind_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'FYRTUR block-out roller blind Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fyrtur_block_out_roller_blind_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload6-expected6][sensor.carbondioxide_35-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.carbondioxide_35', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CarbonDioxide 35', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-040d-carbon_dioxide', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload6-expected6][sensor.carbondioxide_35-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'CarbonDioxide 35', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.carbondioxide_35', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '370', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload7-expected7][sensor.consumption_15-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.consumption_15', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption 15', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:0d:6f:00:0b:7a:64:29-01-0702-consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload7-expected7][sensor.consumption_15-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Consumption 15', + 'on': True, + 'power': 123, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.consumption_15', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.342', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload8-expected8][sensor.daylight-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.daylight', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:white-balance-sunny', + 'original_name': 'Daylight', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
None, + 'unique_id': '01:23:4E:FF:FF:56:78:9A-01-daylight_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload8-expected8][sensor.daylight-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'daylight': True, + 'friendly_name': 'Daylight', + 'icon': 'mdi:white-balance-sunny', + 'on': True, + }), + 'context': , + 'entity_id': 'sensor.daylight', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'solar_noon', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload9-expected9][sensor.formaldehyde_34-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.formaldehyde_34', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Formaldehyde 34', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-042b-formaldehyde', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload9-expected9][sensor.formaldehyde_34-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds', + 'friendly_name': 'Formaldehyde 34', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.formaldehyde_34', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- diff --git a/tests/components/deconz/test_alarm_control_panel.py b/tests/components/deconz/test_alarm_control_panel.py index c855076de2f..6c47146f9b0 100644 --- 
a/tests/components/deconz/test_alarm_control_panel.py +++ b/tests/components/deconz/test_alarm_control_panel.py @@ -1,8 +1,11 @@ """deCONZ alarm control panel platform tests.""" +from collections.abc import Callable from unittest.mock import patch from pydeconz.models.sensor.ancillary_control import AncillaryControlPanel +import pytest +from syrupy import SnapshotAssertion from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, @@ -21,34 +24,21 @@ from homeassistant.const import ( STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, - STATE_UNAVAILABLE, - STATE_UNKNOWN, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no climate entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - -async def test_alarm_control_panel( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test successful creation of alarm control panel entities.""" - data = { - "alarmsystems": { +@pytest.mark.parametrize( + "alarm_system_payload", + [ + { "0": { "name": "default", "config": { @@ -75,230 +65,95 @@ async def test_alarm_control_panel( }, }, } - }, - "sensors": { - "0": { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - 
"state": { - "action": "armed_stay", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "none", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 4 - assert hass.states.get("alarm_control_panel.keypad").state == STATE_UNKNOWN - - # Event signals alarm control panel armed away - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.ARMED_AWAY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_AWAY - - # Event signals alarm control panel armed night - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.ARMED_NIGHT}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert ( - hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_NIGHT - ) - - # Event signals alarm control panel armed home - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.ARMED_STAY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_HOME - - # Event signals alarm control panel disarmed - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.DISARMED}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - 
- assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_DISARMED - - # Event signals alarm control panel arming - - for arming_event in ( - AncillaryControlPanel.ARMING_AWAY, - AncillaryControlPanel.ARMING_NIGHT, - AncillaryControlPanel.ARMING_STAY, - ): - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": arming_event}, } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMING - - # Event signals alarm control panel pending - - for pending_event in ( - AncillaryControlPanel.ENTRY_DELAY, - AncillaryControlPanel.EXIT_DELAY, - ): - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": pending_event}, + ], +) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "armed_stay", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "none", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + "uniqueid": "00:00:00:00:00:00:00:00-00", } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + ], +) +async def test_alarm_control_panel( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, +) -> None: + """Test successful creation of alarm control panel entities.""" + with 
patch( + "homeassistant.components.deconz.PLATFORMS", [Platform.ALARM_CONTROL_PANEL] + ): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - assert ( - hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_PENDING - ) - - # Event signals alarm control panel triggered - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.IN_ALARM}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_TRIGGERED - - # Event signals alarm control panel unknown state keeps previous state - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.NOT_READY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_TRIGGERED + for action, state in ( + # Event signals alarm control panel armed state + (AncillaryControlPanel.ARMED_AWAY, STATE_ALARM_ARMED_AWAY), + (AncillaryControlPanel.ARMED_NIGHT, STATE_ALARM_ARMED_NIGHT), + (AncillaryControlPanel.ARMED_STAY, STATE_ALARM_ARMED_HOME), + (AncillaryControlPanel.DISARMED, STATE_ALARM_DISARMED), + # Event signals alarm control panel arming state + (AncillaryControlPanel.ARMING_AWAY, STATE_ALARM_ARMING), + (AncillaryControlPanel.ARMING_NIGHT, STATE_ALARM_ARMING), + (AncillaryControlPanel.ARMING_STAY, STATE_ALARM_ARMING), + # Event signals alarm control panel pending state + (AncillaryControlPanel.ENTRY_DELAY, STATE_ALARM_PENDING), + (AncillaryControlPanel.EXIT_DELAY, STATE_ALARM_PENDING), + # Event signals alarm control panel triggered state + (AncillaryControlPanel.IN_ALARM, STATE_ALARM_TRIGGERED), + # Event signals alarm control panel unknown state keeps 
previous state + (AncillaryControlPanel.NOT_READY, STATE_ALARM_TRIGGERED), + ): + await sensor_ws_data({"state": {"panel": action}}) + assert hass.states.get("alarm_control_panel.keypad").state == state # Verify service calls - # Service set alarm to away mode - - mock_deconz_put_request( - aioclient_mock, config_entry.data, "/alarmsystems/0/arm_away" - ) - - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - SERVICE_ALARM_ARM_AWAY, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "1234"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[1][2] == {"code0": "1234"} - - # Service set alarm to home mode - - mock_deconz_put_request( - aioclient_mock, config_entry.data, "/alarmsystems/0/arm_stay" - ) - - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - SERVICE_ALARM_ARM_HOME, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "2345"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[2][2] == {"code0": "2345"} - - # Service set alarm to night mode - - mock_deconz_put_request( - aioclient_mock, config_entry.data, "/alarmsystems/0/arm_night" - ) - - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - SERVICE_ALARM_ARM_NIGHT, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "3456"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[3][2] == {"code0": "3456"} - - # Service set alarm to disarmed - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/alarmsystems/0/disarm") - - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - SERVICE_ALARM_DISARM, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "4567"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[4][2] == {"code0": "4567"} - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 4 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await 
hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 + for path, service, code in ( + # Service set alarm to away mode + ("arm_away", SERVICE_ALARM_ARM_AWAY, "1234"), + # Service set alarm to home mode + ("arm_stay", SERVICE_ALARM_ARM_HOME, "2345"), + # Service set alarm to night mode + ("arm_night", SERVICE_ALARM_ARM_NIGHT, "3456"), + # Service set alarm to disarmed + ("disarm", SERVICE_ALARM_DISARM, "4567"), + ): + aioclient_mock.mock_calls.clear() + aioclient_mock = mock_put_request(f"/alarmsystems/0/{path}") + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + service, + {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: code}, + blocking=True, + ) + assert aioclient_mock.mock_calls[0][2] == {"code0": code} diff --git a/tests/components/deconz/test_binary_sensor.py b/tests/components/deconz/test_binary_sensor.py index 6ab5f2f5477..59d31afb9fc 100644 --- a/tests/components/deconz/test_binary_sensor.py +++ b/tests/components/deconz/test_binary_sensor.py @@ -1,10 +1,12 @@ """deCONZ binary sensor platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.components.deconz.const import ( CONF_ALLOW_CLIP_SENSOR, CONF_ALLOW_NEW_DEVICES, @@ -12,32 +14,13 @@ from homeassistant.components.deconz.const import ( DOMAIN as DECONZ_DOMAIN, ) from homeassistant.components.deconz.services import SERVICE_DEVICE_REFRESH -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - STATE_OFF, - STATE_ON, - STATE_UNAVAILABLE, - EntityCategory, -) +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - 
mock_deconz_request, - setup_deconz_integration, -) - -from tests.test_util.aiohttp import AiohttpClientMocker - - -async def test_no_binary_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no sensor entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import MockConfigEntry, snapshot_platform TEST_DATA = [ ( # Alarm binary sensor @@ -64,19 +47,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:b5:d1:80-01-0500", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "binary_sensor.alarm_10", - "unique_id": "00:15:8d:00:02:b5:d1:80-01-0500-alarm", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.SAFETY, - "attributes": { - "on": True, - "temperature": 26.0, - "device_class": "safety", - "friendly_name": "Alarm 10", - }, "websocket_event": {"alarm": True}, "next_state": STATE_ON, }, @@ -105,18 +76,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:a5:21:24-01-0101", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "binary_sensor.cave_co", - "unique_id": "00:15:8d:00:02:a5:21:24-01-0101-carbon_monoxide", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.CO, - "attributes": { - "on": True, - "device_class": "carbon_monoxide", - "friendly_name": "Cave CO", - }, "websocket_event": {"carbonmonoxide": True}, "next_state": STATE_ON, }, @@ -140,18 +100,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:01:d9:3e:7c-01-0500", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "binary_sensor.sensor_kitchen_smoke", - "unique_id": "00:15:8d:00:01:d9:3e:7c-01-0500-fire", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.SMOKE, - "attributes": { - "on": True, - "device_class": "smoke", - "friendly_name": "sensor_kitchen_smoke", - }, 
"websocket_event": {"fire": True}, "next_state": STATE_ON, }, @@ -176,17 +125,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:01:d9:3e:7c-01-0500", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "binary_sensor.sensor_kitchen_smoke_test_mode", - "unique_id": "00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode", - "state": STATE_OFF, - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": BinarySensorDeviceClass.SMOKE, - "attributes": { - "device_class": "smoke", - "friendly_name": "sensor_kitchen_smoke Test Mode", - }, "websocket_event": {"test": True}, "next_state": STATE_ON, }, @@ -208,17 +147,7 @@ TEST_DATA = [ "uniqueid": "kitchen-switch", }, { - "entity_count": 1, - "device_count": 2, "entity_id": "binary_sensor.kitchen_switch", - "unique_id": "kitchen-switch-flag", - "state": STATE_ON, - "entity_category": None, - "device_class": None, - "attributes": { - "on": True, - "friendly_name": "Kitchen Switch", - }, "websocket_event": {"flag": False}, "next_state": STATE_OFF, }, @@ -245,19 +174,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:2b:96:b4-01-0006", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "binary_sensor.back_door", - "unique_id": "00:15:8d:00:02:2b:96:b4-01-0006-open", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.OPENING, - "attributes": { - "on": True, - "temperature": 33.0, - "device_class": "opening", - "friendly_name": "Back Door", - }, "websocket_event": {"open": True}, "next_state": STATE_ON, }, @@ -291,19 +208,7 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:03:28:8c:9b-02-0406", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "binary_sensor.motion_sensor_4", - "unique_id": "00:17:88:01:03:28:8c:9b-02-0406-presence", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.MOTION, - "attributes": { - "on": True, - "dark": False, - "device_class": "motion", - "friendly_name": "Motion sensor 4", - }, "websocket_event": {"presence": 
True}, "next_state": STATE_ON, }, @@ -332,19 +237,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:2f:07:db-01-0500", }, { - "entity_count": 5, - "device_count": 3, "entity_id": "binary_sensor.water2", - "unique_id": "00:15:8d:00:02:2f:07:db-01-0500-water", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.MOISTURE, - "attributes": { - "on": True, - "temperature": 25.0, - "device_class": "moisture", - "friendly_name": "water2", - }, "websocket_event": {"water": True}, "next_state": STATE_ON, }, @@ -377,22 +270,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:a5:21:24-01-0101", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "binary_sensor.vibration_1", - "unique_id": "00:15:8d:00:02:a5:21:24-01-0101-vibration", - "state": STATE_ON, - "entity_category": None, - "device_class": BinarySensorDeviceClass.VIBRATION, - "attributes": { - "on": True, - "temperature": 32.0, - "orientation": [10, 1059, 0], - "tiltangle": 83, - "vibrationstrength": 114, - "device_class": "vibration", - "friendly_name": "Vibration 1", - }, "websocket_event": {"vibration": False}, "next_state": STATE_OFF, }, @@ -415,17 +293,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "binary_sensor.presence_sensor_tampered", - "unique_id": "00:00:00:00:00:00:00:00-00-tampered", - "state": STATE_OFF, - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": BinarySensorDeviceClass.TAMPER, - "attributes": { - "device_class": "tamper", - "friendly_name": "Presence sensor Tampered", - }, "websocket_event": {"tampered": True}, "next_state": STATE_ON, }, @@ -448,17 +316,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "binary_sensor.presence_sensor_low_battery", - "unique_id": "00:00:00:00:00:00:00:00-00-low_battery", - "state": STATE_OFF, - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": 
BinarySensorDeviceClass.BATTERY, - "attributes": { - "device_class": "battery", - "friendly_name": "Presence sensor Low Battery", - }, "websocket_event": {"lowbattery": True}, "next_state": STATE_ON, }, @@ -466,99 +324,50 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) async def test_binary_sensors( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, - sensor_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + sensor_ws_data: WebsocketDataType, + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of binary sensor entities.""" - with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"1": sensor_data}}): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} - ) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - sensor = hass.states.get(expected["entity_id"]) - assert sensor.state == expected["state"] - assert sensor.attributes.get(ATTR_DEVICE_CLASS) == expected["device_class"] - assert sensor.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.BINARY_SENSOR]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, 
config_entry.entry_id) # Change state - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": expected["websocket_event"], - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": expected["websocket_event"]}) assert hass.states.get(expected["entity_id"]).state == expected["next_state"] - # Unload entry - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_not_allow_clip_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that CLIP sensors are not allowed.""" - data = { - "sensors": { - "1": { - "name": "CLIP presence sensor", - "type": "CLIPPresence", - "state": {"presence": False}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - }, +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "CLIP presence sensor", + "type": "CLIPPresence", + "state": {"presence": False}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:02-00", } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} - ) - + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_not_allow_clip_sensor(hass: HomeAssistant) -> None: + """Test that CLIP sensors are not allowed.""" assert len(hass.states.async_all()) == 0 -async def test_allow_clip_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that CLIP sensors can be allowed.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": 
"Presence sensor", "type": "ZHAPresence", @@ -585,12 +394,13 @@ async def test_allow_clip_sensor( "uniqueid": "/sensors/3", }, } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} - ) + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +async def test_allow_clip_sensor( + hass: HomeAssistant, config_entry_setup: MockConfigEntry +) -> None: + """Test that CLIP sensors can be allowed.""" assert len(hass.states.async_all()) == 3 assert hass.states.get("binary_sensor.presence_sensor").state == STATE_OFF @@ -600,7 +410,7 @@ async def test_allow_clip_sensor( # Disallow clip sensors hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_CLIP_SENSOR: False} + config_entry_setup, options={CONF_ALLOW_CLIP_SENSOR: False} ) await hass.async_block_till_done() @@ -611,7 +421,7 @@ async def test_allow_clip_sensor( # Allow clip sensors hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_CLIP_SENSOR: True} + config_entry_setup, options={CONF_ALLOW_CLIP_SENSOR: True} ) await hass.async_block_till_done() @@ -620,15 +430,16 @@ async def test_allow_clip_sensor( assert hass.states.get("binary_sensor.clip_flag_boot_time").state == STATE_ON +@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_binary_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new binary sensor works.""" + assert len(hass.states.async_all()) == 0 + event_added_sensor = { - "t": "event", "e": "added", - "r": "sensors", - "id": "1", "sensor": { "id": "Presence sensor id", "name": "Presence sensor", @@ -638,22 +449,21 @@ async def test_add_new_binary_sensor( "uniqueid": "00:00:00:00:00:00:00:00-00", }, } - - await setup_deconz_integration(hass, aioclient_mock) - assert 
len(hass.states.async_all()) == 0 - - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() - + await sensor_ws_data(event_added_sensor) assert len(hass.states.async_all()) == 1 assert hass.states.get("binary_sensor.presence_sensor").state == STATE_OFF +@pytest.mark.parametrize( + "config_entry_options", [{CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}] +) async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( hass: HomeAssistant, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + deconz_payload: dict[str, Any], + mock_requests: Callable[[str], None], + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new binary sensor is not allowed.""" sensor = { @@ -663,36 +473,24 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( "config": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", } - event_added_sensor = { - "t": "event", - "e": "added", - "r": "sensors", - "id": "1", - "sensor": sensor, - } - - config_entry = await setup_deconz_integration( - hass, - aioclient_mock, - options={CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}, - ) assert len(hass.states.async_all()) == 0 - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"e": "added", "sensor": sensor}) assert len(hass.states.async_all()) == 0 assert not hass.states.get("binary_sensor.presence_sensor") assert ( - len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) + len( + er.async_entries_for_config_entry( + entity_registry, config_entry_setup.entry_id + ) + ) == 0 ) - aioclient_mock.clear_requests() - data = {"config": {}, "groups": {}, "lights": {}, "sensors": {"1": sensor}} - mock_deconz_request(aioclient_mock, config_entry.data, data) + deconz_payload["sensors"]["0"] = sensor + 
mock_requests() await hass.services.async_call(DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH) await hass.async_block_till_done() @@ -701,11 +499,16 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( assert hass.states.get("binary_sensor.presence_sensor") +@pytest.mark.parametrize( + "config_entry_options", [{CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}] +) async def test_add_new_binary_sensor_ignored_load_entities_on_options_change( hass: HomeAssistant, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + deconz_payload: dict[str, Any], + mock_requests: Callable[[str], None], + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new binary sensor is not allowed.""" sensor = { @@ -715,39 +518,27 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_options_change( "config": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", } - event_added_sensor = { - "t": "event", - "e": "added", - "r": "sensors", - "id": "1", - "sensor": sensor, - } - - config_entry = await setup_deconz_integration( - hass, - aioclient_mock, - options={CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}, - ) assert len(hass.states.async_all()) == 0 - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"e": "added", "sensor": sensor}) assert len(hass.states.async_all()) == 0 assert not hass.states.get("binary_sensor.presence_sensor") assert ( - len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) + len( + er.async_entries_for_config_entry( + entity_registry, config_entry_setup.entry_id + ) + ) == 0 ) - aioclient_mock.clear_requests() - data = {"config": {}, "groups": {}, "lights": {}, "sensors": {"1": sensor}} - mock_deconz_request(aioclient_mock, config_entry.data, data) + deconz_payload["sensors"]["0"] = sensor + mock_requests() 
hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_NEW_DEVICES: True} + config_entry_setup, options={CONF_ALLOW_NEW_DEVICES: True} ) await hass.async_block_till_done() diff --git a/tests/components/deconz/test_button.py b/tests/components/deconz/test_button.py index 4d85270ddca..c649dba5b00 100644 --- a/tests/components/deconz/test_button.py +++ b/tests/components/deconz/test_button.py @@ -1,31 +1,22 @@ """deCONZ button platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker - -async def test_no_binary_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no sensor entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - TEST_DATA = [ ( # Store scene button { @@ -42,15 +33,7 @@ TEST_DATA = [ } }, { - "entity_count": 2, - "device_count": 3, "entity_id": "button.light_group_scene_store_current_scene", - "unique_id": "01234E56789A/groups/1/scenes/1-store", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "icon": "mdi:inbox-arrow-down", - "friendly_name": "Light group Scene Store Current Scene", - }, "request": "/groups/1/scenes/1/store", 
"request_data": {}, }, @@ -84,15 +67,7 @@ TEST_DATA = [ } }, { - "entity_count": 5, - "device_count": 3, "entity_id": "button.aqara_fp1_reset_presence", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-reset_presence", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "device_class": "restart", - "friendly_name": "Aqara FP1 Reset Presence", - }, "request": "/sensors/1/config", "request_data": {"resetpresence": True}, }, @@ -100,42 +75,24 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("deconz_payload", "expected"), TEST_DATA) async def test_button( hass: HomeAssistant, entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - raw_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of button entities.""" - with patch.dict(DECONZ_WEB_REQUEST, raw_data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - button = hass.states.get(expected["entity_id"]) - assert button.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.BUTTON]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify button press - mock_deconz_put_request(aioclient_mock, 
config_entry.data, expected["request"]) + aioclient_mock = mock_put_request(expected["request"]) await hass.services.async_call( BUTTON_DOMAIN, @@ -144,14 +101,3 @@ async def test_button( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == expected["request_data"] - - # Unload entry - - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_climate.py b/tests/components/deconz/test_climate.py index 0e51f31cec4..7f456e81976 100644 --- a/tests/components/deconz/test_climate.py +++ b/tests/components/deconz/test_climate.py @@ -1,8 +1,10 @@ """deCONZ climate platform tests.""" +from collections.abc import Callable from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.climate import ( ATTR_FAN_MODE, @@ -11,15 +13,10 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN as CLIMATE_DOMAIN, - FAN_AUTO, - FAN_HIGH, - FAN_LOW, - FAN_MEDIUM, FAN_OFF, FAN_ON, PRESET_BOOST, PRESET_COMFORT, - PRESET_ECO, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -30,106 +27,74 @@ from homeassistant.components.climate import ( from homeassistant.components.deconz.climate import ( DECONZ_FAN_SMART, DECONZ_PRESET_AUTO, - DECONZ_PRESET_COMPLEX, - DECONZ_PRESET_HOLIDAY, DECONZ_PRESET_MANUAL, ) from homeassistant.components.deconz.const import CONF_ALLOW_CLIP_SENSOR -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_TEMPERATURE, - STATE_OFF, - STATE_UNAVAILABLE, -) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_OFF, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from 
homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no climate entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 59, + "displayflipped": None, + "heatsetpoint": 2100, + "locked": True, + "mountingmode": None, + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "6130553ac247174809bae47144ee23f8", + "lastseen": "2020-11-29T19:31Z", + "manufacturername": "Danfoss", + "modelid": "eTRV0100", + "name": "thermostat", + "state": { + "errorcode": None, + "lastupdated": "2020-11-29T19:28:40.665", + "mountingmodeactive": False, + "on": True, + "temperature": 2102, + "valve": 24, + "windowopen": "Closed", + }, + "swversion": "01.02.0008 01.02", + "type": "ZHAThermostat", + "uniqueid": "14:b4:57:ff:fe:d5:4e:77-01-0201", + } + ], +) async def test_simple_climate_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of climate entities. This is a simple water heater that only supports setting temperature and on and off. 
""" - data = { - "sensors": { - "0": { - "config": { - "battery": 59, - "displayflipped": None, - "heatsetpoint": 2100, - "locked": True, - "mountingmode": None, - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "6130553ac247174809bae47144ee23f8", - "lastseen": "2020-11-29T19:31Z", - "manufacturername": "Danfoss", - "modelid": "eTRV0100", - "name": "thermostat", - "state": { - "errorcode": None, - "lastupdated": "2020-11-29T19:28:40.665", - "mountingmodeactive": False, - "on": True, - "temperature": 2102, - "valve": 24, - "windowopen": "Closed", - }, - "swversion": "01.02.0008 01.02", - "type": "ZHAThermostat", - "uniqueid": "14:b4:57:ff:fe:d5:4e:77-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - climate_thermostat = hass.states.get("climate.thermostat") - assert climate_thermostat.state == HVACMode.HEAT - assert climate_thermostat.attributes["hvac_modes"] == [ - HVACMode.HEAT, - HVACMode.OFF, - ] - assert climate_thermostat.attributes["current_temperature"] == 21.0 - assert climate_thermostat.attributes["temperature"] == 21.0 - assert climate_thermostat.attributes["locked"] is True - assert hass.states.get("sensor.thermostat_battery").state == "59" - assert climate_thermostat.attributes["hvac_action"] == HVACAction.HEATING + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals thermostat configured off - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": False}}) assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( 
hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -138,16 +103,7 @@ async def test_simple_climate_device( # Event signals thermostat state on - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": True}}) assert hass.states.get("climate.thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -156,7 +112,7 @@ async def test_simple_climate_device( # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service turn on thermostat @@ -189,61 +145,40 @@ async def test_simple_climate_device( ) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "Thermostat", + "type": "ZHAThermostat", + "state": {"on": True, "temperature": 2260, "valve": 30}, + "config": { + "battery": 100, + "heatsetpoint": 2200, + "mode": "auto", + "offset": 10, + "reachable": True, + }, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) async def test_climate_device_without_cooling_support( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - data = { - "sensors": { - "1": { - "name": "Thermostat", - "type": "ZHAThermostat", - "state": {"on": True, "temperature": 2260, "valve": 30}, - "config": { - "battery": 100, - "heatsetpoint": 2200, - "mode": "auto", - "offset": 10, - "reachable": True, - }, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with 
patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - climate_thermostat = hass.states.get("climate.thermostat") - assert climate_thermostat.state == HVACMode.AUTO - assert climate_thermostat.attributes["hvac_modes"] == [ - HVACMode.HEAT, - HVACMode.OFF, - HVACMode.AUTO, - ] - assert climate_thermostat.attributes["current_temperature"] == 22.6 - assert climate_thermostat.attributes["temperature"] == 22.0 - assert hass.states.get("sensor.thermostat") is None - assert hass.states.get("sensor.thermostat_battery").state == "100" - assert hass.states.get("climate.presence_sensor") is None - assert hass.states.get("climate.clip_thermostat") is None - assert ( - hass.states.get("climate.thermostat").attributes["hvac_action"] - == HVACAction.HEATING - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals thermostat configured off - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "config": {"mode": "off"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"mode": "off"}}) assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -252,17 +187,7 @@ async def test_climate_device_without_cooling_support( # Event signals thermostat state on - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "config": {"mode": "other"}, - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"mode": "other"}, "state": {"on": True}}) assert 
hass.states.get("climate.thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -271,16 +196,7 @@ async def test_climate_device_without_cooling_support( # Event signals thermostat state off - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": False}}) assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -289,7 +205,7 @@ async def test_climate_device_without_cooling_support( # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/1/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set HVAC mode to auto @@ -355,83 +271,53 @@ async def test_climate_device_without_cooling_support( blocking=True, ) - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 25, + "coolsetpoint": 1111, + "fanmode": None, + "heatsetpoint": 2222, + "mode": "heat", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + ], +) async def test_climate_device_with_cooling_support( - hass: 
HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 25, - "coolsetpoint": 1111, - "fanmode": None, - "heatsetpoint": 2222, - "mode": "heat", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - climate_thermostat = hass.states.get("climate.zen_01") - assert climate_thermostat.state == HVACMode.HEAT - assert climate_thermostat.attributes["hvac_modes"] == [ - HVACMode.HEAT, - HVACMode.OFF, - HVACMode.AUTO, - HVACMode.COOL, - ] - assert climate_thermostat.attributes["current_temperature"] == 23.2 - assert climate_thermostat.attributes["temperature"] == 22.2 - assert hass.states.get("sensor.zen_01_battery").state == "25" - assert ( - hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals thermostat mode cool - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"mode": "cool"}, - } 
- await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"mode": "cool"}}) assert hass.states.get("climate.zen_01").state == HVACMode.COOL assert hass.states.get("climate.zen_01").attributes["temperature"] == 11.1 assert ( @@ -440,16 +326,7 @@ async def test_climate_device_with_cooling_support( # Event signals thermostat state on - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": True}}) assert hass.states.get("climate.zen_01").state == HVACMode.COOL assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -458,7 +335,7 @@ async def test_climate_device_with_cooling_support( # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set temperature to 20 @@ -471,71 +348,52 @@ async def test_climate_device_with_cooling_support( assert aioclient_mock.mock_calls[1][2] == {"coolsetpoint": 2000.0} +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 25, + "coolsetpoint": None, + "fanmode": "auto", + "heatsetpoint": 2222, + "mode": "heat", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + ], +) async def test_climate_device_with_fan_support( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + 
entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 25, - "coolsetpoint": None, - "fanmode": "auto", - "heatsetpoint": 2222, - "mode": "heat", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - climate_thermostat = hass.states.get("climate.zen_01") - assert climate_thermostat.state == HVACMode.HEAT - assert climate_thermostat.attributes["fan_mode"] == FAN_AUTO - assert climate_thermostat.attributes["fan_modes"] == [ - DECONZ_FAN_SMART, - FAN_AUTO, - FAN_HIGH, - FAN_MEDIUM, - FAN_LOW, - FAN_ON, - FAN_OFF, - ] - assert ( - hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals fan mode defaults to off - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"fanmode": "unsupported"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"fanmode": "unsupported"}}) assert 
hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_OFF assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE @@ -543,17 +401,7 @@ async def test_climate_device_with_fan_support( # Event signals unsupported fan mode - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"fanmode": "unsupported"}, - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"fanmode": "unsupported"}, "state": {"on": True}}) assert hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_ON assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -562,16 +410,7 @@ async def test_climate_device_with_fan_support( # Event signals unsupported fan mode - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"fanmode": "unsupported"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"fanmode": "unsupported"}}) assert hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_ON assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -580,7 +419,7 @@ async def test_climate_device_with_fan_support( # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set fan mode to off @@ -613,75 +452,53 @@ async def test_climate_device_with_fan_support( ) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 25, + "coolsetpoint": None, + "fanmode": None, + "heatsetpoint": 2222, + "mode": "heat", + "preset": "auto", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + 
"modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + ], +) async def test_climate_device_with_preset( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 25, - "coolsetpoint": None, - "fanmode": None, - "heatsetpoint": 2222, - "mode": "heat", - "preset": "auto", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - - climate_zen_01 = hass.states.get("climate.zen_01") - assert climate_zen_01.state == HVACMode.HEAT - assert climate_zen_01.attributes["current_temperature"] == 23.2 - assert climate_zen_01.attributes["temperature"] == 22.2 - assert climate_zen_01.attributes["preset_mode"] == DECONZ_PRESET_AUTO - assert climate_zen_01.attributes["preset_modes"] == [ - DECONZ_PRESET_AUTO, - PRESET_BOOST, - PRESET_COMFORT, - DECONZ_PRESET_COMPLEX, - PRESET_ECO, - DECONZ_PRESET_HOLIDAY, - DECONZ_PRESET_MANUAL, - ] - assert ( - hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE - ) 
+ with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals deCONZ preset - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"preset": "manual"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"preset": "manual"}}) assert ( hass.states.get("climate.zen_01").attributes["preset_mode"] == DECONZ_PRESET_MANUAL @@ -689,21 +506,12 @@ async def test_climate_device_with_preset( # Event signals unknown preset - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"preset": "unsupported"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"preset": "unsupported"}}) assert hass.states.get("climate.zen_01").attributes["preset_mode"] is None # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set preset to HASS preset @@ -736,12 +544,10 @@ async def test_climate_device_with_preset( ) -async def test_clip_climate_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test successful creation of sensor entities.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Thermostat", "type": "ZHAThermostat", @@ -763,18 +569,19 @@ async def test_clip_climate_device( "uniqueid": "00:00:00:00:00:00:00:02-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} - ) - - assert len(hass.states.async_all()) == 3 - assert 
hass.states.get("climate.clip_thermostat").state == HVACMode.HEAT - assert ( - hass.states.get("climate.clip_thermostat").attributes["hvac_action"] - == HVACAction.HEATING - ) + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +async def test_clip_climate_device( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, +) -> None: + """Test successful creation of sensor entities.""" + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Disallow clip sensors @@ -783,7 +590,7 @@ async def test_clip_climate_device( ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == 2 + assert len(hass.states.async_all()) == 1 assert not hass.states.get("climate.clip_thermostat") # Allow clip sensors @@ -793,7 +600,7 @@ async def test_clip_climate_device( ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == 3 + assert len(hass.states.async_all()) == 2 assert hass.states.get("climate.clip_thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.clip_thermostat").attributes["hvac_action"] @@ -801,46 +608,37 @@ async def test_clip_climate_device( ) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "Thermostat", + "type": "ZHAThermostat", + "state": {"on": True, "temperature": 2260, "valve": 30}, + "config": { + "battery": 100, + "heatsetpoint": 2200, + "mode": "auto", + "offset": 10, + "reachable": True, + }, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_verify_state_update( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that state update properly.""" - 
data = { - "sensors": { - "1": { - "name": "Thermostat", - "type": "ZHAThermostat", - "state": {"on": True, "temperature": 2260, "valve": 30}, - "config": { - "battery": 100, - "heatsetpoint": 2200, - "mode": "auto", - "offset": 10, - "reachable": True, - }, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - assert hass.states.get("climate.thermostat").state == HVACMode.AUTO assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] == HVACAction.HEATING ) - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": False}}) assert hass.states.get("climate.thermostat").state == HVACMode.AUTO assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -848,15 +646,14 @@ async def test_verify_state_update( ) +@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_climate_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new climate device works.""" event_added_sensor = { - "t": "event", "e": "added", - "r": "sensors", - "id": "1", "sensor": { "id": "Thermostat id", "name": "Thermostat", @@ -873,11 +670,9 @@ async def test_add_new_climate_device( }, } - await setup_deconz_integration(hass, aioclient_mock) assert len(hass.states.async_all()) == 0 - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() + await sensor_ws_data(event_added_sensor) assert len(hass.states.async_all()) == 2 assert hass.states.get("climate.thermostat").state == HVACMode.AUTO @@ -888,141 +683,115 @@ async def test_add_new_climate_device( ) -async def test_not_allow_clip_thermostat( - 
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "CLIP thermostat sensor", + "type": "CLIPThermostat", + "state": {}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + }, + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_not_allow_clip_thermostat(hass: HomeAssistant) -> None: """Test that CLIP thermostats are not allowed.""" - data = { - "sensors": { - "1": { - "name": "CLIP thermostat sensor", - "type": "CLIPThermostat", - "state": {}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - }, - } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} - ) - assert len(hass.states.async_all()) == 0 -async def test_no_mode_no_state( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that a climate device without mode and state works.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 25, - "heatsetpoint": 2222, - "mode": None, - "preset": "auto", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": {"lastupdated": "none", "on": None, "temperature": 2290}, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 25, + "heatsetpoint": 2222, + "mode": None, + "preset": "auto", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": 
{"lastupdated": "none", "on": None, "temperature": 2290}, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_no_mode_no_state(hass: HomeAssistant) -> None: + """Test that a climate device without mode and state works.""" assert len(hass.states.async_all()) == 2 climate_thermostat = hass.states.get("climate.zen_01") - assert climate_thermostat.state is STATE_OFF assert climate_thermostat.attributes["preset_mode"] is DECONZ_PRESET_AUTO assert climate_thermostat.attributes["hvac_action"] is HVACAction.IDLE - # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 58, + "heatsetpoint": 2200, + "locked": False, + "mode": "heat", + "offset": -200, + "on": True, + "preset": "manual", + "reachable": True, + "schedule": {}, + "schedule_on": False, + "setvalve": False, + "windowopen_set": False, + }, + "ep": 1, + "etag": "404c15db68c318ebe7832ce5aa3d1e30", + "lastannounced": "2022-08-31T03:00:59Z", + "lastseen": "2022-09-19T11:58Z", + "manufacturername": "_TZE200_b6wax7g0", + "modelid": "TS0601", + "name": "Thermostat", + "state": { + "lastupdated": "2022-09-19T11:58:24.204", + "lowbattery": False, + "on": False, + "temperature": 2200, + "valve": 0, + }, + "type": "ZHAThermostat", + "uniqueid": "84:fd:27:ff:fe:8a:eb:89-01-0201", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_boost_mode( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that a climate device with boost mode and different state works.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 58, - "heatsetpoint": 2200, - 
"locked": False, - "mode": "heat", - "offset": -200, - "on": True, - "preset": "manual", - "reachable": True, - "schedule": {}, - "schedule_on": False, - "setvalve": False, - "windowopen_set": False, - }, - "ep": 1, - "etag": "404c15db68c318ebe7832ce5aa3d1e30", - "lastannounced": "2022-08-31T03:00:59Z", - "lastseen": "2022-09-19T11:58Z", - "manufacturername": "_TZE200_b6wax7g0", - "modelid": "TS0601", - "name": "Thermostat", - "state": { - "lastupdated": "2022-09-19T11:58:24.204", - "lowbattery": False, - "on": False, - "temperature": 2200, - "valve": 0, - }, - "type": "ZHAThermostat", - "uniqueid": "84:fd:27:ff:fe:8a:eb:89-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 3 climate_thermostat = hass.states.get("climate.thermostat") - assert climate_thermostat.state == HVACMode.HEAT - assert climate_thermostat.attributes["preset_mode"] is DECONZ_PRESET_MANUAL assert climate_thermostat.attributes["hvac_action"] is HVACAction.IDLE # Event signals thermostat preset boost and valve 100 (real data) - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"preset": "boost"}, - "state": {"valve": 100}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"config": {"preset": "boost"}, "state": {"valve": 100}}) climate_thermostat = hass.states.get("climate.thermostat") assert climate_thermostat.attributes["preset_mode"] is PRESET_BOOST assert climate_thermostat.attributes["hvac_action"] is HVACAction.HEATING - - # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") diff --git a/tests/components/deconz/test_config_flow.py b/tests/components/deconz/test_config_flow.py index 6da940e0918..49711962407 100644 --- a/tests/components/deconz/test_config_flow.py +++ 
b/tests/components/deconz/test_config_flow.py @@ -32,8 +32,9 @@ from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .test_gateway import API_KEY, BRIDGEID, setup_deconz_integration +from .conftest import API_KEY, BRIDGE_ID +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker BAD_BRIDGEID = "0000000000000000" @@ -47,7 +48,7 @@ async def test_flow_discovered_bridges( aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[ - {"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}, + {"id": BRIDGE_ID, "internalipaddress": "1.2.3.4", "internalport": 80}, {"id": "1234E567890A", "internalipaddress": "5.6.7.8", "internalport": 80}, ], headers={"content-type": CONTENT_TYPE_JSON}, @@ -78,7 +79,7 @@ async def test_flow_discovered_bridges( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGEID + assert result["title"] == BRIDGE_ID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -92,7 +93,7 @@ async def test_flow_manual_configuration_decision( """Test that config flow for one discovered bridge works.""" aioclient_mock.get( pydeconz.utils.URL_DISCOVER, - json=[{"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}], + json=[{"id": BRIDGE_ID, "internalipaddress": "1.2.3.4", "internalport": 80}], headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -123,7 +124,7 @@ async def test_flow_manual_configuration_decision( aioclient_mock.get( f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -132,7 +133,7 @@ async def test_flow_manual_configuration_decision( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGEID + assert result["title"] == BRIDGE_ID assert result["data"] == { CONF_HOST: 
"1.2.3.4", CONF_PORT: 80, @@ -174,7 +175,7 @@ async def test_flow_manual_configuration( aioclient_mock.get( f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -183,7 +184,7 @@ async def test_flow_manual_configuration( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGEID + assert result["title"] == BRIDGE_ID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -222,11 +223,11 @@ async def test_manual_configuration_after_discovery_ResponseError( async def test_manual_configuration_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + config_entry_setup: MockConfigEntry, ) -> None: """Test that manual configuration can update existing config entry.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[], @@ -256,7 +257,7 @@ async def test_manual_configuration_update_configuration( aioclient_mock.get( f"http://2.3.4.5:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -266,15 +267,14 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" +@pytest.mark.usefixtures("config_entry_setup") async def test_manual_configuration_dont_update_configuration( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that _create_entry work and that bridgeid can be requested.""" - await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[], @@ -304,7 +304,7 @@ async def 
test_manual_configuration_dont_update_configuration( aioclient_mock.get( f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -367,12 +367,15 @@ async def test_manual_configuration_timeout_get_bridge( ], ) async def test_link_step_fails( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, raised_error, error_string + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + raised_error: Exception, + error_string: str, ) -> None: """Test config flow should abort if no API key was possible to retrieve.""" aioclient_mock.get( pydeconz.utils.URL_DISCOVER, - json=[{"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}], + json=[{"id": BRIDGE_ID, "internalipaddress": "1.2.3.4", "internalport": 80}], headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -399,14 +402,14 @@ async def test_link_step_fails( async def test_reauth_flow_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + config_entry_setup: MockConfigEntry, ) -> None: """Verify reauth flow can update gateway API key.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, - data=config_entry.data, + data=config_entry_setup.data, context={"source": SOURCE_REAUTH}, ) @@ -423,7 +426,7 @@ async def test_reauth_flow_update_configuration( aioclient_mock.get( f"http://1.2.3.4:80/api/{new_api_key}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -433,7 +436,7 @@ async def test_reauth_flow_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_API_KEY] == new_api_key + assert config_entry_setup.data[CONF_API_KEY] == new_api_key async def 
test_flow_ssdp_discovery( @@ -448,7 +451,7 @@ async def test_flow_ssdp_discovery( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, + ATTR_UPNP_SERIAL: BRIDGE_ID, }, ), context={"source": SOURCE_SSDP}, @@ -472,7 +475,7 @@ async def test_flow_ssdp_discovery( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGEID + assert result["title"] == BRIDGE_ID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -481,11 +484,9 @@ async def test_flow_ssdp_discovery( async def test_ssdp_discovery_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test if a discovered bridge is configured but updates with new attributes.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - with patch( "homeassistant.components.deconz.async_setup_entry", return_value=True, @@ -498,7 +499,7 @@ async def test_ssdp_discovery_update_configuration( ssdp_location="http://2.3.4.5:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, + ATTR_UPNP_SERIAL: BRIDGE_ID, }, ), context={"source": SOURCE_SSDP}, @@ -507,15 +508,14 @@ async def test_ssdp_discovery_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" assert len(mock_setup_entry.mock_calls) == 1 async def test_ssdp_discovery_dont_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test if a discovered bridge has already been configured.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, @@ -525,7 +525,7 @@ 
async def test_ssdp_discovery_dont_update_configuration( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, + ATTR_UPNP_SERIAL: BRIDGE_ID, }, ), context={"source": SOURCE_SSDP}, @@ -533,17 +533,14 @@ async def test_ssdp_discovery_dont_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "1.2.3.4" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" +@pytest.mark.parametrize("config_entry_source", [SOURCE_HASSIO]) async def test_ssdp_discovery_dont_update_existing_hassio_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test to ensure the SSDP discovery does not update an Hass.io entry.""" - config_entry = await setup_deconz_integration( - hass, aioclient_mock, source=SOURCE_HASSIO - ) - result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, data=ssdp.SsdpServiceInfo( @@ -552,7 +549,7 @@ async def test_ssdp_discovery_dont_update_existing_hassio_configuration( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, + ATTR_UPNP_SERIAL: BRIDGE_ID, }, ), context={"source": SOURCE_SSDP}, @@ -560,7 +557,7 @@ async def test_ssdp_discovery_dont_update_existing_hassio_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "1.2.3.4" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" async def test_flow_hassio_discovery(hass: HomeAssistant) -> None: @@ -572,7 +569,7 @@ async def test_flow_hassio_discovery(hass: HomeAssistant) -> None: "addon": "Mock Addon", CONF_HOST: "mock-deconz", CONF_PORT: 80, - CONF_SERIAL: BRIDGEID, + CONF_SERIAL: BRIDGE_ID, CONF_API_KEY: API_KEY, }, name="Mock Addon", @@ -610,11 +607,10 @@ async def 
test_flow_hassio_discovery(hass: HomeAssistant) -> None: async def test_hassio_discovery_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + config_entry_setup: MockConfigEntry, ) -> None: """Test we can update an existing config entry.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - with patch( "homeassistant.components.deconz.async_setup_entry", return_value=True, @@ -626,7 +622,7 @@ async def test_hassio_discovery_update_configuration( CONF_HOST: "2.3.4.5", CONF_PORT: 8080, CONF_API_KEY: "updated", - CONF_SERIAL: BRIDGEID, + CONF_SERIAL: BRIDGE_ID, }, name="Mock Addon", slug="deconz", @@ -638,18 +634,15 @@ async def test_hassio_discovery_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "2.3.4.5" - assert config_entry.data[CONF_PORT] == 8080 - assert config_entry.data[CONF_API_KEY] == "updated" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_PORT] == 8080 + assert config_entry_setup.data[CONF_API_KEY] == "updated" assert len(mock_setup_entry.mock_calls) == 1 -async def test_hassio_discovery_dont_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.usefixtures("config_entry_setup") +async def test_hassio_discovery_dont_update_configuration(hass: HomeAssistant) -> None: """Test we can update an existing config entry.""" - await setup_deconz_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, data=HassioServiceInfo( @@ -657,7 +650,7 @@ async def test_hassio_discovery_dont_update_configuration( CONF_HOST: "1.2.3.4", CONF_PORT: 80, CONF_API_KEY: API_KEY, - CONF_SERIAL: BRIDGEID, + CONF_SERIAL: BRIDGE_ID, }, name="Mock Addon", slug="deconz", @@ -671,12 +664,10 @@ async def test_hassio_discovery_dont_update_configuration( async def 
test_option_flow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test config flow options.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) + result = await hass.config_entries.options.async_init(config_entry_setup.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "deconz_devices" diff --git a/tests/components/deconz/test_cover.py b/tests/components/deconz/test_cover.py index 69452c3285e..f1573394fae 100644 --- a/tests/components/deconz/test_cover.py +++ b/tests/components/deconz/test_cover.py @@ -1,10 +1,13 @@ """deCONZ cover platform tests.""" +from collections.abc import Callable from unittest.mock import patch +import pytest +from syrupy import SnapshotAssertion + from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, - ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, @@ -17,80 +20,59 @@ from homeassistant.components.cover import ( SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_OPEN, - STATE_UNAVAILABLE, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_covers( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no cover entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - -async def test_cover( - 
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that all supported cover entities are created.""" - data = { - "lights": { - "1": { +@pytest.mark.parametrize( + "light_payload", + [ + { + "0": { "name": "Window covering device", "type": "Window covering device", "state": {"lift": 100, "open": False, "reachable": True}, "modelid": "lumi.curtain", "uniqueid": "00:00:00:00:00:00:00:01-00", }, - "2": { + "1": { "name": "Unsupported cover", "type": "Not a cover", "state": {"reachable": True}, "uniqueid": "00:00:00:00:00:00:00:02-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - cover = hass.states.get("cover.window_covering_device") - assert cover.state == STATE_CLOSED - assert cover.attributes[ATTR_CURRENT_POSITION] == 0 - assert not hass.states.get("cover.unsupported_cover") + ], +) +async def test_cover( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + light_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, +) -> None: + """Test that all supported cover entities are created.""" + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals cover is open - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"lift": 0, "open": True}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data({"state": {"lift": 0, "open": True}}) cover = hass.states.get("cover.window_covering_device") assert cover.state == STATE_OPEN assert cover.attributes[ATTR_CURRENT_POSITION] == 100 # Verify service calls for 
cover - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service open cover @@ -132,56 +114,46 @@ async def test_cover( ) assert aioclient_mock.mock_calls[4][2] == {"stop": True} - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "87269755b9b3a046485fdae8d96b252c", + "lastannounced": None, + "lastseen": "2020-08-01T16:22:05Z", + "manufacturername": "AXIS", + "modelid": "Gear", + "name": "Covering device", + "state": { + "bri": 0, + "lift": 0, + "on": False, + "open": True, + "reachable": True, + "tilt": 0, + }, + "swversion": "100-5.3.5.1122", + "type": "Window covering device", + "uniqueid": "00:24:46:00:00:12:34:56-01", + } + ], +) async def test_tilt_cover( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + snapshot: SnapshotAssertion, ) -> None: """Test that tilting a cover works.""" - data = { - "lights": { - "0": { - "etag": "87269755b9b3a046485fdae8d96b252c", - "lastannounced": None, - "lastseen": "2020-08-01T16:22:05Z", - "manufacturername": "AXIS", - "modelid": "Gear", - "name": "Covering device", - "state": { - "bri": 0, - "lift": 0, - "on": False, - "open": True, - "reachable": True, - "tilt": 0, - }, - "swversion": "100-5.3.5.1122", - "type": "Window covering device", - "uniqueid": "00:24:46:00:00:12:34:56-01", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - 
assert len(hass.states.async_all()) == 1 - covering_device = hass.states.get("cover.covering_device") - assert covering_device.state == STATE_OPEN - assert covering_device.attributes[ATTR_CURRENT_TILT_POSITION] == 100 + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify service calls for tilting cover - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service set tilt cover @@ -224,44 +196,45 @@ async def test_tilt_cover( assert aioclient_mock.mock_calls[4][2] == {"stop": True} +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "4cefc909134c8e99086b55273c2bde67", + "hascolor": False, + "lastannounced": "2022-08-08T12:06:18Z", + "lastseen": "2022-08-14T14:22Z", + "manufacturername": "Keen Home Inc", + "modelid": "SV01-410-MP-1.0", + "name": "Vent", + "state": { + "alert": "none", + "bri": 242, + "on": False, + "reachable": True, + "sat": 10, + }, + "swversion": "0x00000012", + "type": "Level controllable output", + "uniqueid": "00:22:a3:00:00:00:00:00-01", + } + ], +) async def test_level_controllable_output_cover( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + snapshot: SnapshotAssertion, ) -> None: """Test that tilting a cover works.""" - data = { - "lights": { - "0": { - "etag": "4cefc909134c8e99086b55273c2bde67", - "hascolor": False, - "lastannounced": "2022-08-08T12:06:18Z", - "lastseen": "2022-08-14T14:22Z", - "manufacturername": "Keen Home Inc", - "modelid": "SV01-410-MP-1.0", - "name": "Vent", - "state": { - "alert": "none", - "bri": 242, - "on": False, - "reachable": True, - "sat": 10, - }, - "swversion": "0x00000012", 
- "type": "Level controllable output", - "uniqueid": "00:22:a3:00:00:00:00:00-01", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 1 - covering_device = hass.states.get("cover.vent") - assert covering_device.state == STATE_OPEN - assert covering_device.attributes[ATTR_CURRENT_TILT_POSITION] == 97 + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify service calls for tilting cover - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service open cover diff --git a/tests/components/deconz/test_deconz_event.py b/tests/components/deconz/test_deconz_event.py index 1193f348e38..8bf7bb146d1 100644 --- a/tests/components/deconz/test_deconz_event.py +++ b/tests/components/deconz/test_deconz_event.py @@ -1,12 +1,11 @@ """Test deCONZ remote events.""" -from unittest.mock import patch - from pydeconz.models.sensor.ancillary_control import ( AncillaryControlAction, AncillaryControlPanel, ) from pydeconz.models.sensor.presence import PresenceStatePresenceEvent +import pytest from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN from homeassistant.components.deconz.deconz_event import ( @@ -18,31 +17,19 @@ from homeassistant.components.deconz.deconz_event import ( CONF_DECONZ_RELATIVE_ROTARY_EVENT, RELATIVE_ROTARY_DECONZ_TO_EVENT, ) -from homeassistant.const import ( - CONF_DEVICE_ID, - CONF_EVENT, - CONF_ID, - CONF_UNIQUE_ID, - STATE_UNAVAILABLE, -) +from homeassistant.const import CONF_DEVICE_ID, CONF_EVENT, CONF_ID, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .test_gateway import DECONZ_WEB_REQUEST, 
setup_deconz_integration +from .conftest import WebsocketDataType -from tests.common import async_capture_events -from tests.test_util.aiohttp import AiohttpClientMocker +from tests.common import MockConfigEntry, async_capture_events -async def test_deconz_events( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, -) -> None: - """Test successful creation of deconz events.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -79,14 +66,23 @@ async def test_deconz_events( "uniqueid": "00:00:00:00:00:00:00:05-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + ], +) +async def test_deconz_events( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, + sensor_ws_data: WebsocketDataType, +) -> None: + """Test successful creation of deconz events.""" assert len(hass.states.async_all()) == 3 # 5 switches + 2 additional devices for deconz service and host assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 7 ) assert hass.states.get("sensor.switch_2_battery").state == "100" @@ -95,15 +91,7 @@ async def test_deconz_events( captured_events = async_capture_events(hass, CONF_DECONZ_EVENT) - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"buttonevent": 2000}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"id": "1", "state": {"buttonevent": 2000}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -117,15 +105,7 @@ async def test_deconz_events( "device_id": device.id, } - 
event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "3", - "state": {"buttonevent": 2000}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"id": "3", "state": {"buttonevent": 2000}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:03")} @@ -140,15 +120,7 @@ async def test_deconz_events( "device_id": device.id, } - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "4", - "state": {"gesture": 0}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"id": "4", "state": {"gesture": 0}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:04")} @@ -164,14 +136,10 @@ async def test_deconz_events( } event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", "id": "5", "state": {"buttonevent": 6002, "angle": 110, "xy": [0.5982, 0.3897]}, } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data(event_changed_sensor) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:05")} @@ -189,39 +157,14 @@ async def test_deconz_events( # Unsupported event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "name": "other name", - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"id": "1", "name": "other name"}) assert len(captured_events) == 4 - await hass.config_entries.async_unload(config_entry.entry_id) - states = hass.states.async_all() - assert len(hass.states.async_all()) == 3 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert 
len(hass.states.async_all()) == 0 - - -async def test_deconz_alarm_events( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, -) -> None: - """Test successful creation of deconz alarm events.""" - data = { - "alarmsystems": { +@pytest.mark.parametrize( + "alarm_system_payload", + [ + { "0": { "name": "default", "config": { @@ -248,43 +191,55 @@ async def test_deconz_alarm_events( }, }, } - }, - "sensors": { - "1": { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - "state": { - "action": "invalid_code", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "exit_delay", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + } + ], +) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "invalid_code", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "exit_delay", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +async def test_deconz_alarm_events( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, + sensor_ws_data: 
WebsocketDataType, +) -> None: + """Test successful creation of deconz alarm events.""" assert len(hass.states.async_all()) == 4 # 1 alarm control device + 2 additional devices for deconz service and host assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 3 ) @@ -292,15 +247,7 @@ async def test_deconz_alarm_events( # Emergency event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.EMERGENCY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"action": AncillaryControlAction.EMERGENCY}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -316,15 +263,7 @@ async def test_deconz_alarm_events( # Fire event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.FIRE}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"action": AncillaryControlAction.FIRE}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -340,15 +279,7 @@ async def test_deconz_alarm_events( # Invalid code event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.INVALID_CODE}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"action": AncillaryControlAction.INVALID_CODE}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -364,15 +295,7 @@ async def test_deconz_alarm_events( # Panic event - event_changed_sensor = { 
- "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.PANIC}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"action": AncillaryControlAction.PANIC}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -388,84 +311,57 @@ async def test_deconz_alarm_events( # Only care for changes to specific action events - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.ARMED_AWAY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"action": AncillaryControlAction.ARMED_AWAY}}) assert len(captured_events) == 4 # Only care for action events - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"panel": AncillaryControlPanel.ARMED_AWAY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"panel": AncillaryControlPanel.ARMED_AWAY}}) assert len(captured_events) == 4 - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(hass.states.async_all()) == 4 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + 
"state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + } + ], +) async def test_deconz_presence_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + sensor_ws_data: WebsocketDataType, ) -> None: """Test successful creation of deconz presence events.""" - data = { - "sensors": { - "1": { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 5 assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 3 ) @@ -485,15 +381,7 @@ async def test_deconz_presence_events( PresenceStatePresenceEvent.LEFT_LEAVE, PresenceStatePresenceEvent.RIGHT_LEAVE, ): - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"presenceevent": presence_event}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"presenceevent": presence_event}}) assert len(captured_events) == 1 assert captured_events[0].data == { @@ -506,69 
+394,51 @@ async def test_deconz_presence_events( # Unsupported presence event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"presenceevent": PresenceStatePresenceEvent.NINE}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"presenceevent": PresenceStatePresenceEvent.NINE}}) assert len(captured_events) == 0 - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(hass.states.async_all()) == 5 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 100, + "on": True, + "reachable": True, + }, + "etag": "463728970bdb7d04048fc4373654f45a", + "lastannounced": "2022-07-03T13:57:59Z", + "lastseen": "2022-07-03T14:02Z", + "manufacturername": "Signify Netherlands B.V.", + "modelid": "RDM002", + "name": "RDM002 44", + "state": { + "expectedeventduration": 400, + "expectedrotation": 75, + "lastupdated": "2022-07-03T11:37:49.586", + "rotaryevent": 2, + }, + "swversion": "2.59.19", + "type": "ZHARelativeRotary", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-14-fc00", + } + ], +) async def test_deconz_relative_rotary_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + sensor_ws_data: WebsocketDataType, ) -> None: """Test successful creation of deconz relative rotary events.""" - data = { - "sensors": { - "1": { - "config": { - "battery": 100, - "on": True, - "reachable": True, - }, - "etag": "463728970bdb7d04048fc4373654f45a", - "lastannounced": "2022-07-03T13:57:59Z", - "lastseen": "2022-07-03T14:02Z", - "manufacturername": "Signify 
Netherlands B.V.", - "modelid": "RDM002", - "name": "RDM002 44", - "state": { - "expectedeventduration": 400, - "expectedrotation": 75, - "lastupdated": "2022-07-03T11:37:49.586", - "rotaryevent": 2, - }, - "swversion": "2.59.19", - "type": "ZHARelativeRotary", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-14-fc00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 1 assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 3 ) @@ -580,18 +450,13 @@ async def test_deconz_relative_rotary_events( for rotary_event, duration, rotation in ((1, 100, 50), (2, 200, -50)): event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", "state": { "rotaryevent": rotary_event, "expectedeventduration": duration, "expectedrotation": rotation, - }, + } } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data(event_changed_sensor) assert len(captured_events) == 1 assert captured_events[0].data == { @@ -606,38 +471,14 @@ async def test_deconz_relative_rotary_events( # Unsupported relative rotary event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "name": "123", - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"name": "123"}) assert len(captured_events) == 0 - await hass.config_entries.async_unload(config_entry.entry_id) - states = hass.states.async_all() - assert len(hass.states.async_all()) == 1 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def 
test_deconz_events_bad_unique_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, -) -> None: - """Verify no devices are created if unique id is bad or missing.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Switch 1 no unique id", "type": "ZHASwitch", @@ -652,12 +493,20 @@ async def test_deconz_events_bad_unique_id( "uniqueid": "00:00-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + ], +) +async def test_deconz_events_bad_unique_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, +) -> None: + """Verify no devices are created if unique id is bad or missing.""" assert len(hass.states.async_all()) == 1 assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 2 ) diff --git a/tests/components/deconz/test_device_trigger.py b/tests/components/deconz/test_device_trigger.py index 329cf0405db..6f74db0b82c 100644 --- a/tests/components/deconz/test_device_trigger.py +++ b/tests/components/deconz/test_device_trigger.py @@ -1,6 +1,6 @@ """deCONZ device automation tests.""" -from unittest.mock import Mock, patch +from unittest.mock import Mock import pytest from pytest_unordered import unordered @@ -27,15 +27,14 @@ from homeassistant.const import ( CONF_TYPE, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.trigger import async_initialize_triggers from homeassistant.setup import async_setup_component -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration +from .conftest import WebsocketDataType -from 
tests.common import async_get_device_automations, async_mock_service -from tests.test_util.aiohttp import AiohttpClientMocker +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -43,45 +42,37 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def automation_calls(hass): - """Track automation calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "alert": "none", + "battery": 60, + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "TRADFRI on/off switch", + "name": "TRÅDFRI on/off switch ", + "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, ) -> None: """Test triggers work.""" - data = { - "sensors": { - "1": { - "config": { - "alert": "none", - "battery": 60, - "group": "10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "TRADFRI on/off switch", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_device( 
identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -155,46 +146,44 @@ async def test_get_triggers( assert triggers == unordered(expected_triggers) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "armed_stay", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "exit_delay", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_get_triggers_for_alarm_event( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, ) -> None: """Test triggers work.""" - data = { - "sensors": { - "1": { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - "state": { - "action": "armed_stay", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "exit_delay", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:00")} ) @@ -252,37 +241,34 @@ async def test_get_triggers_for_alarm_event( assert triggers == unordered(expected_triggers) 
+@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "alert": "none", + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "Unsupported model", + "name": "TRÅDFRI on/off switch ", + "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_get_triggers_manage_unsupported_remotes( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: """Verify no triggers for an unsupported remote.""" - data = { - "sensors": { - "1": { - "config": { - "alert": "none", - "group": "10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "Unsupported model", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -296,41 +282,38 @@ async def test_get_triggers_manage_unsupported_remotes( assert triggers == unordered(expected_triggers) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "alert": "none", + "battery": 60, + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "TRADFRI on/off switch", + "name": "TRÅDFRI on/off switch ", + 
"state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_functional_device_trigger( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, - automation_calls, device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], + sensor_ws_data: WebsocketDataType, ) -> None: """Test proper matching and attachment of device trigger automation.""" - - data = { - "sensors": { - "1": { - "config": { - "alert": "none", - "battery": 60, - "group": "10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "TRADFRI on/off switch", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -359,27 +342,16 @@ async def test_functional_device_trigger( assert len(hass.states.async_entity_ids(AUTOMATION_DOMAIN)) == 1 - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"buttonevent": 1002}, - } - await mock_deconz_websocket(data=event_changed_sensor) + await sensor_ws_data({"state": {"buttonevent": 1002}}) await hass.async_block_till_done() - - assert len(automation_calls) == 1 - assert automation_calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" @pytest.mark.skip(reason="Temporarily disabled until automation validation is improved") -async 
def test_validate_trigger_unknown_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.usefixtures("config_entry_setup") +async def test_validate_trigger_unknown_device(hass: HomeAssistant) -> None: """Test unknown device does not return a trigger config.""" - await setup_deconz_integration(hass, aioclient_mock) - assert await async_setup_component( hass, AUTOMATION_DOMAIN, @@ -408,14 +380,12 @@ async def test_validate_trigger_unknown_device( async def test_validate_trigger_unsupported_device( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, ) -> None: """Test unsupported device doesn't return a trigger config.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, + config_entry_id=config_entry_setup.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, model="unsupported", ) @@ -450,14 +420,12 @@ async def test_validate_trigger_unsupported_device( async def test_validate_trigger_unsupported_trigger( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, ) -> None: """Test unsupported trigger does not return a trigger config.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, + config_entry_id=config_entry_setup.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, model="TRADFRI on/off switch", ) @@ -494,14 +462,12 @@ async def test_validate_trigger_unsupported_trigger( async def test_attach_trigger_no_matching_event( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, ) -> None: """Test no matching event for device doesn't return a 
trigger config.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, + config_entry_id=config_entry_setup.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, name="Tradfri switch", model="TRADFRI on/off switch", diff --git a/tests/components/deconz/test_diagnostics.py b/tests/components/deconz/test_diagnostics.py index bfbc27b206d..2abc6d83995 100644 --- a/tests/components/deconz/test_diagnostics.py +++ b/tests/components/deconz/test_diagnostics.py @@ -2,30 +2,28 @@ from pydeconz.websocket import State from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant -from .test_gateway import setup_deconz_integration +from .conftest import WebsocketStateType +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateType, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - await mock_deconz_websocket(state=State.RUNNING) + await mock_websocket_state(State.RUNNING) await hass.async_block_till_done() - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry_setup + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/deconz/test_fan.py b/tests/components/deconz/test_fan.py index 5da0398c3e6..21809a138c6 100644 --- 
a/tests/components/deconz/test_fan.py +++ b/tests/components/deconz/test_fan.py @@ -1,9 +1,10 @@ """deCONZ fan platform tests.""" +from collections.abc import Callable from unittest.mock import patch import pytest -from voluptuous.error import MultipleInvalid +from syrupy import SnapshotAssertion from homeassistant.components.fan import ( ATTR_PERCENTAGE, @@ -12,129 +13,67 @@ from homeassistant.components.fan import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_fans( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no fan entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "432f3de28965052961a99e3c5494daf4", + "hascolor": False, + "manufacturername": "King Of Fans, Inc.", + "modelid": "HDC52EastwindFan", + "name": "Ceiling fan", + "state": { + "alert": "none", + "bri": 254, + "on": False, + "reachable": True, + "speed": 4, + }, + "swversion": "0000000F", + "type": "Fan", + "uniqueid": "00:22:a3:00:00:27:8b:81-01", + } + ], +) async def test_fans( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + aioclient_mock: AiohttpClientMocker, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], 
AiohttpClientMocker], + light_ws_data: WebsocketDataType, ) -> None: """Test that all supported fan entities are created.""" - data = { - "lights": { - "1": { - "etag": "432f3de28965052961a99e3c5494daf4", - "hascolor": False, - "manufacturername": "King Of Fans, Inc.", - "modelid": "HDC52EastwindFan", - "name": "Ceiling fan", - "state": { - "alert": "none", - "bri": 254, - "on": False, - "reachable": True, - "speed": 4, - }, - "swversion": "0000000F", - "type": "Fan", - "uniqueid": "00:22:a3:00:00:27:8b:81-01", - } - } - } + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.FAN]): + config_entry = await config_entry_factory() - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 # Light and fan - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Test states - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 1}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 25 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 2}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 50 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 3}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert 
hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 4}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 0}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() + for speed, percent in (1, 25), (2, 50), (3, 75), (4, 100): + await light_ws_data({"state": {"speed": speed}}) + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == percent + await light_ws_data({"state": {"speed": 0}}) assert hass.states.get("fan.ceiling_fan").state == STATE_OFF assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 0 # Test service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service turn on fan using saved default_on_speed @@ -166,323 +105,20 @@ async def test_fans( ) assert aioclient_mock.mock_calls[3][2] == {"speed": 1} - # Service set fan percentage to 20% + # Service set fan percentage - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 20}, - blocking=True, - ) - assert aioclient_mock.mock_calls[4][2] == {"speed": 1} - - # Service set fan percentage to 40% - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 40}, - blocking=True, - ) - assert aioclient_mock.mock_calls[5][2] == {"speed": 2} - - # Service 
set fan percentage to 60% - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 60}, - blocking=True, - ) - assert aioclient_mock.mock_calls[6][2] == {"speed": 3} - - # Service set fan percentage to 80% - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 80}, - blocking=True, - ) - assert aioclient_mock.mock_calls[7][2] == {"speed": 4} - - # Service set fan percentage to 0% does not equal off - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 0}, - blocking=True, - ) - assert aioclient_mock.mock_calls[8][2] == {"speed": 0} - - # Events with an unsupported speed does not get converted - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 5}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert not hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_fans_legacy_speed_modes( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that all supported fan entities are created. - - Legacy fan support. 
- """ - data = { - "lights": { - "1": { - "etag": "432f3de28965052961a99e3c5494daf4", - "hascolor": False, - "manufacturername": "King Of Fans, Inc.", - "modelid": "HDC52EastwindFan", - "name": "Ceiling fan", - "state": { - "alert": "none", - "bri": 254, - "on": False, - "reachable": True, - "speed": 4, - }, - "swversion": "0000000F", - "type": "Fan", - "uniqueid": "00:22:a3:00:00:27:8b:81-01", - } - } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 # Light and fan - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - - # Test states - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 1}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 25 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 2}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 50 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 3}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 4}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert 
hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 0}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_OFF - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 0 - - # Test service calls - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") - - # Service turn on fan using saved default_on_speed - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[1][2] == {"speed": 4} - - # Service turn on fan with speed_off - # async_turn_on_compat use speed_to_percentage which will return 0 - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 0}, - blocking=True, - ) - assert aioclient_mock.mock_calls[2][2] == {"speed": 0} - - # Service turn on fan with bad speed - # async_turn_on_compat use speed_to_percentage which will convert to SPEED_MEDIUM -> 2 - - with pytest.raises(MultipleInvalid): - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: "bad"}, - blocking=True, - ) - - # Service turn on fan to low speed - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 25}, - blocking=True, - ) - assert aioclient_mock.mock_calls[3][2] == {"speed": 1} - - # Service turn on fan to medium speed - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 50}, - blocking=True, - ) - assert aioclient_mock.mock_calls[4][2] == {"speed": 2} - - # Service turn on fan to 
high speed - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 100}, - blocking=True, - ) - assert aioclient_mock.mock_calls[5][2] == {"speed": 4} - - # Service set fan speed to low - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 25}, - blocking=True, - ) - assert aioclient_mock.mock_calls[6][2] == {"speed": 1} - - # Service set fan speed to medium - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 50}, - blocking=True, - ) - assert aioclient_mock.mock_calls[7][2] == {"speed": 2} - - # Service set fan speed to high - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 100}, - blocking=True, - ) - assert aioclient_mock.mock_calls[8][2] == {"speed": 4} - - # Service set fan speed to off - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 0}, - blocking=True, - ) - assert aioclient_mock.mock_calls[9][2] == {"speed": 0} - - # Service set fan speed to unsupported value - - with pytest.raises(MultipleInvalid): + for percent, speed in (20, 1), (40, 2), (60, 3), (80, 4), (0, 0): + aioclient_mock.mock_calls.clear() await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: "bad value"}, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: percent}, blocking=True, ) + assert aioclient_mock.mock_calls[0][2] == {"speed": speed} - # Events with an unsupported speed gets converted to default speed "medium" - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 3}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() + # Events 
with an unsupported speed does not get converted + await light_ws_data({"state": {"speed": 5}}) assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 + assert not hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] diff --git a/tests/components/deconz/test_gateway.py b/tests/components/deconz/test_gateway.py deleted file mode 100644 index b00a5cc1f05..00000000000 --- a/tests/components/deconz/test_gateway.py +++ /dev/null @@ -1,319 +0,0 @@ -"""Test deCONZ gateway.""" - -from copy import deepcopy -from typing import Any -from unittest.mock import patch - -import pydeconz -from pydeconz.websocket import State -import pytest - -from homeassistant.components import ssdp -from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, -) -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN -from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN -from homeassistant.components.deconz.config_flow import DECONZ_MANUFACTURERURL -from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN -from homeassistant.components.deconz.errors import AuthenticationRequired, CannotConnect -from homeassistant.components.deconz.hub import DeconzHub, get_deconz_api -from homeassistant.components.fan import DOMAIN as FAN_DOMAIN -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.components.lock import 
DOMAIN as LOCK_DOMAIN -from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN -from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN -from homeassistant.components.select import DOMAIN as SELECT_DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN -from homeassistant.components.ssdp import ( - ATTR_UPNP_MANUFACTURER_URL, - ATTR_UPNP_SERIAL, - ATTR_UPNP_UDN, -) -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_SSDP, SOURCE_USER -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_PORT, - CONTENT_TYPE_JSON, - STATE_OFF, - STATE_UNAVAILABLE, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.typing import UNDEFINED, UndefinedType - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker - -API_KEY = "1234567890ABCDEF" -BRIDGEID = "01234E56789A" -HOST = "1.2.3.4" -PORT = 80 - -DEFAULT_URL = f"http://{HOST}:{PORT}/api/{API_KEY}" - -ENTRY_CONFIG = {CONF_API_KEY: API_KEY, CONF_HOST: HOST, CONF_PORT: PORT} - -ENTRY_OPTIONS = {} - -DECONZ_CONFIG = { - "bridgeid": BRIDGEID, - "ipaddress": HOST, - "mac": "00:11:22:33:44:55", - "modelid": "deCONZ", - "name": "deCONZ mock gateway", - "sw_version": "2.05.69", - "uuid": "1234", - "websocketport": 1234, -} - -DECONZ_WEB_REQUEST = { - "config": DECONZ_CONFIG, - "groups": {}, - "lights": {}, - "sensors": {}, -} - - -def mock_deconz_request(aioclient_mock, config, data): - """Mock a deCONZ get request.""" - host = config[CONF_HOST] - port = config[CONF_PORT] - api_key = config[CONF_API_KEY] - - aioclient_mock.get( - f"http://{host}:{port}/api/{api_key}", - json=deepcopy(data), - headers={"content-type": CONTENT_TYPE_JSON}, - ) - - -def mock_deconz_put_request(aioclient_mock, config, path): - 
"""Mock a deCONZ put request.""" - host = config[CONF_HOST] - port = config[CONF_PORT] - api_key = config[CONF_API_KEY] - - aioclient_mock.put( - f"http://{host}:{port}/api/{api_key}{path}", - json={}, - headers={"content-type": CONTENT_TYPE_JSON}, - ) - - -async def setup_deconz_integration( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker | None = None, - *, - options: dict[str, Any] | UndefinedType = UNDEFINED, - entry_id="1", - unique_id=BRIDGEID, - source=SOURCE_USER, -): - """Create the deCONZ gateway.""" - config_entry = MockConfigEntry( - domain=DECONZ_DOMAIN, - source=source, - data=deepcopy(ENTRY_CONFIG), - options=deepcopy(ENTRY_OPTIONS if options is UNDEFINED else options), - entry_id=entry_id, - unique_id=unique_id, - ) - config_entry.add_to_hass(hass) - - if aioclient_mock: - mock_deconz_request(aioclient_mock, ENTRY_CONFIG, DECONZ_WEB_REQUEST) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry - - -async def test_gateway_setup( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, -) -> None: - """Successful setup.""" - # Patching async_forward_entry_setup* is not advisable, and should be refactored - # in the future. 
- with patch( - "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups", - return_value=True, - ) as forward_entry_setup: - config_entry = await setup_deconz_integration(hass, aioclient_mock) - gateway = DeconzHub.get_hub(hass, config_entry) - assert gateway.bridgeid == BRIDGEID - assert gateway.master is True - assert gateway.config.allow_clip_sensor is False - assert gateway.config.allow_deconz_groups is True - assert gateway.config.allow_new_devices is True - - assert len(gateway.deconz_ids) == 0 - assert len(hass.states.async_all()) == 0 - - assert forward_entry_setup.mock_calls[0][1] == ( - config_entry, - [ - ALARM_CONTROL_PANEL_DOMAIN, - BINARY_SENSOR_DOMAIN, - BUTTON_DOMAIN, - CLIMATE_DOMAIN, - COVER_DOMAIN, - FAN_DOMAIN, - LIGHT_DOMAIN, - LOCK_DOMAIN, - NUMBER_DOMAIN, - SCENE_DOMAIN, - SELECT_DOMAIN, - SENSOR_DOMAIN, - SIREN_DOMAIN, - SWITCH_DOMAIN, - ], - ) - - gateway_entry = device_registry.async_get_device( - identifiers={(DECONZ_DOMAIN, gateway.bridgeid)} - ) - - assert gateway_entry.configuration_url == f"http://{HOST}:{PORT}" - assert gateway_entry.entry_type is dr.DeviceEntryType.SERVICE - - -async def test_gateway_device_configuration_url_when_addon( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, -) -> None: - """Successful setup.""" - # Patching async_forward_entry_setup* is not advisable, and should be refactored - # in the future. 
- with patch( - "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups", - return_value=True, - ): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, source=SOURCE_HASSIO - ) - gateway = DeconzHub.get_hub(hass, config_entry) - - gateway_entry = device_registry.async_get_device( - identifiers={(DECONZ_DOMAIN, gateway.bridgeid)} - ) - - assert ( - gateway_entry.configuration_url == "homeassistant://hassio/ingress/core_deconz" - ) - - -async def test_connection_status_signalling( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Make sure that connection status triggers a dispatcher send.""" - data = { - "sensors": { - "1": { - "name": "presence", - "type": "ZHAPresence", - "state": {"presence": False}, - "config": {"on": True, "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - - assert hass.states.get("binary_sensor.presence").state == STATE_OFF - - await mock_deconz_websocket(state=State.RETRYING) - await hass.async_block_till_done() - - assert hass.states.get("binary_sensor.presence").state == STATE_UNAVAILABLE - - await mock_deconz_websocket(state=State.RUNNING) - await hass.async_block_till_done() - - assert hass.states.get("binary_sensor.presence").state == STATE_OFF - - -async def test_update_address( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Make sure that connection status triggers a dispatcher send.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - gateway = DeconzHub.get_hub(hass, config_entry) - assert gateway.api.host == "1.2.3.4" - - with patch( - "homeassistant.components.deconz.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - await hass.config_entries.flow.async_init( - DECONZ_DOMAIN, - data=ssdp.SsdpServiceInfo( - ssdp_st="mock_st", - ssdp_usn="mock_usn", - 
ssdp_location="http://2.3.4.5:80/", - upnp={ - ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, - ATTR_UPNP_UDN: "uuid:456DEF", - }, - ), - context={"source": SOURCE_SSDP}, - ) - await hass.async_block_till_done() - - assert gateway.api.host == "2.3.4.5" - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_reset_after_successful_setup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Make sure that connection status triggers a dispatcher send.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - gateway = DeconzHub.get_hub(hass, config_entry) - - result = await gateway.async_reset() - await hass.async_block_till_done() - - assert result is True - - -async def test_get_deconz_api(hass: HomeAssistant) -> None: - """Successful call.""" - config_entry = MockConfigEntry(domain=DECONZ_DOMAIN, data=ENTRY_CONFIG) - with patch("pydeconz.DeconzSession.refresh_state", return_value=True): - assert await get_deconz_api(hass, config_entry) - - -@pytest.mark.parametrize( - ("side_effect", "raised_exception"), - [ - (TimeoutError, CannotConnect), - (pydeconz.RequestError, CannotConnect), - (pydeconz.ResponseError, CannotConnect), - (pydeconz.Unauthorized, AuthenticationRequired), - ], -) -async def test_get_deconz_api_fails( - hass: HomeAssistant, side_effect, raised_exception -) -> None: - """Failed call.""" - config_entry = MockConfigEntry(domain=DECONZ_DOMAIN, data=ENTRY_CONFIG) - with ( - patch( - "pydeconz.DeconzSession.refresh_state", - side_effect=side_effect, - ), - pytest.raises(raised_exception), - ): - assert await get_deconz_api(hass, config_entry) diff --git a/tests/components/deconz/test_hub.py b/tests/components/deconz/test_hub.py new file mode 100644 index 00000000000..43c51179337 --- /dev/null +++ b/tests/components/deconz/test_hub.py @@ -0,0 +1,100 @@ +"""Test deCONZ gateway.""" + +from unittest.mock import patch + +from pydeconz.websocket import State +import pytest 
+from syrupy import SnapshotAssertion + +from homeassistant.components import ssdp +from homeassistant.components.deconz.config_flow import DECONZ_MANUFACTURERURL +from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN +from homeassistant.components.ssdp import ( + ATTR_UPNP_MANUFACTURER_URL, + ATTR_UPNP_SERIAL, + ATTR_UPNP_UDN, +) +from homeassistant.config_entries import SOURCE_SSDP +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .conftest import BRIDGE_ID + +from tests.common import MockConfigEntry + + +async def test_device_registry_entry( + config_entry_setup: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Successful setup.""" + device_entry = device_registry.async_get_device( + identifiers={(DECONZ_DOMAIN, config_entry_setup.unique_id)} + ) + assert device_entry == snapshot + + +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "presence", + "type": "ZHAPresence", + "state": {"presence": False}, + "config": {"on": True, "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_connection_status_signalling( + hass: HomeAssistant, mock_websocket_state +) -> None: + """Make sure that connection status triggers a dispatcher send.""" + assert hass.states.get("binary_sensor.presence").state == STATE_OFF + + await mock_websocket_state(State.RETRYING) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.presence").state == STATE_UNAVAILABLE + + await mock_websocket_state(State.RUNNING) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.presence").state == STATE_OFF + + +async def test_update_address( + hass: HomeAssistant, config_entry_setup: MockConfigEntry +) -> None: + """Make sure that connection status triggers a 
dispatcher send.""" + assert config_entry_setup.data["host"] == "1.2.3.4" + + with ( + patch( + "homeassistant.components.deconz.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch("pydeconz.gateway.WSClient") as ws_mock, + ): + await hass.config_entries.flow.async_init( + DECONZ_DOMAIN, + data=ssdp.SsdpServiceInfo( + ssdp_st="mock_st", + ssdp_usn="mock_usn", + ssdp_location="http://2.3.4.5:80/", + upnp={ + ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, + ATTR_UPNP_SERIAL: BRIDGE_ID, + ATTR_UPNP_UDN: "uuid:456DEF", + }, + ), + context={"source": SOURCE_SSDP}, + ) + await hass.async_block_till_done() + + assert ws_mock.call_args[0][1] == "2.3.4.5" + assert config_entry_setup.data["host"] == "2.3.4.5" + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/deconz/test_init.py b/tests/components/deconz/test_init.py index d08bd039184..390d8b9b353 100644 --- a/tests/components/deconz/test_init.py +++ b/tests/components/deconz/test_init.py @@ -3,64 +3,59 @@ import asyncio from unittest.mock import patch -from homeassistant.components.deconz import ( - DeconzHub, - async_setup_entry, - async_unload_entry, +import pydeconz +import pytest + +from homeassistant.components.deconz.const import ( + CONF_MASTER_GATEWAY, + DOMAIN as DECONZ_DOMAIN, ) -from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN -from homeassistant.components.deconz.errors import AuthenticationRequired, CannotConnect +from homeassistant.components.deconz.errors import AuthenticationRequired +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration +from .conftest import ConfigEntryFactoryType -from tests.test_util.aiohttp import AiohttpClientMocker - -ENTRY1_HOST = "1.2.3.4" -ENTRY1_PORT = 80 -ENTRY1_API_KEY = "1234567890ABCDEF" -ENTRY1_BRIDGEID = "12345ABC" -ENTRY1_UUID = "456DEF" - -ENTRY2_HOST = "2.3.4.5" -ENTRY2_PORT = 
80 -ENTRY2_API_KEY = "1234567890ABCDEF" -ENTRY2_BRIDGEID = "23456DEF" -ENTRY2_UUID = "789ACE" +from tests.common import MockConfigEntry -async def setup_entry(hass, entry): - """Test that setup entry works.""" - with ( - patch.object(DeconzHub, "async_setup", return_value=True), - patch.object(DeconzHub, "async_update_device_registry", return_value=True), - ): - assert await async_setup_entry(hass, entry) is True +async def test_setup_entry(config_entry_setup: MockConfigEntry) -> None: + """Test successful setup of entry.""" + assert config_entry_setup.state is ConfigEntryState.LOADED + assert config_entry_setup.options[CONF_MASTER_GATEWAY] is True -async def test_setup_entry_successful( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +@pytest.mark.parametrize( + ("side_effect", "state"), + [ + # Failed authentication trigger a reauthentication flow + (pydeconz.Unauthorized, ConfigEntryState.SETUP_ERROR), + # Connection fails + (TimeoutError, ConfigEntryState.SETUP_RETRY), + (pydeconz.RequestError, ConfigEntryState.SETUP_RETRY), + (pydeconz.ResponseError, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_get_deconz_api_fails( + hass: HomeAssistant, + config_entry: MockConfigEntry, + side_effect: Exception, + state: ConfigEntryState, ) -> None: - """Test setup entry is successful.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert hass.data[DECONZ_DOMAIN] - assert config_entry.entry_id in hass.data[DECONZ_DOMAIN] - assert hass.data[DECONZ_DOMAIN][config_entry.entry_id].master - - -async def test_setup_entry_fails_config_entry_not_ready(hass: HomeAssistant) -> None: - """Failed authentication trigger a reauthentication flow.""" + """Failed setup.""" + config_entry.add_to_hass(hass) with patch( - "homeassistant.components.deconz.get_deconz_api", - side_effect=CannotConnect, + "homeassistant.components.deconz.hub.api.DeconzSession.refresh_state", + side_effect=side_effect, ): - await setup_deconz_integration(hass) - - 
assert hass.data[DECONZ_DOMAIN] == {} + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is state -async def test_setup_entry_fails_trigger_reauth_flow(hass: HomeAssistant) -> None: +async def test_setup_entry_fails_trigger_reauth_flow( + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType +) -> None: """Failed authentication trigger a reauthentication flow.""" with ( patch( @@ -69,89 +64,83 @@ async def test_setup_entry_fails_trigger_reauth_flow(hass: HomeAssistant) -> Non ), patch.object(hass.config_entries.flow, "async_init") as mock_flow_init, ): - await setup_deconz_integration(hass) + config_entry = await config_entry_factory() mock_flow_init.assert_called_once() - - assert hass.data[DECONZ_DOMAIN] == {} + assert config_entry.state is ConfigEntryState.SETUP_ERROR async def test_setup_entry_multiple_gateways( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Test setup entry is successful with multiple gateways.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.clear_requests() + config_entry = await config_entry_factory() - data = {"config": {"bridgeid": "01234E56789B"}} - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry2 = await setup_deconz_integration( - hass, - aioclient_mock, - entry_id="2", - unique_id="01234E56789B", - ) + entry2 = MockConfigEntry( + domain=DECONZ_DOMAIN, + entry_id="2", + unique_id="01234E56789B", + data=config_entry.data | {"host": "2.3.4.5"}, + ) + config_entry2 = await config_entry_factory(entry2) - assert len(hass.data[DECONZ_DOMAIN]) == 2 - assert hass.data[DECONZ_DOMAIN][config_entry.entry_id].master - assert not hass.data[DECONZ_DOMAIN][config_entry2.entry_id].master + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry2.state is ConfigEntryState.LOADED + assert 
config_entry.options[CONF_MASTER_GATEWAY] is True + assert config_entry2.options[CONF_MASTER_GATEWAY] is False async def test_unload_entry( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test being able to unload an entry.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert hass.data[DECONZ_DOMAIN] - - assert await async_unload_entry(hass, config_entry) - assert not hass.data[DECONZ_DOMAIN] + assert config_entry_setup.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(config_entry_setup.entry_id) + assert config_entry_setup.state is ConfigEntryState.NOT_LOADED async def test_unload_entry_multiple_gateways( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Test being able to unload an entry and master gateway gets moved.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.clear_requests() + config_entry = await config_entry_factory() - data = {"config": {"bridgeid": "01234E56789B"}} - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry2 = await setup_deconz_integration( - hass, - aioclient_mock, - entry_id="2", - unique_id="01234E56789B", - ) + entry2 = MockConfigEntry( + domain=DECONZ_DOMAIN, + entry_id="2", + unique_id="01234E56789B", + data=config_entry.data | {"host": "2.3.4.5"}, + ) + config_entry2 = await config_entry_factory(entry2) - assert len(hass.data[DECONZ_DOMAIN]) == 2 + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry2.state is ConfigEntryState.LOADED - assert await async_unload_entry(hass, config_entry) - - assert len(hass.data[DECONZ_DOMAIN]) == 1 - assert hass.data[DECONZ_DOMAIN][config_entry2.entry_id].master + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert 
config_entry2.options[CONF_MASTER_GATEWAY] is True async def test_unload_entry_multiple_gateways_parallel( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Test race condition when unloading multiple config entries in parallel.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.clear_requests() + config_entry = await config_entry_factory() - data = {"config": {"bridgeid": "01234E56789B"}} - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry2 = await setup_deconz_integration( - hass, - aioclient_mock, - entry_id="2", - unique_id="01234E56789B", - ) + entry2 = MockConfigEntry( + domain=DECONZ_DOMAIN, + entry_id="2", + unique_id="01234E56789B", + data=config_entry.data | {"host": "2.3.4.5"}, + ) + config_entry2 = await config_entry_factory(entry2) - assert len(hass.data[DECONZ_DOMAIN]) == 2 + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry2.state is ConfigEntryState.LOADED await asyncio.gather( hass.config_entries.async_unload(config_entry.entry_id), hass.config_entries.async_unload(config_entry2.entry_id), ) - assert len(hass.data[DECONZ_DOMAIN]) == 0 + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert config_entry2.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/deconz/test_light.py b/tests/components/deconz/test_light.py index d964361df57..441cb01be63 100644 --- a/tests/components/deconz/test_light.py +++ b/tests/components/deconz/test_light.py @@ -1,22 +1,20 @@ """deCONZ light platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.deconz.const import ATTR_ON, CONF_ALLOW_DECONZ_GROUPS -from homeassistant.components.deconz.light import DECONZ_GROUP +from homeassistant.components.deconz.const import CONF_ALLOW_DECONZ_GROUPS from 
homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, ATTR_EFFECT, - ATTR_EFFECT_LIST, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, - ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -34,29 +32,19 @@ from homeassistant.const import ( ATTR_SUPPORTED_FEATURES, STATE_OFF, STATE_ON, - STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import MockConfigEntry, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_lights_or_groups( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no lights or groups entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - @pytest.mark.parametrize( - ("input", "expected"), + "light_payload", [ ( # RGB light in color temp color mode { @@ -85,28 +73,7 @@ async def test_no_lights_or_groups( "swversion": "5.127.1.26420", "type": "Extended color light", "uniqueid": "00:17:88:01:01:23:45:67-00", - }, - { - "entity_id": "light.hue_go", - "state": STATE_ON, - "attributes": { - ATTR_BRIGHTNESS: 254, - ATTR_COLOR_TEMP: 375, - ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], - ATTR_SUPPORTED_COLOR_MODES: [ - ColorMode.COLOR_TEMP, - ColorMode.HS, - ColorMode.XY, - ], - ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 500, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - DECONZ_GROUP: False, - }, - }, + } ), ( # RGB light in XY color mode { @@ -135,30 +102,7 @@ async def test_no_lights_or_groups( "swversion": "1.65.9_hB3217DF4", "type": "Extended color light", 
"uniqueid": "00:17:88:01:01:23:45:67-01", - }, - { - "entity_id": "light.hue_ensis", - "state": STATE_ON, - "attributes": { - ATTR_MIN_MIREDS: 140, - ATTR_MAX_MIREDS: 650, - ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], - ATTR_SUPPORTED_COLOR_MODES: [ - ColorMode.COLOR_TEMP, - ColorMode.HS, - ColorMode.XY, - ], - ATTR_COLOR_MODE: ColorMode.XY, - ATTR_BRIGHTNESS: 254, - ATTR_HS_COLOR: (29.691, 38.039), - ATTR_RGB_COLOR: (255, 206, 158), - ATTR_XY_COLOR: (0.427, 0.373), - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - }, - }, + } ), ( # RGB light with only HS color mode { @@ -181,41 +125,7 @@ async def test_no_lights_or_groups( "swversion": None, "type": "Color dimmable light", "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", - }, - { - "entity_id": "light.lidl_xmas_light", - "state": STATE_ON, - "attributes": { - ATTR_EFFECT_LIST: [ - "carnival", - "collide", - "fading", - "fireworks", - "flag", - "glow", - "rainbow", - "snake", - "snow", - "sparkles", - "steady", - "strobe", - "twinkle", - "updown", - "vintage", - "waves", - ], - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.HS], - ATTR_COLOR_MODE: ColorMode.HS, - ATTR_BRIGHTNESS: 25, - ATTR_HS_COLOR: (294.938, 55.294), - ATTR_RGB_COLOR: (243, 113, 255), - ATTR_XY_COLOR: (0.357, 0.188), - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - }, - }, + } ), ( # Tunable white light in CT color mode { @@ -240,22 +150,7 @@ async def test_no_lights_or_groups( "swversion": "1.46.13_r26312", "type": "Color temperature light", "uniqueid": "00:17:88:01:01:23:45:67-02", - }, - { - "entity_id": "light.hue_white_ambiance", - "state": STATE_ON, - "attributes": { - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 454, - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], - ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, - ATTR_BRIGHTNESS: 254, - ATTR_COLOR_TEMP: 396, - DECONZ_GROUP: False, - 
ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH, - }, - }, + } ), ( # Dimmable light { @@ -270,19 +165,7 @@ async def test_no_lights_or_groups( "swversion": "1.55.8_r28815", "type": "Dimmable light", "uniqueid": "00:17:88:01:01:23:45:67-03", - }, - { - "entity_id": "light.hue_filament", - "state": STATE_ON, - "attributes": { - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.BRIGHTNESS], - ATTR_COLOR_MODE: ColorMode.BRIGHTNESS, - ATTR_BRIGHTNESS: 254, - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH, - }, - }, + } ), ( # On/Off light { @@ -297,17 +180,7 @@ async def test_no_lights_or_groups( "swversion": "2.0", "type": "Simple light", "uniqueid": "00:15:8d:00:01:23:45:67-01", - }, - { - "entity_id": "light.simple_light", - "state": STATE_ON, - "attributes": { - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.ONOFF], - ATTR_COLOR_MODE: ColorMode.ONOFF, - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: 0, - }, - }, + } ), ( # Gradient light { @@ -406,98 +279,63 @@ async def test_no_lights_or_groups( "swversion": "1.104.2", "type": "Extended color light", "uniqueid": "00:17:88:01:0b:0c:0d:0e-0f", - }, - { - "entity_id": "light.gradient_light", - "state": STATE_ON, - "attributes": { - ATTR_SUPPORTED_COLOR_MODES: [ - ColorMode.COLOR_TEMP, - ColorMode.HS, - ColorMode.XY, - ], - ATTR_COLOR_MODE: ColorMode.XY, - }, - }, + } ), ], ) async def test_lights( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, ) -> None: """Test that different light entities are created with expected values.""" - data = {"lights": {"0": input}} - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 1 - - light = hass.states.get(expected["entity_id"]) - 
assert light.state == expected["state"] - for attribute, expected_value in expected["attributes"].items(): - assert light.attributes[attribute] == expected_value - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.LIGHT]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) +@pytest.mark.parametrize( + "light_payload", + [ + { + "colorcapabilities": 31, + "ctmax": 500, + "ctmin": 153, + "etag": "055485a82553e654f156d41c9301b7cf", + "hascolor": True, + "lastannounced": None, + "lastseen": "2021-06-10T20:25Z", + "manufacturername": "Philips", + "modelid": "LLC020", + "name": "Hue Go", + "state": { + "alert": "none", + "bri": 254, + "colormode": "ct", + "ct": 375, + "effect": "none", + "hue": 8348, + "on": True, + "reachable": True, + "sat": 147, + "xy": [0.462, 0.4111], + }, + "swversion": "5.127.1.26420", + "type": "Extended color light", + "uniqueid": "00:17:88:01:01:23:45:67-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_light_state_change( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + light_ws_data: WebsocketDataType, ) -> None: """Verify light can change state on websocket event.""" - data = { - "lights": { - "0": { - "colorcapabilities": 31, - "ctmax": 500, - "ctmin": 153, - "etag": "055485a82553e654f156d41c9301b7cf", - "hascolor": True, - "lastannounced": None, - "lastseen": "2021-06-10T20:25Z", - "manufacturername": "Philips", - "modelid": "LLC020", - "name": "Hue Go", - "state": { - "alert": "none", - "bri": 254, - "colormode": "ct", - "ct": 375, - "effect": "none", - 
"hue": 8348, - "on": True, - "reachable": True, - "sat": 147, - "xy": [0.462, 0.4111], - }, - "swversion": "5.127.1.26420", - "type": "Extended color light", - "uniqueid": "00:17:88:01:01:23:45:67-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - assert hass.states.get("light.hue_go").state == STATE_ON - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "0", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data({"state": {"on": False}}) assert hass.states.get("light.hue_go").state == STATE_OFF @@ -639,44 +477,45 @@ async def test_light_state_change( ], ) async def test_light_service_calls( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + config_entry_factory: ConfigEntryFactoryType, + light_payload: dict[str, Any], + mock_put_request: Callable[[str, str], AiohttpClientMocker], + input: dict[str, Any], + expected: dict[str, Any], ) -> None: """Verify light can change state on websocket event.""" - data = { - "lights": { - "0": { - "colorcapabilities": 31, - "ctmax": 500, - "ctmin": 153, - "etag": "055485a82553e654f156d41c9301b7cf", - "hascolor": True, - "lastannounced": None, - "lastseen": "2021-06-10T20:25Z", - "manufacturername": "Philips", - "modelid": "LLC020", - "name": "Hue Go", - "state": { - "alert": "none", - "bri": 254, - "colormode": "ct", - "ct": 375, - "effect": "none", - "hue": 8348, - "on": input["light_on"], - "reachable": True, - "sat": 147, - "xy": [0.462, 0.4111], - }, - "swversion": "5.127.1.26420", - "type": "Extended color light", - "uniqueid": "00:17:88:01:01:23:45:67-00", - } - } + light_payload[0] = { + "colorcapabilities": 31, + "ctmax": 500, + "ctmin": 153, + "etag": "055485a82553e654f156d41c9301b7cf", + "hascolor": True, + "lastannounced": None, + 
"lastseen": "2021-06-10T20:25Z", + "manufacturername": "Philips", + "modelid": "LLC020", + "name": "Hue Go", + "state": { + "alert": "none", + "bri": 254, + "colormode": "ct", + "ct": 375, + "effect": "none", + "hue": 8348, + "on": input["light_on"], + "reachable": True, + "sat": 147, + "xy": [0.462, 0.4111], + }, + "swversion": "5.127.1.26420", + "type": "Extended color light", + "uniqueid": "00:17:88:01:01:23:45:67-00", } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) + await config_entry_factory() - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -690,41 +529,41 @@ async def test_light_service_calls( assert len(aioclient_mock.mock_calls) == 1 # not called +@pytest.mark.parametrize( + "light_payload", + [ + { + "colorcapabilities": 0, + "ctmax": 65535, + "ctmin": 0, + "etag": "9dd510cd474791481f189d2a68a3c7f1", + "hascolor": True, + "lastannounced": "2020-12-17T17:44:38Z", + "lastseen": "2021-01-11T18:36Z", + "manufacturername": "IKEA of Sweden", + "modelid": "TRADFRI bulb E27 WS opal 1000lm", + "name": "IKEA light", + "state": { + "alert": "none", + "bri": 156, + "colormode": "ct", + "ct": 250, + "on": True, + "reachable": True, + }, + "swversion": "2.0.022", + "type": "Color temperature light", + "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_ikea_default_transition_time( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Verify that service calls to IKEA lights always extend with transition tinme 0 if absent.""" - data = { - "lights": { - "0": { - "colorcapabilities": 0, - "ctmax": 65535, - "ctmin": 0, - "etag": "9dd510cd474791481f189d2a68a3c7f1", - "hascolor": True, - "lastannounced": 
"2020-12-17T17:44:38Z", - "lastseen": "2021-01-11T18:36Z", - "manufacturername": "IKEA of Sweden", - "modelid": "TRADFRI bulb E27 WS opal 1000lm", - "name": "IKEA light", - "state": { - "alert": "none", - "bri": 156, - "colormode": "ct", - "ct": 250, - "on": True, - "reachable": True, - }, - "swversion": "2.0.022", - "type": "Color temperature light", - "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", - }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -758,40 +597,39 @@ async def test_ikea_default_transition_time( } +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "87a89542bf9b9d0aa8134919056844f8", + "hascolor": True, + "lastannounced": None, + "lastseen": "2020-12-05T22:57Z", + "manufacturername": "_TZE200_s8gkrkxk", + "modelid": "TS0601", + "name": "LIDL xmas light", + "state": { + "bri": 25, + "colormode": "hs", + "effect": "none", + "hue": 53691, + "on": True, + "reachable": True, + "sat": 141, + }, + "swversion": None, + "type": "Color dimmable light", + "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_lidl_christmas_light( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that lights or groups entities are created.""" - data = { - "lights": { - "0": { - "etag": "87a89542bf9b9d0aa8134919056844f8", - "hascolor": True, - "lastannounced": None, - "lastseen": "2020-12-05T22:57Z", - "manufacturername": "_TZE200_s8gkrkxk", - "modelid": "TS0601", - "name": "LIDL xmas light", - "state": { - "bri": 25, - "colormode": "hs", - "effect": "none", - "hue": 53691, - "on": True, - "reachable": True, - "sat": 141, - }, - 
"swversion": None, - "type": "Color dimmable light", - "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", - } - } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -803,135 +641,37 @@ async def test_lidl_christmas_light( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == {"on": True, "hue": 3640, "sat": 76} - assert hass.states.get("light.lidl_xmas_light") -async def test_configuration_tool( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Verify that configuration tool is not created.""" - data = { - "lights": { - "0": { - "etag": "26839cb118f5bf7ba1f2108256644010", - "hascolor": False, - "lastannounced": None, - "lastseen": "2020-11-22T11:27Z", - "manufacturername": "dresden elektronik", - "modelid": "ConBee II", - "name": "Configuration tool 1", - "state": {"reachable": True}, - "swversion": "0x264a0700", - "type": "Configuration tool", - "uniqueid": "00:21:2e:ff:ff:05:a7:a3-01", - } +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "26839cb118f5bf7ba1f2108256644010", + "hascolor": False, + "lastannounced": None, + "lastseen": "2020-11-22T11:27Z", + "manufacturername": "dresden elektronik", + "modelid": "ConBee II", + "name": "Configuration tool 1", + "state": {"reachable": True}, + "swversion": "0x264a0700", + "type": "Configuration tool", + "uniqueid": "00:21:2e:ff:ff:05:a7:a3-01", } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_configuration_tool(hass: HomeAssistant) -> None: + """Verify that configuration tool is not created.""" assert len(hass.states.async_all()) == 0 @pytest.mark.parametrize( - ("input", "expected"), + "light_payload", 
[ - ( - { - "lights": ["1", "2", "3"], - }, - { - "entity_id": "light.group", - "state": ATTR_ON, - "attributes": { - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 500, - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], - ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, - ATTR_BRIGHTNESS: 255, - ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], - "all_on": False, - DECONZ_GROUP: True, - ATTR_SUPPORTED_FEATURES: 44, - }, - }, - ), - ( - { - "lights": ["3", "1", "2"], - }, - { - "entity_id": "light.group", - "state": ATTR_ON, - "attributes": { - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 500, - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], - ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, - ATTR_BRIGHTNESS: 50, - ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], - "all_on": False, - DECONZ_GROUP: True, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - }, - }, - ), - ( - { - "lights": ["2", "3", "1"], - }, - { - "entity_id": "light.group", - "state": ATTR_ON, - "attributes": { - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 500, - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], - ATTR_COLOR_MODE: ColorMode.XY, - ATTR_HS_COLOR: (52.0, 100.0), - ATTR_RGB_COLOR: (255, 221, 0), - ATTR_XY_COLOR: (0.5, 0.5), - "all_on": False, - DECONZ_GROUP: True, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - }, - }, - ), - ], -) -async def test_groups( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected -) -> None: - """Test that different group entities are created with expected values.""" - data = { - "groups": { - "0": { - "id": "Light group id", - "name": "Group", - "type": "LightGroup", - "state": {"all_on": False, "any_on": True}, - "action": { - "alert": "none", - "bri": 127, - "colormode": "hs", - "ct": 0, - "effect": "none", - "hue": 0, - "on": True, - "sat": 127, - "scene": None, - "xy": [0, 0], - }, - "scenes": [], 
- "lights": input["lights"], - }, - }, - "lights": { + { "1": { "name": "RGB light", "state": { @@ -964,29 +704,95 @@ async def test_groups( "state": {"bri": 255, "on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:02-00", }, + } + ], +) +@pytest.mark.parametrize( + "input", + [ + ({"lights": ["1", "2", "3"]}), + ({"lights": ["3", "1", "2"]}), + ({"lights": ["2", "3", "1"]}), + ], +) +async def test_groups( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + group_payload: dict[str, Any], + input: dict[str, list[str]], + snapshot: SnapshotAssertion, +) -> None: + """Test that different group entities are created with expected values.""" + group_payload |= { + "0": { + "id": "Light group id", + "name": "Group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": True}, + "action": { + "alert": "none", + "bri": 127, + "colormode": "hs", + "ct": 0, + "effect": "none", + "hue": 0, + "on": True, + "sat": 127, + "scene": None, + "xy": [0, 0], + }, + "scenes": [], + "lights": input["lights"], }, } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 4 - - group = hass.states.get(expected["entity_id"]) - assert group.state == expected["state"] - for attribute, expected_value in expected["attributes"].items(): - assert group.attributes[attribute] == expected_value - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.LIGHT]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) 
+@pytest.mark.parametrize( + "light_payload", + [ + { + "1": { + "name": "RGB light", + "state": { + "bri": 255, + "colormode": "xy", + "effect": "colorloop", + "hue": 53691, + "on": True, + "reachable": True, + "sat": 141, + "xy": (0.5, 0.5), + }, + "type": "Extended color light", + "uniqueid": "00:00:00:00:00:00:00:00-00", + }, + "2": { + "ctmax": 454, + "ctmin": 155, + "name": "Tunable white light", + "state": { + "on": True, + "colormode": "ct", + "ct": 2500, + "reachable": True, + }, + "type": "Tunable white light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + }, + "3": { + "name": "Dimmable light", + "type": "Dimmable light", + "state": {"bri": 254, "on": True, "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:02-00", + }, + } + ], +) @pytest.mark.parametrize( ("input", "expected"), [ @@ -1045,62 +851,28 @@ async def test_groups( ], ) async def test_group_service_calls( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected + hass: HomeAssistant, + config_entry_factory: ConfigEntryFactoryType, + group_payload: dict[str, Any], + mock_put_request: Callable[[str, str], AiohttpClientMocker], + input: dict[str, Any], + expected: dict[str, Any], ) -> None: """Verify expected group web request from different service calls.""" - data = { - "groups": { - "0": { - "id": "Light group id", - "name": "Group", - "type": "LightGroup", - "state": {"all_on": False, "any_on": input["group_on"]}, - "action": {}, - "scenes": [], - "lights": input["lights"], - }, - }, - "lights": { - "1": { - "name": "RGB light", - "state": { - "bri": 255, - "colormode": "xy", - "effect": "colorloop", - "hue": 53691, - "on": True, - "reachable": True, - "sat": 141, - "xy": (0.5, 0.5), - }, - "type": "Extended color light", - "uniqueid": "00:00:00:00:00:00:00:00-00", - }, - "2": { - "ctmax": 454, - "ctmin": 155, - "name": "Tunable white light", - "state": { - "on": True, - "colormode": "ct", - "ct": 2500, - "reachable": True, - }, - "type": "Tunable white light", - 
"uniqueid": "00:00:00:00:00:00:00:01-00", - }, - "3": { - "name": "Dimmable light", - "type": "Dimmable light", - "state": {"bri": 254, "on": True, "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - }, + group_payload |= { + "0": { + "id": "Light group id", + "name": "Group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": input["group_on"]}, + "action": {}, + "scenes": [], + "lights": input["lights"], }, } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) + await config_entry_factory() - mock_deconz_put_request(aioclient_mock, config_entry.data, "/groups/0/action") + aioclient_mock = mock_put_request("/groups/0/action") await hass.services.async_call( LIGHT_DOMAIN, @@ -1114,12 +886,10 @@ async def test_group_service_calls( assert len(aioclient_mock.mock_calls) == 1 # not called -async def test_empty_group( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Verify that a group without a list of lights is not created.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "0": { "id": "Empty group id", "name": "Empty group", @@ -1129,21 +899,20 @@ async def test_empty_group( "scenes": [], "lights": [], }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_empty_group(hass: HomeAssistant) -> None: + """Verify that a group without a list of lights is not created.""" assert len(hass.states.async_all()) == 0 assert not hass.states.get("light.empty_group") -async def test_disable_light_groups( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test disallowing light groups work.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "1": { "id": "Light group id", "name": "Light group", @@ -1151,7 +920,7 @@ async def test_disable_light_groups( 
"state": {"all_on": False, "any_on": True}, "action": {}, "scenes": [], - "lights": ["1"], + "lights": ["0"], }, "2": { "id": "Empty group id", @@ -1162,32 +931,35 @@ async def test_disable_light_groups( "scenes": [], "lights": [], }, - }, - "lights": { - "1": { - "ctmax": 454, - "ctmin": 155, - "name": "Tunable white light", - "state": {"on": True, "colormode": "ct", "ct": 2500, "reachable": True}, - "type": "Tunable white light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration( - hass, - aioclient_mock, - options={CONF_ALLOW_DECONZ_GROUPS: False}, - ) - + } + ], +) +@pytest.mark.parametrize( + "light_payload", + [ + { + "ctmax": 454, + "ctmin": 155, + "name": "Tunable white light", + "state": {"on": True, "colormode": "ct", "ct": 2500, "reachable": True}, + "type": "Tunable white light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_DECONZ_GROUPS: False}]) +async def test_disable_light_groups( + hass: HomeAssistant, + config_entry_setup: MockConfigEntry, +) -> None: + """Test disallowing light groups work.""" assert len(hass.states.async_all()) == 1 assert hass.states.get("light.tunable_white_light") assert not hass.states.get("light.light_group") assert not hass.states.get("light.empty_group") hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_DECONZ_GROUPS: True} + config_entry_setup, options={CONF_ALLOW_DECONZ_GROUPS: True} ) await hass.async_block_till_done() @@ -1195,7 +967,7 @@ async def test_disable_light_groups( assert hass.states.get("light.light_group") hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_DECONZ_GROUPS: False} + config_entry_setup, options={CONF_ALLOW_DECONZ_GROUPS: False} ) await hass.async_block_till_done() @@ -1203,16 +975,10 @@ async def test_disable_light_groups( assert not hass.states.get("light.light_group") 
-async def test_non_color_light_reports_color( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Verify hs_color does not crash when a group gets updated with a bad color value. - - After calling a scene color temp light of certain manufacturers - report color temp in color space. - """ - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "0": { "action": { "alert": "none", @@ -1234,8 +1000,13 @@ async def test_non_color_light_reports_color( "state": {"all_on": False, "any_on": True}, "type": "LightGroup", } - }, - "lights": { + } + ], +) +@pytest.mark.parametrize( + "light_payload", + [ + { "0": { "ctmax": 500, "ctmin": 153, @@ -1285,11 +1056,19 @@ async def test_non_color_light_reports_color( "type": "Color temperature light", "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_non_color_light_reports_color( + hass: HomeAssistant, + light_ws_data: WebsocketDataType, +) -> None: + """Verify hs_color does not crash when a group gets updated with a bad color value. + After calling a scene color temp light of certain manufacturers + report color temp in color space. 
+ """ assert len(hass.states.async_all()) == 3 assert hass.states.get("light.group").attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, @@ -1305,9 +1084,7 @@ async def test_non_color_light_reports_color( # Updating a scene will return a faulty color value # for a non-color light causing an exception in hs_color event_changed_light = { - "e": "changed", "id": "1", - "r": "lights", "state": { "alert": None, "bri": 216, @@ -1316,24 +1093,19 @@ async def test_non_color_light_reports_color( "on": True, "reachable": True, }, - "t": "event", "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data(event_changed_light) group = hass.states.get("light.group") assert group.attributes[ATTR_COLOR_MODE] == ColorMode.XY assert group.attributes[ATTR_HS_COLOR] == (40.571, 41.176) assert group.attributes.get(ATTR_COLOR_TEMP) is None -async def test_verify_group_supported_features( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that group supported features reflect what included lights support.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "1": { "id": "Group1", "name": "Group", @@ -1343,8 +1115,13 @@ async def test_verify_group_supported_features( "scenes": [], "lights": ["1", "2", "3"], }, - }, - "lights": { + } + ], +) +@pytest.mark.parametrize( + "light_payload", + [ + { "1": { "name": "Dimmable light", "state": {"on": True, "bri": 255, "reachable": True}, @@ -1372,11 +1149,12 @@ async def test_verify_group_supported_features( "type": "Tunable white light", "uniqueid": "00:00:00:00:00:00:00:03-00", }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_verify_group_supported_features(hass: HomeAssistant) -> None: + """Test that group supported features reflect 
what included lights support.""" assert len(hass.states.async_all()) == 4 group_state = hass.states.get("light.group") @@ -1390,12 +1168,10 @@ async def test_verify_group_supported_features( ) -async def test_verify_group_color_mode_fallback( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that group supported features reflect what included lights support.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "43": { "action": { "alert": "none", @@ -1412,7 +1188,7 @@ async def test_verify_group_color_mode_fallback( "devicemembership": [], "etag": "4548e982c4cfff942f7af80958abb2a0", "id": "43", - "lights": ["13"], + "lights": ["0"], "name": "Opbergruimte", "scenes": [ { @@ -1443,62 +1219,68 @@ async def test_verify_group_color_mode_fallback( "state": {"all_on": False, "any_on": False}, "type": "LightGroup", }, - }, - "lights": { - "13": { - "capabilities": { - "alerts": [ - "none", - "select", - "lselect", - "blink", - "breathe", - "okay", - "channelchange", - "finish", - "stop", - ], - "bri": {"min_dim_level": 5}, - }, - "config": { - "bri": {"execute_if_off": True, "startup": "previous"}, - "groups": ["43"], - "on": {"startup": "previous"}, - }, - "etag": "ca0ed7763eca37f5e6b24f6d46f8a518", - "hascolor": False, - "lastannounced": None, - "lastseen": "2024-03-02T20:08Z", - "manufacturername": "Signify Netherlands B.V.", - "modelid": "LWA001", - "name": "Opbergruimte Lamp Plafond", - "productid": "Philips-LWA001-1-A19DLv5", - "productname": "Hue white lamp", - "state": { - "alert": "none", - "bri": 76, - "effect": "none", - "on": False, - "reachable": True, - }, - "swconfigid": "87169548", - "swversion": "1.104.2", - "type": "Dimmable light", - "uniqueid": "00:17:88:01:08:11:22:33-01", + } + ], +) +@pytest.mark.parametrize( + "light_payload", + [ + { + "capabilities": { + "alerts": [ + "none", + "select", + "lselect", + "blink", + "breathe", + "okay", + "channelchange", + "finish", + 
"stop", + ], + "bri": {"min_dim_level": 5}, }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + "config": { + "bri": {"execute_if_off": True, "startup": "previous"}, + "groups": ["43"], + "on": {"startup": "previous"}, + }, + "etag": "ca0ed7763eca37f5e6b24f6d46f8a518", + "hascolor": False, + "lastannounced": None, + "lastseen": "2024-03-02T20:08Z", + "manufacturername": "Signify Netherlands B.V.", + "modelid": "LWA001", + "name": "Opbergruimte Lamp Plafond", + "productid": "Philips-LWA001-1-A19DLv5", + "productname": "Hue white lamp", + "state": { + "alert": "none", + "bri": 76, + "effect": "none", + "on": False, + "reachable": True, + }, + "swconfigid": "87169548", + "swversion": "1.104.2", + "type": "Dimmable light", + "uniqueid": "00:17:88:01:08:11:22:33-01", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_verify_group_color_mode_fallback( + hass: HomeAssistant, + mock_websocket_data: WebsocketDataType, +) -> None: + """Test that group supported features reflect what included lights support.""" group_state = hass.states.get("light.opbergruimte") assert group_state.state == STATE_OFF assert group_state.attributes[ATTR_COLOR_MODE] is None - await mock_deconz_websocket( - data={ - "e": "changed", - "id": "13", + await mock_websocket_data( + { + "id": "0", "r": "lights", "state": { "alert": "none", @@ -1507,17 +1289,14 @@ async def test_verify_group_color_mode_fallback( "on": True, "reachable": True, }, - "t": "event", "uniqueid": "00:17:88:01:08:11:22:33-01", } ) - await mock_deconz_websocket( - data={ - "e": "changed", + await mock_websocket_data( + { "id": "43", "r": "groups", "state": {"all_on": True, "any_on": True}, - "t": "event", } ) group_state = hass.states.get("light.opbergruimte") diff --git a/tests/components/deconz/test_lock.py b/tests/components/deconz/test_lock.py index 03d14802083..28d60e403ef 100644 --- a/tests/components/deconz/test_lock.py +++ 
b/tests/components/deconz/test_lock.py @@ -1,79 +1,56 @@ """deCONZ lock platform tests.""" -from unittest.mock import patch +from collections.abc import Callable + +import pytest from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_LOCKED, - STATE_UNAVAILABLE, - STATE_UNLOCKED, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import WebsocketDataType from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_locks( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no lock entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "5c2ec06cde4bd654aef3a555fcd8ad12", + "hascolor": False, + "lastannounced": None, + "lastseen": "2020-08-22T15:29:03Z", + "manufacturername": "Danalock", + "modelid": "V3-BTZB", + "name": "Door lock", + "state": {"alert": "none", "on": False, "reachable": True}, + "swversion": "19042019", + "type": "Door Lock", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_lock_from_light( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + light_ws_data: WebsocketDataType, ) -> None: """Test that all supported lock entities based on lights are created.""" - data = { - "lights": { - "1": { - "etag": "5c2ec06cde4bd654aef3a555fcd8ad12", - "hascolor": False, - "lastannounced": None, - "lastseen": "2020-08-22T15:29:03Z", - "manufacturername": "Danalock", - "modelid": "V3-BTZB", - "name": 
"Door lock", - "state": {"alert": "none", "on": False, "reachable": True}, - "swversion": "19042019", - "type": "Door Lock", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 1 assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data({"state": {"on": True}}) assert hass.states.get("lock.door_lock").state == STATE_LOCKED # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service lock door @@ -95,68 +72,49 @@ async def test_lock_from_light( ) assert aioclient_mock.mock_calls[2][2] == {"on": False} - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 1 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 100, + "lock": False, + "on": True, + "reachable": True, + }, + "ep": 11, + "etag": "a43862f76b7fa48b0fbb9107df123b0e", + "lastseen": "2021-03-06T22:25Z", + "manufacturername": "Onesti Products AS", + "modelid": "easyCodeTouch_v1", + "name": "Door lock", + "state": { + "lastupdated": "2021-03-06T21:25:45.624", + "lockstate": "unlocked", + }, + "swversion": "20201211", + "type": "ZHADoorLock", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_lock_from_sensor( - hass: HomeAssistant, 
aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, ) -> None: """Test that all supported lock entities based on sensors are created.""" - data = { - "sensors": { - "1": { - "config": { - "battery": 100, - "lock": False, - "on": True, - "reachable": True, - }, - "ep": 11, - "etag": "a43862f76b7fa48b0fbb9107df123b0e", - "lastseen": "2021-03-06T22:25Z", - "manufacturername": "Onesti Products AS", - "modelid": "easyCodeTouch_v1", - "name": "Door lock", - "state": { - "lastupdated": "2021-03-06T21:25:45.624", - "lockstate": "unlocked", - }, - "swversion": "20201211", - "type": "ZHADoorLock", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 2 assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED - event_changed_light = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"lockstate": "locked"}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"lockstate": "locked"}}) assert hass.states.get("lock.door_lock").state == STATE_LOCKED # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/1/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service lock door @@ -177,14 +135,3 @@ async def test_lock_from_sensor( blocking=True, ) assert aioclient_mock.mock_calls[2][2] == {"lock": False} - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) 
== 0 diff --git a/tests/components/deconz/test_logbook.py b/tests/components/deconz/test_logbook.py index 5940d2e8e34..d23680225f1 100644 --- a/tests/components/deconz/test_logbook.py +++ b/tests/components/deconz/test_logbook.py @@ -1,6 +1,8 @@ """The tests for deCONZ logbook.""" -from unittest.mock import patch +from typing import Any + +import pytest from homeassistant.components.deconz.const import CONF_GESTURE, DOMAIN as DECONZ_DOMAIN from homeassistant.components.deconz.deconz_event import ( @@ -21,51 +23,47 @@ from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from homeassistant.util import slugify -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration - from tests.components.logbook.common import MockRow, mock_humanify -from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "armed": "disarmed", + "enrolled": 0, + "on": True, + "panel": "disarmed", + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "3c4008d74035dfaa1f0bb30d24468b12", + "lastseen": "2021-04-02T13:07Z", + "manufacturername": "Universal Electronics Inc", + "modelid": "URC4450BC0-X-R", + "name": "Keypad", + "state": { + "action": "armed_away,1111,55", + "lastupdated": "2021-04-02T13:08:18.937", + "lowbattery": False, + "tampered": True, + }, + "type": "ZHAAncillaryControl", + "uniqueid": "00:0d:6f:00:13:4f:61:39-01-0501", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_humanifying_deconz_alarm_event( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, + sensor_payload: dict[str, Any], ) -> None: - """Test humanifying deCONZ event.""" - data = { - "sensors": { - "1": { - "config": { - "armed": "disarmed", - "enrolled": 0, - "on": True, - "panel": "disarmed", - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "3c4008d74035dfaa1f0bb30d24468b12", - 
"lastseen": "2021-04-02T13:07Z", - "manufacturername": "Universal Electronics Inc", - "modelid": "URC4450BC0-X-R", - "name": "Keypad", - "state": { - "action": "armed_away,1111,55", - "lastupdated": "2021-04-02T13:08:18.937", - "lowbattery": False, - "tampered": True, - }, - "type": "ZHAAncillaryControl", - "uniqueid": "00:0d:6f:00:13:4f:61:39-01-0501", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - - keypad_event_id = slugify(data["sensors"]["1"]["name"]) - keypad_serial = serial_from_unique_id(data["sensors"]["1"]["uniqueid"]) + """Test humanifying deCONZ alarm event.""" + keypad_event_id = slugify(sensor_payload["name"]) + keypad_serial = serial_from_unique_id(sensor_payload["uniqueid"]) keypad_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, keypad_serial)} ) @@ -113,14 +111,10 @@ async def test_humanifying_deconz_alarm_event( assert events[1]["message"] == "fired event 'armed_away'" -async def test_humanifying_deconz_event( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, -) -> None: - """Test humanifying deCONZ event.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -152,30 +146,35 @@ async def test_humanifying_deconz_event( "uniqueid": "00:00:00:00:00:00:00:04-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - - switch_event_id = slugify(data["sensors"]["1"]["name"]) - switch_serial = serial_from_unique_id(data["sensors"]["1"]["uniqueid"]) + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_humanifying_deconz_event( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + sensor_payload: dict[str, Any], +) -> None: + """Test humanifying deCONZ event.""" + switch_event_id = slugify(sensor_payload["1"]["name"]) + switch_serial = 
serial_from_unique_id(sensor_payload["1"]["uniqueid"]) switch_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, switch_serial)} ) - hue_remote_event_id = slugify(data["sensors"]["2"]["name"]) - hue_remote_serial = serial_from_unique_id(data["sensors"]["2"]["uniqueid"]) + hue_remote_event_id = slugify(sensor_payload["2"]["name"]) + hue_remote_serial = serial_from_unique_id(sensor_payload["2"]["uniqueid"]) hue_remote_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, hue_remote_serial)} ) - xiaomi_cube_event_id = slugify(data["sensors"]["3"]["name"]) - xiaomi_cube_serial = serial_from_unique_id(data["sensors"]["3"]["uniqueid"]) + xiaomi_cube_event_id = slugify(sensor_payload["3"]["name"]) + xiaomi_cube_serial = serial_from_unique_id(sensor_payload["3"]["uniqueid"]) xiaomi_cube_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, xiaomi_cube_serial)} ) - faulty_event_id = slugify(data["sensors"]["4"]["name"]) - faulty_serial = serial_from_unique_id(data["sensors"]["4"]["uniqueid"]) + faulty_event_id = slugify(sensor_payload["4"]["name"]) + faulty_serial = serial_from_unique_id(sensor_payload["4"]["uniqueid"]) faulty_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, faulty_serial)} ) diff --git a/tests/components/deconz/test_number.py b/tests/components/deconz/test_number.py index 655ae2f42e2..962c2c0a89b 100644 --- a/tests/components/deconz/test_number.py +++ b/tests/components/deconz/test_number.py @@ -1,36 +1,27 @@ """deCONZ number platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from 
homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker - -async def test_no_number_entities( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no number entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - TEST_DATA = [ ( # Presence sensor - delay configuration { @@ -46,19 +37,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "number.presence_sensor_delay", - "unique_id": "00:00:00:00:00:00:00:00-00-delay", - "state": "0", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "min": 0, - "max": 65535, - "step": 1, - "mode": "auto", - "friendly_name": "Presence sensor Delay", - }, "websocket_event": {"config": {"delay": 10}}, "next_state": "10", "supported_service_value": 111, @@ -82,19 +61,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "number.presence_sensor_duration", - "unique_id": "00:00:00:00:00:00:00:00-00-duration", - "state": "0", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "min": 0, - "max": 65535, - "step": 1, - "mode": "auto", - "friendly_name": "Presence sensor Duration", - }, "websocket_event": {"config": {"duration": 10}}, "next_state": "10", "supported_service_value": 111, @@ -107,57 +74,29 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) async 
def test_number_entities( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - mock_deconz_websocket, - sensor_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + sensor_ws_data: WebsocketDataType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of number entities.""" - - with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"0": sensor_data}}): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - entity = hass.states.get(expected["entity_id"]) - assert entity.state == expected["state"] - assert entity.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.NUMBER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Change state - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - } | expected["websocket_event"] - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data(expected["websocket_event"]) assert hass.states.get(expected["entity_id"]).state == expected["next_state"] # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # 
Service set supported value @@ -197,14 +136,3 @@ async def test_number_entities( }, blocking=True, ) - - # Unload entry - - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_scene.py b/tests/components/deconz/test_scene.py index 2bace605db5..c1240b6881c 100644 --- a/tests/components/deconz/test_scene.py +++ b/tests/components/deconz/test_scene.py @@ -1,97 +1,60 @@ """deCONZ scene platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN, SERVICE_TURN_ON -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker - -async def test_no_scenes( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that scenes can be loaded without scenes being available.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - TEST_DATA = [ ( # Scene { - "groups": { - "1": { - "id": "Light group id", - "name": "Light group", - "type": "LightGroup", - "state": {"all_on": False, "any_on": True}, - "action": {}, - "scenes": [{"id": "1", "name": 
"Scene"}], - "lights": [], - } + "1": { + "id": "Light group id", + "name": "Light group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": True}, + "action": {}, + "scenes": [{"id": "1", "name": "Scene"}], + "lights": [], } }, { - "entity_count": 2, - "device_count": 3, "entity_id": "scene.light_group_scene", - "unique_id": "01234E56789A/groups/1/scenes/1", - "entity_category": None, - "attributes": { - "friendly_name": "Light group Scene", - }, "request": "/groups/1/scenes/1/recall", }, ), ] -@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("group_payload", "expected"), TEST_DATA) async def test_scenes( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - raw_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of scene entities.""" - with patch.dict(DECONZ_WEB_REQUEST, raw_data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - scene = hass.states.get(expected["entity_id"]) - assert scene.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SCENE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify button press - 
mock_deconz_put_request(aioclient_mock, config_entry.data, expected["request"]) + aioclient_mock = mock_put_request(expected["request"]) await hass.services.async_call( SCENE_DOMAIN, @@ -101,24 +64,11 @@ async def test_scenes( ) assert aioclient_mock.mock_calls[1][2] == {} - # Unload entry - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_only_new_scenes_are_created( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that scenes works.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "1": { "id": "Light group id", "name": "Light group", @@ -129,20 +79,20 @@ async def test_only_new_scenes_are_created( "lights": [], } } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_only_new_scenes_are_created( + hass: HomeAssistant, + mock_websocket_data: WebsocketDataType, +) -> None: + """Test that scenes works.""" assert len(hass.states.async_all()) == 2 event_changed_group = { - "t": "event", - "e": "changed", "r": "groups", "id": "1", "scenes": [{"id": "1", "name": "Scene"}], } - await mock_deconz_websocket(data=event_changed_group) - await hass.async_block_till_done() - + await mock_websocket_data(event_changed_group) assert len(hass.states.async_all()) == 2 diff --git a/tests/components/deconz/test_select.py b/tests/components/deconz/test_select.py index fb8f41293a2..900283d88bb 100644 --- a/tests/components/deconz/test_select.py +++ b/tests/components/deconz/test_select.py @@ -1,5 +1,7 @@ """deCONZ select platform tests.""" +from collections.abc import Callable +from typing import Any from 
unittest.mock import patch from pydeconz.models.sensor.presence import ( @@ -7,202 +9,135 @@ from pydeconz.models.sensor.presence import ( PresenceConfigTriggerDistance, ) import pytest +from syrupy import SnapshotAssertion from homeassistant.components.select import ( ATTR_OPTION, DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker - -async def test_no_select_entities( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no sensor entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - TEST_DATA = [ ( # Presence Device Mode { - "sensors": { - "1": { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - } + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + 
"lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", }, { - "entity_count": 5, - "device_count": 3, "entity_id": "select.aqara_fp1_device_mode", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "friendly_name": "Aqara FP1 Device Mode", - "options": ["leftright", "undirected"], - }, "option": PresenceConfigDeviceMode.LEFT_AND_RIGHT.value, - "request": "/sensors/1/config", + "request": "/sensors/0/config", "request_data": {"devicemode": "leftright"}, }, ), ( # Presence Sensitivity { - "sensors": { - "1": { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - } + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", }, { - "entity_count": 
5, - "device_count": 3, "entity_id": "select.aqara_fp1_sensitivity", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "friendly_name": "Aqara FP1 Sensitivity", - "options": ["High", "Medium", "Low"], - }, "option": "Medium", - "request": "/sensors/1/config", + "request": "/sensors/0/config", "request_data": {"sensitivity": 2}, }, ), ( # Presence Trigger Distance { - "sensors": { - "1": { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - } + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", }, { - "entity_count": 5, - "device_count": 3, "entity_id": "select.aqara_fp1_trigger_distance", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "friendly_name": "Aqara FP1 Trigger Distance", - "options": ["far", "medium", "near"], - }, "option": PresenceConfigTriggerDistance.FAR.value, - "request": "/sensors/1/config", + "request": "/sensors/0/config", 
"request_data": {"triggerdistance": "far"}, }, ), ] -@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) async def test_select( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - raw_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of button entities.""" - with patch.dict(DECONZ_WEB_REQUEST, raw_data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - button = hass.states.get(expected["entity_id"]) - assert button.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SELECT]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify selecting option - - mock_deconz_put_request(aioclient_mock, config_entry.data, expected["request"]) + aioclient_mock = mock_put_request(expected["request"]) await hass.services.async_call( SELECT_DOMAIN, @@ -214,14 +149,3 @@ async def test_select( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == expected["request_data"] - - # Unload entry - - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == 
STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_sensor.py b/tests/components/deconz/test_sensor.py index 1e1ca6efe7c..e6ae85df615 100644 --- a/tests/components/deconz/test_sensor.py +++ b/tests/components/deconz/test_sensor.py @@ -1,42 +1,23 @@ """deCONZ sensor platform tests.""" from datetime import timedelta +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.deconz.const import CONF_ALLOW_CLIP_SENSOR -from homeassistant.components.sensor import ( - DOMAIN as SENSOR_DOMAIN, - SensorDeviceClass, - SensorStateClass, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - CONCENTRATION_PARTS_PER_BILLION, - CONCENTRATION_PARTS_PER_MILLION, - STATE_UNAVAILABLE, - EntityCategory, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration - -from tests.common import async_fire_time_changed -from tests.test_util.aiohttp import AiohttpClientMocker - - -async def test_no_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no sensor entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import async_fire_time_changed, snapshot_platform 
TEST_DATA = [ ( # Air quality sensor @@ -61,17 +42,7 @@ TEST_DATA = [ "uniqueid": "00:12:4b:00:14:4d:00:07-02-fdef", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.bosch_air_quality_sensor", - "unique_id": "00:12:4b:00:14:4d:00:07-02-fdef-air_quality", - "state": "poor", - "entity_category": None, - "device_class": None, - "state_class": None, - "attributes": { - "friendly_name": "BOSCH Air quality sensor", - }, "websocket_event": {"state": {"airquality": "excellent"}}, "next_state": "excellent", }, @@ -98,19 +69,7 @@ TEST_DATA = [ "uniqueid": "00:12:4b:00:14:4d:00:07-02-fdef", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.bosch_air_quality_sensor_ppb", - "unique_id": "00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb", - "state": "809", - "entity_category": None, - "device_class": None, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "BOSCH Air quality sensor PPB", - "state_class": "measurement", - "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, - }, "websocket_event": {"state": {"airqualityppb": 1000}}, "next_state": "1000", }, @@ -137,20 +96,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "sensor.airquality_1_co2", - "unique_id": "00:00:00:00:00:00:00:01-02-0113-air_quality_co2", - "state": "359", - "entity_category": None, - "device_class": SensorDeviceClass.CO2, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "AirQuality 1 CO2", - "device_class": SensorDeviceClass.CO2, - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_PARTS_PER_MILLION, - }, "websocket_event": {"state": {"airquality_co2_density": 332}}, "next_state": "332", }, @@ -177,20 +123,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "sensor.airquality_1_ch2o", - "unique_id": 
"00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde", - "state": "4", - "entity_category": None, - "device_class": SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "AirQuality 1 CH2O", - "device_class": SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - }, "websocket_event": {"state": {"airquality_formaldehyde_density": 5}}, "next_state": "5", }, @@ -217,20 +150,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "sensor.airquality_1_pm25", - "unique_id": "00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5", - "state": "8", - "entity_category": None, - "device_class": SensorDeviceClass.PM25, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "AirQuality 1 PM25", - "device_class": SensorDeviceClass.PM25, - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - }, "websocket_event": {"state": {"pm2_5": 11}}, "next_state": "11", }, @@ -256,21 +176,7 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:ff:fe:01:23:45-01-0001", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.fyrtur_block_out_roller_blind_battery", - "unique_id": "00:0d:6f:ff:fe:01:23:45-01-0001-battery", - "state": "100", - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": SensorDeviceClass.BATTERY, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "unit_of_measurement": "%", - "device_class": "battery", - "friendly_name": "FYRTUR block-out roller blind Battery", - }, "websocket_event": {"state": {"battery": 50}}, "next_state": "50", }, @@ -300,20 +206,7 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-040d", }, { - "entity_count": 1, - "device_count": 3, 
"entity_id": "sensor.carbondioxide_35", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-040d-carbon_dioxide", - "state": "370", - "entity_category": None, - "device_class": SensorDeviceClass.CO2, - "state_class": CONCENTRATION_PARTS_PER_BILLION, - "attributes": { - "device_class": "carbon_dioxide", - "friendly_name": "CarbonDioxide 35", - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, - }, "websocket_event": {"state": {"measured_value": 500}}, "next_state": "500", }, @@ -335,22 +228,7 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:00:0b:7a:64:29-01-0702", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.consumption_15", - "unique_id": "00:0d:6f:00:0b:7a:64:29-01-0702-consumption", - "state": "11.342", - "entity_category": None, - "device_class": SensorDeviceClass.ENERGY, - "state_class": SensorStateClass.TOTAL_INCREASING, - "attributes": { - "state_class": "total_increasing", - "on": True, - "power": 123, - "unit_of_measurement": "kWh", - "device_class": "energy", - "friendly_name": "Consumption 15", - }, "websocket_event": {"state": {"consumption": 10000}}, "next_state": "10.0", }, @@ -378,21 +256,7 @@ TEST_DATA = [ }, { "enable_entity": True, - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.daylight", - "unique_id": "01:23:4E:FF:FF:56:78:9A-01-daylight_status", - "old-unique_id": "01:23:4E:FF:FF:56:78:9A-01", - "state": "solar_noon", - "entity_category": None, - "device_class": None, - "state_class": None, - "attributes": { - "on": True, - "daylight": True, - "icon": "mdi:white-balance-sunny", - "friendly_name": "Daylight", - }, "websocket_event": {"state": {"status": 210}}, "next_state": "dusk", }, @@ -422,20 +286,7 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-042b", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.formaldehyde_34", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-042b-formaldehyde", - "state": "1", - "entity_category": None, - "device_class": 
SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "device_class": "volatile_organic_compounds", - "friendly_name": "Formaldehyde 34", - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, - }, "websocket_event": {"state": {"measured_value": 2}}, "next_state": "2", }, @@ -459,18 +310,7 @@ TEST_DATA = [ "uniqueid": "fsm-state-1520195376277", }, { - "entity_count": 1, - "device_count": 2, "entity_id": "sensor.fsm_state_motion_stair", - "unique_id": "fsm-state-1520195376277-status", - "state": "0", - "entity_category": None, - "device_class": None, - "state_class": None, - "attributes": { - "on": True, - "friendly_name": "FSM_STATE Motion stair", - }, "websocket_event": {"state": {"status": 1}}, "next_state": "1", }, @@ -497,24 +337,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0405", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.mi_temperature_1", - "unique_id": "00:15:8d:00:02:45:dc:53-01-0405-humidity", - "state": "35.55", - "entity_category": None, - "device_class": SensorDeviceClass.HUMIDITY, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "unit_of_measurement": "%", - "device_class": "humidity", - "friendly_name": "Mi temperature 1", - }, - "options": { - "suggested_display_precision": 1, - }, "websocket_event": {"state": {"humidity": 1000}}, "next_state": "10.0", }, @@ -538,20 +361,7 @@ TEST_DATA = [ "uniqueid": "a4:c1:38:fe:86:8f:07:a3-01-0408", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "sensor.soil_sensor", - "unique_id": "a4:c1:38:fe:86:8f:07:a3-01-0408-moisture", - "state": "72.13", - "entity_category": None, - "device_class": SensorDeviceClass.MOISTURE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "unit_of_measurement": "%", - "device_class": "moisture", - "friendly_name": 
"Soil Sensor", - }, "websocket_event": {"state": {"moisture": 6923}}, "next_state": "69.23", }, @@ -586,23 +396,7 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:03:28:8c:9b-02-0400", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.motion_sensor_4", - "unique_id": "00:17:88:01:03:28:8c:9b-02-0400-light_level", - "state": "5.0", - "entity_category": None, - "device_class": SensorDeviceClass.ILLUMINANCE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "on": True, - "dark": True, - "daylight": False, - "unit_of_measurement": "lx", - "device_class": "illuminance", - "friendly_name": "Motion sensor 4", - "state_class": "measurement", - }, "websocket_event": {"state": {"lightlevel": 1000}}, "next_state": "1.3", }, @@ -638,20 +432,7 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-042a", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.starkvind_airpurifier_pm25", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-042a-particulate_matter_pm2_5", - "state": "1", - "entity_category": None, - "device_class": SensorDeviceClass.PM25, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "STARKVIND AirPurifier PM25", - "device_class": SensorDeviceClass.PM25, - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - }, "websocket_event": {"state": {"measured_value": 2}}, "next_state": "2", }, @@ -677,23 +458,7 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:00:0b:7a:64:29-01-0b04", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.power_16", - "unique_id": "00:0d:6f:00:0b:7a:64:29-01-0b04-power", - "state": "64", - "entity_category": None, - "device_class": SensorDeviceClass.POWER, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "current": 34, - "voltage": 231, - "unit_of_measurement": "W", - "device_class": "power", - "friendly_name": "Power 16", - }, 
"websocket_event": {"state": {"power": 1000}}, "next_state": "1000", }, @@ -719,21 +484,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0403", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.mi_temperature_1", - "unique_id": "00:15:8d:00:02:45:dc:53-01-0403-pressure", - "state": "1010", - "entity_category": None, - "device_class": SensorDeviceClass.PRESSURE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "unit_of_measurement": "hPa", - "device_class": "pressure", - "friendly_name": "Mi temperature 1", - }, "websocket_event": {"state": {"pressure": 500}}, "next_state": "500", }, @@ -760,24 +511,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0402", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.mi_temperature_1", - "unique_id": "00:15:8d:00:02:45:dc:53-01-0402-temperature", - "state": "21.82", - "entity_category": None, - "device_class": SensorDeviceClass.TEMPERATURE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "unit_of_measurement": "°C", - "device_class": "temperature", - "friendly_name": "Mi temperature 1", - }, - "options": { - "suggested_display_precision": 1, - }, "websocket_event": {"state": {"temperature": 1800}}, "next_state": "18.0", }, @@ -806,17 +540,7 @@ TEST_DATA = [ "uniqueid": "cc:cc:cc:ff:fe:38:4d:b3-01-000a", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.etrv_sejour", - "unique_id": "cc:cc:cc:ff:fe:38:4d:b3-01-000a-last_set", - "state": "2020-11-19T08:07:08+00:00", - "entity_category": None, - "device_class": SensorDeviceClass.TIMESTAMP, - "attributes": { - "device_class": "timestamp", - "friendly_name": "eTRV Séjour", - }, "websocket_event": {"state": {"lastset": "2020-12-14T10:12:14Z"}}, "next_state": "2020-12-14T10:12:14+00:00", }, @@ -845,20 +569,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:b5:d1:80-01-0500", }, { - 
"entity_count": 3, - "device_count": 3, "entity_id": "sensor.alarm_10_temperature", - "unique_id": "00:15:8d:00:02:b5:d1:80-01-0500-internal_temperature", - "state": "26.0", - "entity_category": None, - "device_class": SensorDeviceClass.TEMPERATURE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "unit_of_measurement": "°C", - "device_class": "temperature", - "friendly_name": "Alarm 10 Temperature", - }, "websocket_event": {"state": {"temperature": 1800}}, "next_state": "26.0", }, @@ -886,22 +597,7 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:02:0e:32:a3-02-fc00", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.dimmer_switch_3_battery", - "unique_id": "00:17:88:01:02:0e:32:a3-02-fc00-battery", - "state": "90", - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": SensorDeviceClass.BATTERY, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "event_id": "dimmer_switch_3", - "unit_of_measurement": "%", - "device_class": "battery", - "friendly_name": "Dimmer switch 3 Battery", - }, "websocket_event": {"config": {"battery": 80}}, "next_state": "80", }, @@ -909,22 +605,19 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) async def test_sensors( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, - sensor_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + sensor_ws_data: WebsocketDataType, + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - - with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"1": sensor_data}}): - config_entry = await 
setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SENSOR]): + config_entry = await config_entry_factory() # Enable in entity registry if expected.get("enable_entity"): @@ -939,79 +632,37 @@ async def test_sensors( ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify entity state - sensor = hass.states.get(expected["entity_id"]) - assert sensor.state == expected["state"] - assert sensor.attributes.get(ATTR_DEVICE_CLASS) == expected["device_class"] - assert sensor.attributes == expected["attributes"] - - # Verify entity registry - assert ( - entity_registry.async_get(expected["entity_id"]).entity_category - is expected["entity_category"] - ) - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Change state - event_changed_sensor = {"t": "event", "e": "changed", "r": "sensors", "id": "1"} - event_changed_sensor |= expected["websocket_event"] - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data(expected["websocket_event"]) assert hass.states.get(expected["entity_id"]).state == expected["next_state"] - # Unload entry - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_not_allow_clip_sensor( - 
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "CLIP temperature sensor", + "type": "CLIPTemperature", + "state": {"temperature": 2600}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:02-00", + }, + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_not_allow_clip_sensor(hass: HomeAssistant) -> None: """Test that CLIP sensors are not allowed.""" - data = { - "sensors": { - "1": { - "name": "CLIP temperature sensor", - "type": "CLIPTemperature", - "state": {"temperature": 2600}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - }, - } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} - ) - assert len(hass.states.async_all()) == 0 -async def test_allow_clip_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that CLIP sensors can be allowed.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Light level sensor", "type": "ZHALightLevel", @@ -1039,17 +690,19 @@ async def test_allow_clip_sensors( "uniqueid": "/sensors/3", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration( - hass, - aioclient_mock, - options={CONF_ALLOW_CLIP_SENSOR: True}, - ) - - assert len(hass.states.async_all()) == 4 - assert hass.states.get("sensor.clip_light_level_sensor").state == "999.8" - assert hass.states.get("sensor.clip_flur").state == "0" + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +async def test_allow_clip_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, +) -> None: + """Test that CLIP sensors can be 
allowed.""" + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SENSOR]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Disallow clip sensors @@ -1074,15 +727,14 @@ async def test_allow_clip_sensors( assert hass.states.get("sensor.clip_flur").state == "0" +@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new sensor works.""" event_added_sensor = { - "t": "event", "e": "added", - "r": "sensors", - "id": "1", "sensor": { "id": "Light sensor id", "name": "Light level sensor", @@ -1093,13 +745,9 @@ async def test_add_new_sensor( }, } - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() - + await sensor_ws_data(event_added_sensor) assert len(hass.states.async_all()) == 2 assert hass.states.get("sensor.light_level_sensor").state == "999.8" @@ -1115,71 +763,58 @@ BAD_SENSOR_DATA = [ @pytest.mark.parametrize(("sensor_type", "sensor_property"), BAD_SENSOR_DATA) async def test_dont_add_sensor_if_state_is_none( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - sensor_type, - sensor_property, + config_entry_factory: ConfigEntryFactoryType, + sensor_payload: dict[str, Any], + sensor_type: str, + sensor_property: str, ) -> None: """Test sensor with scaled data is not created if state is None.""" - data = { - "sensors": { - "1": { - "name": "Sensor 1", - "type": sensor_type, - "state": {sensor_property: None}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } + sensor_payload["0"] = { + "name": "Sensor 1", + "type": sensor_type, + "state": {sensor_property: None}, + "config": {}, + "uniqueid": 
"00:00:00:00:00:00:00:00-00", } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + await config_entry_factory() assert len(hass.states.async_all()) == 0 -async def test_air_quality_sensor_without_ppb( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test sensor with scaled data is not created if state is None.""" - data = { - "sensors": { - "1": { - "config": { - "on": True, - "reachable": True, - }, - "ep": 2, - "etag": "c2d2e42396f7c78e11e46c66e2ec0200", - "lastseen": "2020-11-20T22:48Z", - "manufacturername": "BOSCH", - "modelid": "AIR", - "name": "BOSCH Air quality sensor", - "state": { - "airquality": "poor", - "lastupdated": "2020-11-20T22:48:00.209", - }, - "swversion": "20200402", - "type": "ZHAAirQuality", - "uniqueid": "00:00:00:00:00:00:00:00-02-fdef", - } +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "on": True, + "reachable": True, + }, + "ep": 2, + "etag": "c2d2e42396f7c78e11e46c66e2ec0200", + "lastseen": "2020-11-20T22:48Z", + "manufacturername": "BOSCH", + "modelid": "AIR", + "name": "BOSCH Air quality sensor", + "state": { + "airquality": "poor", + "lastupdated": "2020-11-20T22:48:00.209", + }, + "swversion": "20200402", + "type": "ZHAAirQuality", + "uniqueid": "00:00:00:00:00:00:00:00-02-fdef", } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_air_quality_sensor_without_ppb(hass: HomeAssistant) -> None: + """Test sensor with scaled data is not created if state is None.""" assert len(hass.states.async_all()) == 1 -async def test_add_battery_later( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that a battery sensor can be created later on. - - Without an initial battery state a battery sensor - can be created once a value is reported. 
- """ - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -1195,190 +830,175 @@ async def test_add_battery_later( "uniqueid": "00:00:00:00:00:00:00:00-00-0001", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_add_battery_later( + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, +) -> None: + """Test that a battery sensor can be created later on. + Without an initial battery state a battery sensor + can be created once a value is reported. + """ assert len(hass.states.async_all()) == 0 - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "2", - "config": {"battery": 50}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"id": "2", "config": {"battery": 50}}) assert len(hass.states.async_all()) == 0 - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "config": {"battery": 50}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"id": "1", "config": {"battery": 50}}) assert len(hass.states.async_all()) == 1 - assert hass.states.get("sensor.switch_1_battery").state == "50" @pytest.mark.parametrize("model_id", ["0x8030", "0x8031", "0x8034", "0x8035"]) async def test_special_danfoss_battery_creation( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, model_id + hass: HomeAssistant, + config_entry_factory: ConfigEntryFactoryType, + sensor_payload: dict[str, Any], + model_id: str, ) -> None: """Test the special Danfoss battery creation works. Normally there should only be one battery sensor per device from deCONZ. With specific Danfoss devices each endpoint can report its own battery state. 
""" - data = { - "sensors": { - "1": { - "config": { - "battery": 70, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 1, - "etag": "982d9acc38bee5b251e24a9be26558e4", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:07.994", - "on": False, - "temperature": 2307, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-01-0201", + sensor_payload |= { + "1": { + "config": { + "battery": 70, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, }, - "2": { - "config": { - "battery": 86, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 2, - "etag": "62f12749f9f51c950086aff37dd02b61", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:22.399", - "on": False, - "temperature": 2316, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-02-0201", + "ep": 1, + "etag": "982d9acc38bee5b251e24a9be26558e4", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:07.994", + "on": False, + "temperature": 2307, }, - "3": { - "config": { - "battery": 86, - "heatsetpoint": 2350, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 3, - "etag": "f50061174bb7f18a3d95789bab8b646d", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:25.466", - "on": False, - "temperature": 2337, - }, - "swversion": "YYYYMMDD", - 
"type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-03-0201", + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-01-0201", + }, + "2": { + "config": { + "battery": 86, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, }, - "4": { - "config": { - "battery": 85, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 4, - "etag": "eea97adf8ce1b971b8b6a3a31793f96b", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:41.939", - "on": False, - "temperature": 2333, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-04-0201", + "ep": 2, + "etag": "62f12749f9f51c950086aff37dd02b61", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:22.399", + "on": False, + "temperature": 2316, }, - "5": { - "config": { - "battery": 83, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 5, - "etag": "1f7cd1a5d66dc27ac5eb44b8c47362fb", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": {"lastupdated": "none", "on": False, "temperature": 2325}, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-05-0201", + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-02-0201", + }, + "3": { + "config": { + "battery": 86, + "heatsetpoint": 2350, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, }, - } + "ep": 3, + "etag": "f50061174bb7f18a3d95789bab8b646d", + "lastseen": 
"2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:25.466", + "on": False, + "temperature": 2337, + }, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-03-0201", + }, + "4": { + "config": { + "battery": 85, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 4, + "etag": "eea97adf8ce1b971b8b6a3a31793f96b", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:41.939", + "on": False, + "temperature": 2333, + }, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-04-0201", + }, + "5": { + "config": { + "battery": 83, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 5, + "etag": "1f7cd1a5d66dc27ac5eb44b8c47362fb", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": {"lastupdated": "none", "on": False, "temperature": 2325}, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-05-0201", + }, } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + + await config_entry_factory() assert len(hass.states.async_all()) == 10 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 5 -async def test_unsupported_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.parametrize( + "sensor_payload", + [{"type": "not supported", "name": "name", "state": {}, "config": {}}], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_unsupported_sensor(hass: HomeAssistant) -> None: """Test that unsupported sensors doesn't break anything.""" - data = 
{ - "sensors": { - "0": {"type": "not supported", "name": "name", "state": {}, "config": {}} - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_services.py b/tests/components/deconz/test_services.py index de061fc4e8c..9a30564385c 100644 --- a/tests/components/deconz/test_services.py +++ b/tests/components/deconz/test_services.py @@ -1,6 +1,7 @@ """deCONZ service tests.""" -from unittest.mock import patch +from collections.abc import Callable +from typing import Any import pytest import voluptuous as vol @@ -23,31 +24,25 @@ from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from .test_gateway import ( - BRIDGEID, - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - mock_deconz_request, - setup_deconz_integration, -) +from .test_hub import BRIDGE_ID -from tests.common import async_capture_events +from tests.common import MockConfigEntry, async_capture_events from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_field( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that service invokes pydeconz with the correct path and data.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - data = { SERVICE_FIELD: "/lights/2", - CONF_BRIDGE_ID: BRIDGEID, + CONF_BRIDGE_ID: BRIDGE_ID, SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20}, } - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/2") + aioclient_mock = mock_put_request("/lights/2") await hass.services.async_call( DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True @@ -55,29 +50,28 @@ 
async def test_configure_service_with_field( assert aioclient_mock.mock_calls[1][2] == {"on": True, "attr1": 10, "attr2": 20} +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "Test", + "state": {"reachable": True}, + "type": "Light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_entity( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that service invokes pydeconz with the correct path and data.""" - data = { - "lights": { - "1": { - "name": "Test", - "state": {"reachable": True}, - "type": "Light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - data = { SERVICE_ENTITY: "light.test", SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20}, } - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1") + aioclient_mock = mock_put_request("/lights/0") await hass.services.async_call( DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True @@ -85,30 +79,29 @@ async def test_configure_service_with_entity( assert aioclient_mock.mock_calls[1][2] == {"on": True, "attr1": 10, "attr2": 20} +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "Test", + "state": {"reachable": True}, + "type": "Light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_entity_and_field( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that service invokes pydeconz with the correct path and data.""" - data = { - "lights": { - "1": { - "name": "Test", - "state": {"reachable": True}, - "type": "Light", - 
"uniqueid": "00:00:00:00:00:00:00:01-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - data = { SERVICE_ENTITY: "light.test", SERVICE_FIELD: "/state", SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20}, } - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") await hass.services.async_call( DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True @@ -116,11 +109,11 @@ async def test_configure_service_with_entity_and_field( assert aioclient_mock.mock_calls[1][2] == {"on": True, "attr1": 10, "attr2": 20} +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_faulty_bridgeid( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service fails on a bad bridge id.""" - await setup_deconz_integration(hass, aioclient_mock) aioclient_mock.clear_requests() data = { @@ -137,12 +130,9 @@ async def test_configure_service_with_faulty_bridgeid( assert len(aioclient_mock.mock_calls) == 0 -async def test_configure_service_with_faulty_field( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.usefixtures("config_entry_setup") +async def test_configure_service_with_faulty_field(hass: HomeAssistant) -> None: """Test that service fails on a bad field.""" - await setup_deconz_integration(hass, aioclient_mock) - data = {SERVICE_FIELD: "light/2", SERVICE_DATA: {}} with pytest.raises(vol.Invalid): @@ -151,11 +141,11 @@ async def test_configure_service_with_faulty_field( ) +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_faulty_entity( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service on a non existing entity.""" - await setup_deconz_integration(hass, aioclient_mock) aioclient_mock.clear_requests() data = { @@ -171,13 +161,12 @@ async def 
test_configure_service_with_faulty_entity( assert len(aioclient_mock.mock_calls) == 0 +@pytest.mark.parametrize("config_entry_options", [{CONF_MASTER_GATEWAY: False}]) +@pytest.mark.usefixtures("config_entry_setup") async def test_calling_service_with_no_master_gateway_fails( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service call fails when no master gateway exist.""" - await setup_deconz_integration( - hass, aioclient_mock, options={CONF_MASTER_GATEWAY: False} - ) aioclient_mock.clear_requests() data = { @@ -193,18 +182,19 @@ async def test_calling_service_with_no_master_gateway_fails( assert len(aioclient_mock.mock_calls) == 0 +@pytest.mark.usefixtures("config_entry_setup") async def test_service_refresh_devices( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + deconz_payload: dict[str, Any], + mock_requests: Callable[[], None], ) -> None: """Test that service can refresh devices.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 aioclient_mock.clear_requests() - data = { - "config": {}, + deconz_payload |= { "groups": { "1": { "id": "Group 1 id", @@ -234,43 +224,43 @@ async def test_service_refresh_devices( } }, } - - mock_deconz_request(aioclient_mock, config_entry.data, data) + mock_requests() await hass.services.async_call( - DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGEID} + DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGE_ID} ) await hass.async_block_till_done() assert len(hass.states.async_all()) == 5 +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "Switch 1", + "type": "ZHASwitch", + "state": {"buttonevent": 1000}, + "config": {"battery": 100}, + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def 
test_service_refresh_devices_trigger_no_state_update( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + deconz_payload: dict[str, Any], + mock_requests, ) -> None: """Verify that gateway.ignore_state_updates are honored.""" - data = { - "sensors": { - "1": { - "name": "Switch 1", - "type": "ZHASwitch", - "state": {"buttonevent": 1000}, - "config": {"battery": 100}, - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 1 captured_events = async_capture_events(hass, CONF_DECONZ_EVENT) aioclient_mock.clear_requests() - data = { - "config": {}, + deconz_payload |= { "groups": { "1": { "id": "Group 1 id", @@ -291,7 +281,7 @@ async def test_service_refresh_devices_trigger_no_state_update( } }, "sensors": { - "1": { + "0": { "name": "Switch 1", "type": "ZHASwitch", "state": {"buttonevent": 1000}, @@ -300,11 +290,10 @@ async def test_service_refresh_devices_trigger_no_state_update( } }, } - - mock_deconz_request(aioclient_mock, config_entry.data, data) + mock_requests() await hass.services.async_call( - DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGEID} + DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGE_ID} ) await hass.async_block_till_done() @@ -312,37 +301,38 @@ async def test_service_refresh_devices_trigger_no_state_update( assert len(captured_events) == 0 +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "Light 0 name", + "state": {"reachable": True}, + "type": "Light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "Switch 1", + "type": "ZHASwitch", + "state": {"buttonevent": 1000, "gesture": 1}, + "config": {"battery": 100}, + "uniqueid": "00:00:00:00:00:00:00:03-00", + } + ], +) async def 
test_remove_orphaned_entries_service( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, + config_entry_setup: MockConfigEntry, ) -> None: """Test service works and also don't remove more than expected.""" - data = { - "lights": { - "1": { - "name": "Light 1 name", - "state": {"reachable": True}, - "type": "Light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - }, - "sensors": { - "1": { - "name": "Switch 1", - "type": "ZHASwitch", - "state": {"buttonevent": 1000, "gesture": 1}, - "config": {"battery": 100}, - "uniqueid": "00:00:00:00:00:00:00:03-00", - }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, + config_entry_id=config_entry_setup.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "123")}, ) @@ -351,7 +341,7 @@ async def test_remove_orphaned_entries_service( [ entry for entry in device_registry.devices.values() - if config_entry.entry_id in entry.config_entries + if config_entry_setup.entry_id in entry.config_entries ] ) == 5 # Host, gateway, light, switch and orphan @@ -362,19 +352,23 @@ async def test_remove_orphaned_entries_service( DECONZ_DOMAIN, "12345", suggested_object_id="Orphaned sensor", - config_entry=config_entry, + config_entry=config_entry_setup, device_id=device.id, ) assert ( - len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) + len( + er.async_entries_for_config_entry( + entity_registry, config_entry_setup.entry_id + ) + ) == 3 # Light, switch battery and orphan ) await hass.services.async_call( DECONZ_DOMAIN, SERVICE_REMOVE_ORPHANED_ENTRIES, - service_data={CONF_BRIDGE_ID: BRIDGEID}, + service_data={CONF_BRIDGE_ID: BRIDGE_ID}, ) await hass.async_block_till_done() @@ -383,13 +377,17 @@ async def test_remove_orphaned_entries_service( [ entry for entry in 
device_registry.devices.values() - if config_entry.entry_id in entry.config_entries + if config_entry_setup.entry_id in entry.config_entries ] ) == 4 # Host, gateway, light and switch ) assert ( - len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) + len( + er.async_entries_for_config_entry( + entity_registry, config_entry_setup.entry_id + ) + ) == 2 # Light and switch battery ) diff --git a/tests/components/deconz/test_siren.py b/tests/components/deconz/test_siren.py index 62ed1b732b8..5c80feef38c 100644 --- a/tests/components/deconz/test_siren.py +++ b/tests/components/deconz/test_siren.py @@ -1,6 +1,8 @@ """deCONZ switch platform tests.""" -from unittest.mock import patch +from collections.abc import Callable + +import pytest from homeassistant.components.siren import ATTR_DURATION, DOMAIN as SIREN_DOMAIN from homeassistant.const import ( @@ -9,61 +11,41 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_OFF, STATE_ON, - STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import WebsocketDataType from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "Warning device", + "type": "Warning device", + "state": {"alert": "lselect", "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_sirens( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + light_ws_data: WebsocketDataType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that siren entities are created.""" - data = { - "lights": { - "1": { - "name": "Warning device", - "type": "Warning device", - "state": {"alert": "lselect", "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - }, - "2": { - 
"name": "Unsupported siren", - "type": "Not a siren", - "state": {"reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:01-00", - }, - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 + assert len(hass.states.async_all()) == 1 assert hass.states.get("siren.warning_device").state == STATE_ON - assert not hass.states.get("siren.unsupported_siren") - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"alert": None}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() + await light_ws_data({"state": {"alert": None}}) assert hass.states.get("siren.warning_device").state == STATE_OFF # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service turn on siren @@ -94,14 +76,3 @@ async def test_sirens( blocking=True, ) assert aioclient_mock.mock_calls[3][2] == {"alert": "lselect", "ontime": 100} - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_switch.py b/tests/components/deconz/test_switch.py index 9ef2382a2e2..ed82b0c2ac3 100644 --- a/tests/components/deconz/test_switch.py +++ b/tests/components/deconz/test_switch.py @@ -1,6 +1,8 @@ """deCONZ switch platform tests.""" -from unittest.mock import patch +from collections.abc import Callable + +import pytest from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN @@ -9,83 +11,65 @@ from 
homeassistant.components.switch import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_switches( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no switch entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - -async def test_power_plugs( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that all supported switch entities are created.""" - data = { - "lights": { - "1": { +@pytest.mark.parametrize( + "light_payload", + [ + { + "0": { "name": "On off switch", "type": "On/Off plug-in unit", "state": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", }, - "2": { + "1": { "name": "Smart plug", "type": "Smart plug", "state": {"on": False, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:01-00", }, - "3": { + "2": { "name": "Unsupported switch", "type": "Not a switch", "state": {"reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:03-00", + "uniqueid": "00:00:00:00:00:00:00:02-00", }, - "4": { + "3": { "name": "On off relay", "state": {"on": True, "reachable": True}, "type": "On/Off light", - "uniqueid": "00:00:00:00:00:00:00:04-00", + "uniqueid": "00:00:00:00:00:00:00:03-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_power_plugs( + hass: 
HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + light_ws_data: WebsocketDataType, +) -> None: + """Test that all supported switch entities are created.""" assert len(hass.states.async_all()) == 4 assert hass.states.get("switch.on_off_switch").state == STATE_ON assert hass.states.get("switch.smart_plug").state == STATE_OFF assert hass.states.get("switch.on_off_relay").state == STATE_ON assert hass.states.get("switch.unsupported_switch") is None - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data({"state": {"on": False}}) assert hass.states.get("switch.on_off_switch").state == STATE_OFF # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service turn on power plug @@ -107,44 +91,29 @@ async def test_power_plugs( ) assert aioclient_mock.mock_calls[2][2] == {"on": False} - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 4 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "On Off output device", + "type": "On/Off output", + "state": {"on": True, "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) async def test_remove_legacy_on_off_output_as_light( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, ) -> None: """Test that switch platform cleans up legacy light entities.""" - unique_id = "00:00:00:00:00:00:00:00-00" - - switch_light_entity = 
entity_registry.async_get_or_create( - LIGHT_DOMAIN, DECONZ_DOMAIN, unique_id + assert entity_registry.async_get_or_create( + LIGHT_DOMAIN, DECONZ_DOMAIN, "00:00:00:00:00:00:00:00-00" ) - assert switch_light_entity - - data = { - "lights": { - "1": { - "name": "On Off output device", - "type": "On/Off output", - "state": {"on": True, "reachable": True}, - "uniqueid": unique_id, - }, - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + await config_entry_factory() assert not entity_registry.async_get("light.on_off_output_device") assert entity_registry.async_get("switch.on_off_output_device") diff --git a/tests/components/demo/test_camera.py b/tests/components/demo/test_camera.py index ecbd3fecee3..89dd8e0cdf7 100644 --- a/tests/components/demo/test_camera.py +++ b/tests/components/demo/test_camera.py @@ -1,5 +1,6 @@ """The tests for local file camera component.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -24,7 +25,7 @@ ENTITY_CAMERA = "camera.demo_camera" @pytest.fixture -async def camera_only() -> None: +def camera_only() -> Generator[None]: """Enable only the button platform.""" with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -34,7 +35,7 @@ async def camera_only() -> None: @pytest.fixture(autouse=True) -async def demo_camera(hass, camera_only): +async def demo_camera(hass: HomeAssistant, camera_only: None) -> None: """Initialize a demo camera platform.""" assert await async_setup_component( hass, CAMERA_DOMAIN, {CAMERA_DOMAIN: {"platform": DOMAIN}} diff --git a/tests/components/demo/test_climate.py b/tests/components/demo/test_climate.py index ff18f9e6a4e..383e00834b8 100644 --- a/tests/components/demo/test_climate.py +++ b/tests/components/demo/test_climate.py @@ -1,5 +1,6 @@ """The tests for the demo climate component.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -50,7 
+51,7 @@ ENTITY_HEATPUMP = "climate.heatpump" @pytest.fixture -async def climate_only() -> None: +def climate_only() -> Generator[None]: """Enable only the climate platform.""" with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -60,7 +61,7 @@ async def climate_only() -> None: @pytest.fixture(autouse=True) -async def setup_demo_climate(hass, climate_only): +async def setup_demo_climate(hass: HomeAssistant, climate_only: None) -> None: """Initialize setup demo climate.""" hass.config.units = METRIC_SYSTEM assert await async_setup_component(hass, DOMAIN, {"climate": {"platform": "demo"}}) diff --git a/tests/components/demo/test_cover.py b/tests/components/demo/test_cover.py index 9ea743a0a01..009d2ca2f49 100644 --- a/tests/components/demo/test_cover.py +++ b/tests/components/demo/test_cover.py @@ -1,5 +1,6 @@ """The tests for the Demo cover platform.""" +from collections.abc import Generator from datetime import timedelta from unittest.mock import patch @@ -42,7 +43,7 @@ ENTITY_COVER = "cover.living_room_window" @pytest.fixture -async def cover_only() -> None: +def cover_only() -> Generator[None]: """Enable only the climate platform.""" with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -51,15 +52,15 @@ async def cover_only() -> None: yield -@pytest.fixture -async def setup_comp(hass, cover_only): +@pytest.fixture(autouse=True) +async def setup_comp(hass: HomeAssistant, cover_only: None) -> None: """Set up demo cover component.""" with assert_setup_component(1, DOMAIN): await async_setup_component(hass, DOMAIN, CONFIG) await hass.async_block_till_done() -async def test_supported_features(hass: HomeAssistant, setup_comp) -> None: +async def test_supported_features(hass: HomeAssistant) -> None: """Test cover supported features.""" state = hass.states.get("cover.garage_door") assert state.attributes[ATTR_SUPPORTED_FEATURES] == 3 @@ -71,7 +72,7 @@ async def test_supported_features(hass: 
HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_SUPPORTED_FEATURES] == 255 -async def test_close_cover(hass: HomeAssistant, setup_comp) -> None: +async def test_close_cover(hass: HomeAssistant) -> None: """Test closing the cover.""" state = hass.states.get(ENTITY_COVER) assert state.state == STATE_OPEN @@ -92,7 +93,7 @@ async def test_close_cover(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_POSITION] == 0 -async def test_open_cover(hass: HomeAssistant, setup_comp) -> None: +async def test_open_cover(hass: HomeAssistant) -> None: """Test opening the cover.""" state = hass.states.get(ENTITY_COVER) assert state.state == STATE_OPEN @@ -112,7 +113,7 @@ async def test_open_cover(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_POSITION] == 100 -async def test_toggle_cover(hass: HomeAssistant, setup_comp) -> None: +async def test_toggle_cover(hass: HomeAssistant) -> None: """Test toggling the cover.""" # Start open await hass.services.async_call( @@ -152,7 +153,7 @@ async def test_toggle_cover(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_POSITION] == 100 -async def test_set_cover_position(hass: HomeAssistant, setup_comp) -> None: +async def test_set_cover_position(hass: HomeAssistant) -> None: """Test moving the cover to a specific position.""" state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_POSITION] == 70 @@ -171,7 +172,7 @@ async def test_set_cover_position(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_POSITION] == 10 -async def test_stop_cover(hass: HomeAssistant, setup_comp) -> None: +async def test_stop_cover(hass: HomeAssistant) -> None: """Test stopping the cover.""" state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_POSITION] == 70 @@ -190,7 +191,7 @@ async def test_stop_cover(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_POSITION] == 
80 -async def test_close_cover_tilt(hass: HomeAssistant, setup_comp) -> None: +async def test_close_cover_tilt(hass: HomeAssistant) -> None: """Test closing the cover tilt.""" state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -206,7 +207,7 @@ async def test_close_cover_tilt(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 -async def test_open_cover_tilt(hass: HomeAssistant, setup_comp) -> None: +async def test_open_cover_tilt(hass: HomeAssistant) -> None: """Test opening the cover tilt.""" state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -222,7 +223,7 @@ async def test_open_cover_tilt(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 -async def test_toggle_cover_tilt(hass: HomeAssistant, setup_comp) -> None: +async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: """Test toggling the cover tilt.""" # Start open await hass.services.async_call( @@ -259,7 +260,7 @@ async def test_toggle_cover_tilt(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 -async def test_set_cover_tilt_position(hass: HomeAssistant, setup_comp) -> None: +async def test_set_cover_tilt_position(hass: HomeAssistant) -> None: """Test moving the cover til to a specific position.""" state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 @@ -278,7 +279,7 @@ async def test_set_cover_tilt_position(hass: HomeAssistant, setup_comp) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 90 -async def test_stop_cover_tilt(hass: HomeAssistant, setup_comp) -> None: +async def test_stop_cover_tilt(hass: HomeAssistant) -> None: """Test stopping the cover tilt.""" state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 diff --git a/tests/components/demo/test_init.py 
b/tests/components/demo/test_init.py index 2d60f7caf94..0af15455949 100644 --- a/tests/components/demo/test_init.py +++ b/tests/components/demo/test_init.py @@ -1,5 +1,6 @@ """The tests for the Demo component.""" +from collections.abc import Generator import json from unittest.mock import patch @@ -12,19 +13,19 @@ from homeassistant.setup import async_setup_component @pytest.fixture -def mock_history(hass): +def mock_history(hass: HomeAssistant) -> None: """Mock history component loaded.""" hass.config.components.add("history") @pytest.fixture(autouse=True) -def mock_device_tracker_update_config(): +def mock_device_tracker_update_config() -> Generator[None]: """Prevent device tracker from creating known devices file.""" with patch("homeassistant.components.device_tracker.legacy.update_config"): yield -async def test_setting_up_demo(mock_history, hass: HomeAssistant) -> None: +async def test_setting_up_demo(mock_history: None, hass: HomeAssistant) -> None: """Test if we can set up the demo and dump it to JSON.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() diff --git a/tests/components/demo/test_light.py b/tests/components/demo/test_light.py index b67acf3f60f..e3b1efc7eec 100644 --- a/tests/components/demo/test_light.py +++ b/tests/components/demo/test_light.py @@ -1,5 +1,6 @@ """The tests for the demo light component.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -27,7 +28,7 @@ ENTITY_LIGHT = "light.bed_light" @pytest.fixture -async def light_only() -> None: +def light_only() -> Generator[None]: """Enable only the light platform.""" with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -37,7 +38,7 @@ async def light_only() -> None: @pytest.fixture(autouse=True) -async def setup_comp(hass, light_only): +async def setup_comp(hass: HomeAssistant, light_only: None) -> None: """Set up demo component.""" assert await 
async_setup_component( hass, LIGHT_DOMAIN, {LIGHT_DOMAIN: {"platform": DOMAIN}} diff --git a/tests/components/demo/test_media_player.py b/tests/components/demo/test_media_player.py index a6669fa705c..7487a4c13e3 100644 --- a/tests/components/demo/test_media_player.py +++ b/tests/components/demo/test_media_player.py @@ -497,7 +497,7 @@ async def test_media_image_proxy( class MockResponse: """Test response.""" - def __init__(self): + def __init__(self) -> None: """Test response init.""" self.status = 200 self.headers = {"Content-Type": "sometype"} diff --git a/tests/components/demo/test_notify.py b/tests/components/demo/test_notify.py index 4ebbfbdac04..98b3de8448a 100644 --- a/tests/components/demo/test_notify.py +++ b/tests/components/demo/test_notify.py @@ -1,9 +1,9 @@ """The tests for the notify demo platform.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components import notify from homeassistant.components.demo import DOMAIN @@ -81,6 +81,6 @@ async def test_calling_notify_from_script_loaded_from_yaml( await hass.services.async_call("script", "test") await hass.async_block_till_done() assert len(events) == 1 - assert { + assert events[0].data == { "message": "Test 123 4", - } == events[0].data + } diff --git a/tests/components/demo/test_number.py b/tests/components/demo/test_number.py index 20e3ce8fc11..79885fa8581 100644 --- a/tests/components/demo/test_number.py +++ b/tests/components/demo/test_number.py @@ -1,5 +1,6 @@ """The tests for the demo number component.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -26,7 +27,7 @@ ENTITY_SMALL_RANGE = "number.small_range" @pytest.fixture -async def number_only() -> None: +def number_only() -> Generator[None]: """Enable only the number platform.""" with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -36,7 +37,7 @@ async def 
number_only() -> None: @pytest.fixture(autouse=True) -async def setup_demo_number(hass, number_only): +async def setup_demo_number(hass: HomeAssistant, number_only: None) -> None: """Initialize setup demo Number entity.""" assert await async_setup_component(hass, DOMAIN, {"number": {"platform": "demo"}}) await hass.async_block_till_done() diff --git a/tests/components/demo/test_switch.py b/tests/components/demo/test_switch.py index d8c3284875e..57384526dc0 100644 --- a/tests/components/demo/test_switch.py +++ b/tests/components/demo/test_switch.py @@ -1,5 +1,6 @@ """The tests for the demo switch component.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -18,7 +19,7 @@ SWITCH_ENTITY_IDS = ["switch.decorative_lights", "switch.ac"] @pytest.fixture -async def switch_only() -> None: +def switch_only() -> Generator[None]: """Enable only the switch platform.""" with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -28,7 +29,7 @@ async def switch_only() -> None: @pytest.fixture(autouse=True) -async def setup_comp(hass, switch_only): +async def setup_comp(hass: HomeAssistant, switch_only: None) -> None: """Set up demo component.""" assert await async_setup_component( hass, SWITCH_DOMAIN, {SWITCH_DOMAIN: {"platform": DOMAIN}} @@ -37,7 +38,7 @@ async def setup_comp(hass, switch_only): @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) -async def test_turn_on(hass: HomeAssistant, switch_entity_id) -> None: +async def test_turn_on(hass: HomeAssistant, switch_entity_id: str) -> None: """Test switch turn on method.""" await hass.services.async_call( SWITCH_DOMAIN, @@ -61,7 +62,7 @@ async def test_turn_on(hass: HomeAssistant, switch_entity_id) -> None: @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) -async def test_turn_off(hass: HomeAssistant, switch_entity_id) -> None: +async def test_turn_off(hass: HomeAssistant, switch_entity_id: str) -> None: """Test switch turn off 
method.""" await hass.services.async_call( SWITCH_DOMAIN, @@ -86,7 +87,7 @@ async def test_turn_off(hass: HomeAssistant, switch_entity_id) -> None: @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) async def test_turn_off_without_entity_id( - hass: HomeAssistant, switch_entity_id + hass: HomeAssistant, switch_entity_id: str ) -> None: """Test switch turn off all switches.""" await hass.services.async_call( diff --git a/tests/components/demo/test_text.py b/tests/components/demo/test_text.py index faf611d9875..4ca172e5143 100644 --- a/tests/components/demo/test_text.py +++ b/tests/components/demo/test_text.py @@ -1,5 +1,6 @@ """The tests for the demo text component.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -25,7 +26,7 @@ ENTITY_TEXT = "text.text" @pytest.fixture -async def text_only() -> None: +def text_only() -> Generator[None]: """Enable only the text platform.""" with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -35,7 +36,7 @@ async def text_only() -> None: @pytest.fixture(autouse=True) -async def setup_demo_text(hass, text_only): +async def setup_demo_text(hass: HomeAssistant, text_only: None) -> None: """Initialize setup demo text.""" assert await async_setup_component(hass, DOMAIN, {"text": {"platform": "demo"}}) await hass.async_block_till_done() diff --git a/tests/components/device_automation/test_init.py b/tests/components/device_automation/test_init.py index 7d68a944de1..750817f3c41 100644 --- a/tests/components/device_automation/test_init.py +++ b/tests/components/device_automation/test_init.py @@ -23,13 +23,7 @@ from homeassistant.loader import IntegrationNotFound from homeassistant.requirements import RequirementsNotFound from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - MockModule, - async_mock_service, - mock_integration, - mock_platform, -) +from tests.common import MockConfigEntry, MockModule, 
mock_integration, mock_platform from tests.typing import WebSocketGenerator @@ -46,7 +40,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture -def fake_integration(hass): +def fake_integration(hass: HomeAssistant) -> None: """Set up a mock integration with device automation support.""" DOMAIN = "fake_integration" @@ -1384,15 +1378,9 @@ async def test_automation_with_bad_condition( assert expected_error.format(path="['condition'][0]") in caplog.text -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_automation_with_sub_condition( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -1492,29 +1480,29 @@ async def test_automation_with_sub_condition( await hass.async_block_till_done() assert hass.states.get(entity_entry1.entity_id).state == STATE_ON assert hass.states.get(entity_entry2.entity_id).state == STATE_OFF - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "or event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "or event - test_event1" hass.states.async_set(entity_entry1.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entity_entry2.entity_id, STATE_ON) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "or event - test_event1" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "or event - test_event1" hass.states.async_set(entity_entry1.entity_id, STATE_ON) 
hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 4 - assert [calls[2].data["some"], calls[3].data["some"]] == unordered( + assert len(service_calls) == 4 + assert [service_calls[2].data["some"], service_calls[3].data["some"]] == unordered( ["or event - test_event1", "and event - test_event1"] ) diff --git a/tests/components/device_automation/test_toggle_entity.py b/tests/components/device_automation/test_toggle_entity.py index f15730d9525..be4d3bd4c9e 100644 --- a/tests/components/device_automation/test_toggle_entity.py +++ b/tests/components/device_automation/test_toggle_entity.py @@ -11,7 +11,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, async_fire_time_changed, async_mock_service +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -19,17 +19,11 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing. 
@@ -121,20 +115,20 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -145,7 +139,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], trigger: str, ) -> None: """Test for triggers firing with delay.""" @@ -193,16 +187,16 @@ async def test_if_fires_on_state_change_with_for( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - 
off - 0:00:05" ) diff --git a/tests/components/device_sun_light_trigger/test_init.py b/tests/components/device_sun_light_trigger/test_init.py index 65afd5743f5..f3821eb5af9 100644 --- a/tests/components/device_sun_light_trigger/test_init.py +++ b/tests/components/device_sun_light_trigger/test_init.py @@ -77,11 +77,10 @@ async def scanner( ) await hass.async_block_till_done() - return scanner - +@pytest.mark.usefixtures("scanner") async def test_lights_on_when_sun_sets( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner + hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights go on when there is someone home and the sun sets.""" test_time = datetime(2017, 4, 5, 1, 2, 3, tzinfo=dt_util.UTC) @@ -136,8 +135,9 @@ async def test_lights_turn_off_when_everyone_leaves(hass: HomeAssistant) -> None ) +@pytest.mark.usefixtures("scanner") async def test_lights_turn_on_when_coming_home_after_sun_set( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner + hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights turn on when coming home after sun set.""" test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC) @@ -172,8 +172,9 @@ async def test_lights_turn_on_when_coming_home_after_sun_set( ) +@pytest.mark.usefixtures("scanner") async def test_lights_turn_on_when_coming_home_after_sun_set_person( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner + hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights turn on when coming home after sun set.""" device_1 = f"{DOMAIN}.device_1" diff --git a/tests/components/device_tracker/common.py b/tests/components/device_tracker/common.py index d30db984a66..b6341443d36 100644 --- a/tests/components/device_tracker/common.py +++ b/tests/components/device_tracker/common.py @@ -61,7 +61,7 @@ def async_see( class MockScannerEntity(ScannerEntity): """Test implementation of a ScannerEntity.""" - def __init__(self): + def __init__(self) -> None: 
"""Init.""" self.connected = False self._hostname = "test.hostname.org" @@ -110,7 +110,7 @@ class MockScannerEntity(ScannerEntity): class MockScanner(DeviceScanner): """Mock device scanner.""" - def __init__(self): + def __init__(self) -> None: """Initialize the MockScanner.""" self.devices_home = [] diff --git a/tests/components/device_tracker/test_config_entry.py b/tests/components/device_tracker/test_config_entry.py index 45b94012051..5b9ce78e4f5 100644 --- a/tests/components/device_tracker/test_config_entry.py +++ b/tests/components/device_tracker/test_config_entry.py @@ -1,9 +1,9 @@ """Test Device Tracker config entry things.""" +from collections.abc import Generator from typing import Any import pytest -from typing_extensions import Generator from homeassistant.components.device_tracker import ( ATTR_HOST_NAME, diff --git a/tests/components/device_tracker/test_device_condition.py b/tests/components/device_tracker/test_device_condition.py index 6ea4ed7a372..aff020d61a8 100644 --- a/tests/components/device_tracker/test_device_condition.py +++ b/tests/components/device_tracker/test_device_condition.py @@ -12,11 +12,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -24,12 +20,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -114,7 
+104,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -184,22 +174,22 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_home - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_home - event - test_event1" hass.states.async_set(entry.entity_id, "school") hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_not_home - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_not_home - event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -247,5 +237,5 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_home - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_home - event - test_event1" diff --git a/tests/components/device_tracker/test_device_trigger.py b/tests/components/device_tracker/test_device_trigger.py index 8932eb15997..ebff89e1a15 100644 --- a/tests/components/device_tracker/test_device_trigger.py +++ b/tests/components/device_tracker/test_device_trigger.py @@ -17,11 +17,7 @@ from 
homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -36,14 +32,8 @@ HOME_LATITUDE = 32.880837 HOME_LONGITUDE = -117.237561 -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) -def setup_zone(hass): +def setup_zone(hass: HomeAssistant) -> None: """Create test zone.""" hass.loop.run_until_complete( async_setup_component( @@ -145,7 +135,7 @@ async def test_if_fires_on_zone_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for enter and leave triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -228,9 +218,9 @@ async def test_if_fires_on_zone_change( {"latitude": HOME_LATITUDE, "longitude": HOME_LONGITUDE}, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"enter - device - {entry.entity_id} - -117.235 - -117.238" ) @@ -241,9 +231,9 @@ async def test_if_fires_on_zone_change( {"latitude": AWAY_LATITUDE, "longitude": AWAY_LONGITUDE}, ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"leave - device - {entry.entity_id} - -117.238 - -117.235" ) @@ -252,7 +242,7 @@ async def test_if_fires_on_zone_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: 
list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for enter and leave triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -311,9 +301,9 @@ async def test_if_fires_on_zone_change_legacy( {"latitude": HOME_LATITUDE, "longitude": HOME_LONGITUDE}, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"enter - device - {entry.entity_id} - -117.235 - -117.238" ) diff --git a/tests/components/device_tracker/test_init.py b/tests/components/device_tracker/test_init.py index 6999a99f7ba..362258b035a 100644 --- a/tests/components/device_tracker/test_init.py +++ b/tests/components/device_tracker/test_init.py @@ -1,5 +1,6 @@ """The tests for the device tracker component.""" +from collections.abc import Generator from datetime import datetime, timedelta import json import logging @@ -49,7 +50,7 @@ _LOGGER = logging.getLogger(__name__) @pytest.fixture(name="yaml_devices") -def mock_yaml_devices(hass): +def mock_yaml_devices(hass: HomeAssistant) -> Generator[str]: """Get a path for storing yaml devices.""" yaml_devices = hass.config.path(legacy.YAML_DEVICES) if os.path.isfile(yaml_devices): @@ -108,7 +109,7 @@ async def test_reading_broken_yaml_config(hass: HomeAssistant) -> None: assert res[0].dev_id == "my_device" -async def test_reading_yaml_config(hass: HomeAssistant, yaml_devices) -> None: +async def test_reading_yaml_config(hass: HomeAssistant, yaml_devices: str) -> None: """Test the rendering of the YAML configuration.""" dev_id = "test" device = legacy.Device( @@ -186,7 +187,7 @@ async def test_duplicate_mac_dev_id(mock_warning, hass: HomeAssistant) -> None: assert "Duplicate device IDs" in args[0], "Duplicate device IDs warning expected" -async def test_setup_without_yaml_file(hass: HomeAssistant, yaml_devices) -> None: +async def test_setup_without_yaml_file(hass: HomeAssistant, yaml_devices: str) -> None: """Test 
with no YAML file.""" with assert_setup_component(1, device_tracker.DOMAIN): assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM) @@ -487,7 +488,7 @@ async def test_invalid_dev_id( assert not devices -async def test_see_state(hass: HomeAssistant, yaml_devices) -> None: +async def test_see_state(hass: HomeAssistant, yaml_devices: str) -> None: """Test device tracker see records state correctly.""" assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/conftest.py b/tests/components/devolo_home_control/conftest.py index 04752da5925..55e072d075c 100644 --- a/tests/components/devolo_home_control/conftest.py +++ b/tests/components/devolo_home_control/conftest.py @@ -1,9 +1,9 @@ """Fixtures for tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/devolo_home_control/mocks.py b/tests/components/devolo_home_control/mocks.py index 02823871e0f..33c0a230e90 100644 --- a/tests/components/devolo_home_control/mocks.py +++ b/tests/components/devolo_home_control/mocks.py @@ -117,6 +117,7 @@ class DeviceMock(Zwave): self.uid = "Test" self.device_model_uid = "Test" self.device_type = "Test" + self.identifier = "MT01234" self.settings_property = {"general_device_settings": SettingsMock()} self.href = "https://www.mydevolo.com" diff --git a/tests/components/devolo_home_control/test_diagnostics.py b/tests/components/devolo_home_control/test_diagnostics.py index f52a9d49017..dfadc4d1c4b 100644 --- a/tests/components/devolo_home_control/test_diagnostics.py +++ b/tests/components/devolo_home_control/test_diagnostics.py @@ -5,6 +5,7 @@ from __future__ import annotations from unittest.mock import patch from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.config_entries 
import ConfigEntryState from homeassistant.core import HomeAssistant @@ -35,4 +36,4 @@ async def test_entry_diagnostics( assert entry.state is ConfigEntryState.LOADED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/devolo_home_network/snapshots/test_init.ambr b/tests/components/devolo_home_network/snapshots/test_init.ambr index b042dfec2f1..619a8ce1121 100644 --- a/tests/components/devolo_home_network/snapshots/test_init.ambr +++ b/tests/components/devolo_home_network/snapshots/test_init.ambr @@ -25,8 +25,10 @@ }), 'manufacturer': 'devolo', 'model': 'dLAN pro 1200+ WiFi ac', + 'model_id': '2730', 'name': 'Mock Title', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': '1234567890', 'suggested_area': None, 'sw_version': '5.6.1', diff --git a/tests/components/devolo_home_network/test_diagnostics.py b/tests/components/devolo_home_network/test_diagnostics.py index a3580cac954..05d3c594677 100644 --- a/tests/components/devolo_home_network/test_diagnostics.py +++ b/tests/components/devolo_home_network/test_diagnostics.py @@ -4,6 +4,7 @@ from __future__ import annotations import pytest from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -28,4 +29,4 @@ async def test_entry_diagnostics( assert entry.state is ConfigEntryState.LOADED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/diagnostics/test_init.py b/tests/components/diagnostics/test_init.py index eeb4f420225..7f583395387 100644 --- a/tests/components/diagnostics/test_init.py +++ b/tests/components/diagnostics/test_init.py @@ -19,7 +19,7 @@ from tests.typing import 
ClientSessionGenerator, WebSocketGenerator @pytest.fixture(autouse=True) -async def mock_diagnostics_integration(hass): +async def mock_diagnostics_integration(hass: HomeAssistant) -> None: """Mock a diagnostics integration.""" hass.config.components.add("fake_integration") mock_platform( diff --git a/tests/components/discovergy/conftest.py b/tests/components/discovergy/conftest.py index 056f763c3e2..4f65099c1b4 100644 --- a/tests/components/discovergy/conftest.py +++ b/tests/components/discovergy/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Discovergy integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from pydiscovergy.models import Reading import pytest -from typing_extensions import Generator from homeassistant.components.discovergy.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/dlink/conftest.py b/tests/components/dlink/conftest.py index 4bbf99000a9..c56b93c4d3d 100644 --- a/tests/components/dlink/conftest.py +++ b/tests/components/dlink/conftest.py @@ -1,11 +1,10 @@ """Configure pytest for D-Link tests.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from copy import deepcopy from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components import dhcp from homeassistant.components.dlink.const import CONF_USE_LEGACY_PROTOCOL, DOMAIN diff --git a/tests/components/dlna_dmr/conftest.py b/tests/components/dlna_dmr/conftest.py index 0d88009f58e..21cb2bc0daf 100644 --- a/tests/components/dlna_dmr/conftest.py +++ b/tests/components/dlna_dmr/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Iterable +from collections.abc import Generator from socket import AddressFamily # pylint: disable=no-name-in-module from unittest.mock import Mock, create_autospec, patch, seal @@ -32,7 +32,7 @@ 
NEW_DEVICE_LOCATION = "http://198.51.100.7" + "/dmr_description.xml" @pytest.fixture -def domain_data_mock(hass: HomeAssistant) -> Iterable[Mock]: +def domain_data_mock(hass: HomeAssistant) -> Mock: """Mock the global data used by this component. This includes network clients and library object factories. Mocking it @@ -72,6 +72,7 @@ def domain_data_mock(hass: HomeAssistant) -> Iterable[Mock]: service_id="urn:upnp-org:serviceId:RenderingControl", ), } + upnp_device.all_services = list(upnp_device.services.values()) seal(upnp_device) domain_data.upnp_factory.async_create_device.return_value = upnp_device @@ -113,7 +114,7 @@ def config_entry_mock_no_mac() -> MockConfigEntry: @pytest.fixture -def dmr_device_mock(domain_data_mock: Mock) -> Iterable[Mock]: +def dmr_device_mock(domain_data_mock: Mock) -> Generator[Mock]: """Mock the async_upnp_client DMR device, initially connected.""" with patch( "homeassistant.components.dlna_dmr.media_player.DmrDevice", autospec=True @@ -134,7 +135,7 @@ def dmr_device_mock(domain_data_mock: Mock) -> Iterable[Mock]: @pytest.fixture(autouse=True) -def ssdp_scanner_mock() -> Iterable[Mock]: +def ssdp_scanner_mock() -> Generator[Mock]: """Mock the SSDP Scanner.""" with patch("homeassistant.components.ssdp.Scanner", autospec=True) as mock_scanner: reg_callback = mock_scanner.return_value.async_register_callback @@ -143,14 +144,14 @@ def ssdp_scanner_mock() -> Iterable[Mock]: @pytest.fixture(autouse=True) -def ssdp_server_mock() -> Iterable[Mock]: +def ssdp_server_mock() -> Generator[None]: """Mock the SSDP Server.""" with patch("homeassistant.components.ssdp.Server", autospec=True): yield @pytest.fixture(autouse=True) -def async_get_local_ip_mock() -> Iterable[Mock]: +def async_get_local_ip_mock() -> Generator[Mock]: """Mock the async_get_local_ip utility function to prevent network access.""" with patch( "homeassistant.components.dlna_dmr.media_player.async_get_local_ip", diff --git a/tests/components/dlna_dmr/test_config_flow.py 
b/tests/components/dlna_dmr/test_config_flow.py index 765d65ff0b9..d60a8f17b83 100644 --- a/tests/components/dlna_dmr/test_config_flow.py +++ b/tests/components/dlna_dmr/test_config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Iterable +from collections.abc import Generator import dataclasses import logging from unittest.mock import Mock, patch @@ -89,7 +89,7 @@ MOCK_DISCOVERY = ssdp.SsdpServiceInfo( @pytest.fixture(autouse=True) -def mock_get_mac_address() -> Iterable[Mock]: +def mock_get_mac_address() -> Generator[Mock]: """Mock the get_mac_address function to prevent network access and assist tests.""" with patch( "homeassistant.components.dlna_dmr.config_flow.get_mac_address", autospec=True @@ -99,7 +99,7 @@ def mock_get_mac_address() -> Iterable[Mock]: @pytest.fixture(autouse=True) -def mock_setup_entry() -> Iterable[Mock]: +def mock_setup_entry() -> Generator[Mock]: """Mock async_setup_entry.""" with patch( "homeassistant.components.dlna_dmr.async_setup_entry", return_value=True @@ -238,7 +238,9 @@ async def test_user_flow_embedded_st( embedded_device.device_type = MOCK_DEVICE_TYPE embedded_device.name = MOCK_DEVICE_NAME embedded_device.services = upnp_device.services + embedded_device.all_services = upnp_device.all_services upnp_device.services = {} + upnp_device.all_services = [] upnp_device.all_devices.append(embedded_device) result = await hass.config_entries.flow.async_init( diff --git a/tests/components/dlna_dmr/test_data.py b/tests/components/dlna_dmr/test_data.py index 57652747ffd..e67a559f934 100644 --- a/tests/components/dlna_dmr/test_data.py +++ b/tests/components/dlna_dmr/test_data.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Iterable +from collections.abc import Generator from unittest.mock import ANY, Mock, patch from async_upnp_client.aiohttp import AiohttpNotifyServer @@ -16,7 +16,7 @@ from homeassistant.core import Event, HomeAssistant @pytest.fixture -def 
aiohttp_notify_servers_mock() -> Iterable[Mock]: +def aiohttp_notify_servers_mock() -> Generator[Mock]: """Construct mock AiohttpNotifyServer on demand, eliminating network use. This fixture provides a list of the constructed servers. diff --git a/tests/components/dlna_dmr/test_media_player.py b/tests/components/dlna_dmr/test_media_player.py index d202994f988..3d8f9da8ed9 100644 --- a/tests/components/dlna_dmr/test_media_player.py +++ b/tests/components/dlna_dmr/test_media_player.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncIterable, Mapping +from collections.abc import AsyncGenerator, Mapping from dataclasses import dataclass from datetime import timedelta from typing import Any @@ -95,7 +95,7 @@ async def mock_entity_id( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dmr_device_mock: Mock, -) -> AsyncIterable[str]: +) -> AsyncGenerator[str]: """Fixture to set up a mock DlnaDmrEntity in a connected state. Yields the entity ID. Cleans up the entity after the test is complete. @@ -145,7 +145,7 @@ async def mock_disconnected_entity_id( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dmr_device_mock: Mock, -) -> AsyncIterable[str]: +) -> AsyncGenerator[str]: """Fixture to set up a mock DlnaDmrEntity in a disconnected state. Yields the entity ID. Cleans up the entity after the test is complete. 
diff --git a/tests/components/dlna_dms/conftest.py b/tests/components/dlna_dms/conftest.py index 1fa56f4bc24..eb10babf527 100644 --- a/tests/components/dlna_dms/conftest.py +++ b/tests/components/dlna_dms/conftest.py @@ -2,9 +2,9 @@ from __future__ import annotations -from collections.abc import AsyncIterable, Iterable +from collections.abc import AsyncGenerator, Generator from typing import Final, cast -from unittest.mock import Mock, create_autospec, patch, seal +from unittest.mock import AsyncMock, MagicMock, Mock, create_autospec, patch, seal from async_upnp_client.client import UpnpDevice, UpnpService from async_upnp_client.utils import absolute_url @@ -44,7 +44,7 @@ async def setup_media_source(hass: HomeAssistant) -> None: @pytest.fixture -def upnp_factory_mock() -> Iterable[Mock]: +def upnp_factory_mock() -> Generator[Mock]: """Mock the UpnpFactory class to construct DMS-style UPnP devices.""" with patch( "homeassistant.components.dlna_dms.dms.UpnpFactory", @@ -82,11 +82,13 @@ def upnp_factory_mock() -> Iterable[Mock]: @pytest.fixture(autouse=True, scope="module") -def aiohttp_session_requester_mock() -> Iterable[Mock]: +def aiohttp_session_requester_mock() -> Generator[Mock]: """Mock the AiohttpSessionRequester to prevent network use.""" with patch( "homeassistant.components.dlna_dms.dms.AiohttpSessionRequester", autospec=True ) as requester_mock: + requester_mock.return_value = mock = AsyncMock() + mock.async_http_request.return_value.body = MagicMock() yield requester_mock @@ -107,7 +109,7 @@ def config_entry_mock() -> MockConfigEntry: @pytest.fixture -def dms_device_mock(upnp_factory_mock: Mock) -> Iterable[Mock]: +def dms_device_mock(upnp_factory_mock: Mock) -> Generator[Mock]: """Mock the async_upnp_client DMS device, initially connected.""" with patch( "homeassistant.components.dlna_dms.dms.DmsDevice", autospec=True @@ -128,7 +130,7 @@ def dms_device_mock(upnp_factory_mock: Mock) -> Iterable[Mock]: @pytest.fixture(autouse=True) -def 
ssdp_scanner_mock() -> Iterable[Mock]: +def ssdp_scanner_mock() -> Generator[Mock]: """Mock the SSDP Scanner.""" with patch("homeassistant.components.ssdp.Scanner", autospec=True) as mock_scanner: reg_callback = mock_scanner.return_value.async_register_callback @@ -137,7 +139,7 @@ def ssdp_scanner_mock() -> Iterable[Mock]: @pytest.fixture(autouse=True) -def ssdp_server_mock() -> Iterable[Mock]: +def ssdp_server_mock() -> Generator[None]: """Mock the SSDP Server.""" with patch("homeassistant.components.ssdp.Server", autospec=True): yield @@ -149,7 +151,7 @@ async def device_source_mock( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dms_device_mock: Mock, -) -> AsyncIterable[None]: +) -> AsyncGenerator[None]: """Fixture to set up a DmsDeviceSource in a connected state and cleanup at completion.""" config_entry_mock.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry_mock.entry_id) diff --git a/tests/components/dlna_dms/test_config_flow.py b/tests/components/dlna_dms/test_config_flow.py index b61b4a42c49..14da36a0381 100644 --- a/tests/components/dlna_dms/test_config_flow.py +++ b/tests/components/dlna_dms/test_config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Iterable +from collections.abc import Generator import dataclasses import logging from typing import Final @@ -68,7 +68,7 @@ MOCK_DISCOVERY: Final = ssdp.SsdpServiceInfo( @pytest.fixture(autouse=True) -def mock_setup_entry() -> Iterable[Mock]: +def mock_setup_entry() -> Generator[Mock]: """Avoid setting up the entire integration.""" with patch( "homeassistant.components.dlna_dms.async_setup_entry", diff --git a/tests/components/doorbird/__init__.py b/tests/components/doorbird/__init__.py index 57bf4c04e39..2d517dfcefe 100644 --- a/tests/components/doorbird/__init__.py +++ b/tests/components/doorbird/__init__.py @@ -1 +1,85 @@ """Tests for the DoorBird integration.""" + +from typing import Any +from unittest.mock import AsyncMock, 
MagicMock, Mock + +import aiohttp +from doorbirdpy import DoorBird, DoorBirdScheduleEntry + +from homeassistant import config_entries +from homeassistant.components.doorbird.const import API_URL +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, +) + +VALID_CONFIG = { + CONF_HOST: "1.2.3.4", + CONF_USERNAME: "friend", + CONF_PASSWORD: "password", + CONF_NAME: "mydoorbird", +} + + +def _get_aiohttp_client_error(status: int) -> aiohttp.ClientResponseError: + """Return a mock aiohttp client response error.""" + return aiohttp.ClientResponseError( + request_info=Mock(), + history=Mock(), + status=status, + ) + + +def mock_unauthorized_exception() -> aiohttp.ClientResponseError: + """Return a mock unauthorized exception.""" + return _get_aiohttp_client_error(401) + + +def mock_not_found_exception() -> aiohttp.ClientResponseError: + """Return a mock not found exception.""" + return _get_aiohttp_client_error(404) + + +def get_mock_doorbird_api( + info: dict[str, Any] | None = None, + info_side_effect: Exception | None = None, + schedule: list[DoorBirdScheduleEntry] | None = None, + schedule_side_effect: Exception | None = None, + favorites: dict[str, dict[str, Any]] | None = None, + favorites_side_effect: Exception | None = None, + change_schedule: tuple[bool, int] | None = None, +) -> DoorBird: + """Return a mock DoorBirdAPI object with return values.""" + doorbirdapi_mock = MagicMock(spec_set=DoorBird) + api_mock_type = type(doorbirdapi_mock) + api_mock_type.info = AsyncMock(side_effect=info_side_effect, return_value=info) + api_mock_type.favorites = AsyncMock( + side_effect=favorites_side_effect, return_value=favorites + ) + api_mock_type.change_favorite = AsyncMock(return_value=True) + api_mock_type.change_schedule = AsyncMock( + return_value=change_schedule or (True, 200) + ) + api_mock_type.schedule = AsyncMock( + return_value=schedule, side_effect=schedule_side_effect + ) + api_mock_type.energize_relay = 
AsyncMock(return_value=True) + api_mock_type.turn_light_on = AsyncMock(return_value=True) + api_mock_type.delete_favorite = AsyncMock(return_value=True) + api_mock_type.get_image = AsyncMock(return_value=b"image") + api_mock_type.doorbell_state = AsyncMock(side_effect=mock_unauthorized_exception()) + return doorbirdapi_mock + + +async def mock_webhook_call( + config_entry: config_entries.ConfigEntry, + aiohttp_client: aiohttp.ClientSession, + event: str, +) -> None: + """Mock the webhook call.""" + token = config_entry.data.get(CONF_TOKEN, config_entry.entry_id) + response = await aiohttp_client.get(f"{API_URL}/{event}?token={token}") + response.raise_for_status() diff --git a/tests/components/doorbird/conftest.py b/tests/components/doorbird/conftest.py new file mode 100644 index 00000000000..2e367e4e1d8 --- /dev/null +++ b/tests/components/doorbird/conftest.py @@ -0,0 +1,133 @@ +"""Test configuration for DoorBird tests.""" + +from collections.abc import Callable, Coroutine, Generator +from contextlib import contextmanager +from dataclasses import dataclass +from typing import Any +from unittest.mock import MagicMock, patch + +from doorbirdpy import DoorBird, DoorBirdScheduleEntry +import pytest + +from homeassistant.components.doorbird.const import ( + CONF_EVENTS, + DEFAULT_DOORBELL_EVENT, + DEFAULT_MOTION_EVENT, + DOMAIN, +) +from homeassistant.core import HomeAssistant + +from . 
import VALID_CONFIG, get_mock_doorbird_api + +from tests.common import MockConfigEntry, load_json_value_fixture + +type DoorbirdMockerType = Callable[[], Coroutine[Any, Any, MockDoorbirdEntry]] + + +@dataclass +class MockDoorbirdEntry: + """Mock DoorBird config entry.""" + + entry: MockConfigEntry + api: MagicMock + + +@pytest.fixture(scope="session") +def doorbird_info() -> dict[str, Any]: + """Return a loaded DoorBird info fixture.""" + return load_json_value_fixture("info.json", "doorbird")["BHA"]["VERSION"][0] + + +@pytest.fixture(scope="session") +def doorbird_schedule() -> list[DoorBirdScheduleEntry]: + """Return a loaded DoorBird schedule fixture.""" + return DoorBirdScheduleEntry.parse_all( + load_json_value_fixture("schedule.json", "doorbird") + ) + + +@pytest.fixture(scope="session") +def doorbird_schedule_wrong_param() -> list[DoorBirdScheduleEntry]: + """Return a loaded DoorBird schedule fixture with an incorrect param.""" + return DoorBirdScheduleEntry.parse_all( + load_json_value_fixture("schedule_wrong_param.json", "doorbird") + ) + + +@pytest.fixture(scope="session") +def doorbird_favorites() -> dict[str, dict[str, Any]]: + """Return a loaded DoorBird favorites fixture.""" + return load_json_value_fixture("favorites.json", "doorbird") + + +@pytest.fixture +def doorbird_api( + doorbird_info: dict[str, Any], doorbird_schedule: dict[str, Any] +) -> Generator[DoorBird]: + """Mock the DoorBirdAPI.""" + api = get_mock_doorbird_api(info=doorbird_info, schedule=doorbird_schedule) + with patch_doorbird_api_entry_points(api): + yield api + + +@contextmanager +def patch_doorbird_api_entry_points(api: MagicMock) -> Generator[DoorBird]: + """Mock the DoorBirdAPI.""" + with ( + patch( + "homeassistant.components.doorbird.DoorBird", + return_value=api, + ), + patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=api, + ), + ): + yield api + + +@pytest.fixture +async def doorbird_mocker( + hass: HomeAssistant, + doorbird_info: dict[str, 
Any], + doorbird_schedule: dict[str, Any], + doorbird_favorites: dict[str, dict[str, Any]], +) -> DoorbirdMockerType: + """Create a MockDoorbirdEntry.""" + + async def _async_mock( + entry: MockConfigEntry | None = None, + api: DoorBird | None = None, + change_schedule: tuple[bool, int] | None = None, + info: dict[str, Any] | None = None, + info_side_effect: Exception | None = None, + schedule: list[DoorBirdScheduleEntry] | None = None, + schedule_side_effect: Exception | None = None, + favorites: dict[str, dict[str, Any]] | None = None, + favorites_side_effect: Exception | None = None, + options: dict[str, Any] | None = None, + ) -> MockDoorbirdEntry: + """Create a MockDoorbirdEntry from defaults or specific values.""" + entry = entry or MockConfigEntry( + domain=DOMAIN, + unique_id="1CCAE3AAAAAA", + data=VALID_CONFIG, + options=options + or {CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT]}, + ) + api = api or get_mock_doorbird_api( + info=info or doorbird_info, + info_side_effect=info_side_effect, + schedule=schedule or doorbird_schedule, + schedule_side_effect=schedule_side_effect, + favorites=favorites or doorbird_favorites, + favorites_side_effect=favorites_side_effect, + change_schedule=change_schedule, + ) + entry.add_to_hass(hass) + with patch_doorbird_api_entry_points(api): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return MockDoorbirdEntry(entry=entry, api=api) + + return _async_mock diff --git a/tests/components/doorbird/fixtures/favorites.json b/tests/components/doorbird/fixtures/favorites.json new file mode 100644 index 00000000000..50dddb850a5 --- /dev/null +++ b/tests/components/doorbird/fixtures/favorites.json @@ -0,0 +1,16 @@ +{ + "http": { + "0": { + "title": "Home Assistant (mydoorbird_doorbell)", + "value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_doorbell?token=01J2F4B97Y7P1SARXEJ6W07EKD" + }, + "1": { + "title": "Home Assistant (mydoorbird_motion)", + "value": 
"http://127.0.0.1:8123/api/doorbird/mydoorbird_motion?token=01J2F4B97Y7P1SARXEJ6W07EKD" + }, + "2": { + "title": "externally added event", + "value": "http://127.0.0.1/" + } + } +} diff --git a/tests/components/doorbird/fixtures/info.json b/tests/components/doorbird/fixtures/info.json new file mode 100644 index 00000000000..46fb8fbac86 --- /dev/null +++ b/tests/components/doorbird/fixtures/info.json @@ -0,0 +1,23 @@ +{ + "BHA": { + "RETURNCODE": "1", + "VERSION": [ + { + "FIRMWARE": "000125", + "BUILD_NUMBER": "15870439", + "WIFI_MAC_ADDR": "1234ABCD", + "RELAYS": [ + "1", + "2", + "ghchdi@1", + "ghchdi@2", + "ghchdi@3", + "ghdwkh@1", + "ghdwkh@2", + "ghdwkh@3" + ], + "DEVICE-TYPE": "DoorBird D2101V" + } + ] + } +} diff --git a/tests/components/doorbird/fixtures/schedule.json b/tests/components/doorbird/fixtures/schedule.json new file mode 100644 index 00000000000..c300180777c --- /dev/null +++ b/tests/components/doorbird/fixtures/schedule.json @@ -0,0 +1,67 @@ +[ + { + "input": "doorbell", + "param": "1", + "output": [ + { + "event": "notify", + "param": "", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + }, + { + "event": "http", + "param": "0", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + } + ] + }, + { + "input": "motion", + "param": "", + "output": [ + { + "event": "notify", + "param": "", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + }, + { + "event": "http", + "param": "5", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + } + ] + }, + { + "input": "relay", + "param": "1", + "output": [] + } +] diff --git a/tests/components/doorbird/fixtures/schedule_wrong_param.json b/tests/components/doorbird/fixtures/schedule_wrong_param.json new file mode 100644 index 00000000000..724f19b1774 --- /dev/null +++ b/tests/components/doorbird/fixtures/schedule_wrong_param.json @@ -0,0 +1,67 @@ +[ + { + "input": 
"doorbell", + "param": "99", + "output": [ + { + "event": "notify", + "param": "", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + }, + { + "event": "http", + "param": "0", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + } + ] + }, + { + "input": "motion", + "param": "", + "output": [ + { + "event": "notify", + "param": "", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + }, + { + "event": "http", + "param": "5", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + } + ] + }, + { + "input": "relay", + "param": "1", + "output": [] + } +] diff --git a/tests/components/doorbird/test_button.py b/tests/components/doorbird/test_button.py new file mode 100644 index 00000000000..cb4bab656ee --- /dev/null +++ b/tests/components/doorbird/test_button.py @@ -0,0 +1,52 @@ +"""Test DoorBird buttons.""" + +from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant + +from .conftest import DoorbirdMockerType + + +async def test_relay_button( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test pressing a relay button.""" + doorbird_entry = await doorbird_mocker() + relay_1_entity_id = "button.mydoorbird_relay_1" + assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN + await hass.services.async_call( + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: relay_1_entity_id}, blocking=True + ) + assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN + assert doorbird_entry.api.energize_relay.call_count == 1 + + +async def test_ir_button( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test pressing the IR button.""" + doorbird_entry = await doorbird_mocker() + ir_entity_id = "button.mydoorbird_ir" + assert hass.states.get(ir_entity_id).state == 
STATE_UNKNOWN + await hass.services.async_call( + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ir_entity_id}, blocking=True + ) + assert hass.states.get(ir_entity_id).state != STATE_UNKNOWN + assert doorbird_entry.api.turn_light_on.call_count == 1 + + +async def test_reset_favorites_button( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test pressing the reset favorites button.""" + doorbird_entry = await doorbird_mocker() + reset_entity_id = "button.mydoorbird_reset_favorites" + assert hass.states.get(reset_entity_id).state == STATE_UNKNOWN + await hass.services.async_call( + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True + ) + assert hass.states.get(reset_entity_id).state != STATE_UNKNOWN + assert doorbird_entry.api.delete_favorite.call_count == 3 diff --git a/tests/components/doorbird/test_camera.py b/tests/components/doorbird/test_camera.py new file mode 100644 index 00000000000..228a6c81daa --- /dev/null +++ b/tests/components/doorbird/test_camera.py @@ -0,0 +1,46 @@ +"""Test DoorBird cameras.""" + +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.camera import ( + STATE_IDLE, + async_get_image, + async_get_stream_source, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from . 
import mock_not_found_exception +from .conftest import DoorbirdMockerType + + +async def test_doorbird_cameras( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the doorbird cameras.""" + doorbird_entry = await doorbird_mocker() + live_camera_entity_id = "camera.mydoorbird_live" + assert hass.states.get(live_camera_entity_id).state == STATE_IDLE + last_motion_camera_entity_id = "camera.mydoorbird_last_motion" + assert hass.states.get(last_motion_camera_entity_id).state == STATE_IDLE + last_ring_camera_entity_id = "camera.mydoorbird_last_ring" + assert hass.states.get(last_ring_camera_entity_id).state == STATE_IDLE + assert await async_get_stream_source(hass, live_camera_entity_id) is not None + api = doorbird_entry.api + api.get_image.side_effect = mock_not_found_exception() + with pytest.raises(HomeAssistantError): + await async_get_image(hass, live_camera_entity_id) + api.get_image.side_effect = TimeoutError() + with pytest.raises(HomeAssistantError): + await async_get_image(hass, live_camera_entity_id) + api.get_image.side_effect = None + assert (await async_get_image(hass, live_camera_entity_id)).content == b"image" + api.get_image.return_value = b"notyet" + # Ensure rate limit works + assert (await async_get_image(hass, live_camera_entity_id)).content == b"image" + + freezer.tick(60) + assert (await async_get_image(hass, live_camera_entity_id)).content == b"notyet" diff --git a/tests/components/doorbird/test_config_flow.py b/tests/components/doorbird/test_config_flow.py index cd4ddccda87..3abdd2b87a3 100644 --- a/tests/components/doorbird/test_config_flow.py +++ b/tests/components/doorbird/test_config_flow.py @@ -1,47 +1,35 @@ """Test the DoorBird config flow.""" from ipaddress import ip_address -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import AsyncMock, Mock, patch +import aiohttp +from doorbirdpy import DoorBird import pytest -import requests from homeassistant 
import config_entries from homeassistant.components import zeroconf -from homeassistant.components.doorbird.const import CONF_EVENTS, DOMAIN +from homeassistant.components.doorbird.const import ( + CONF_EVENTS, + DEFAULT_DOORBELL_EVENT, + DEFAULT_MOTION_EVENT, + DOMAIN, +) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . import ( + VALID_CONFIG, + get_mock_doorbird_api, + mock_not_found_exception, + mock_unauthorized_exception, +) + from tests.common import MockConfigEntry -VALID_CONFIG = { - CONF_HOST: "1.2.3.4", - CONF_USERNAME: "friend", - CONF_PASSWORD: "password", - CONF_NAME: "mydoorbird", -} - -def _get_mock_doorbirdapi_return_values(ready=None, info=None): - doorbirdapi_mock = MagicMock() - type(doorbirdapi_mock).ready = MagicMock(return_value=ready) - type(doorbirdapi_mock).info = MagicMock(return_value=info) - type(doorbirdapi_mock).doorbell_state = MagicMock( - side_effect=requests.exceptions.HTTPError(response=Mock(status_code=401)) - ) - return doorbirdapi_mock - - -def _get_mock_doorbirdapi_side_effects(ready=None, info=None): - doorbirdapi_mock = MagicMock() - type(doorbirdapi_mock).ready = MagicMock(side_effect=ready) - type(doorbirdapi_mock).info = MagicMock(side_effect=info) - - return doorbirdapi_mock - - -async def test_user_form(hass: HomeAssistant) -> None: +async def test_user_form(hass: HomeAssistant, doorbird_api: DoorBird) -> None: """Test we get the user form.""" result = await hass.config_entries.flow.async_init( @@ -50,14 +38,7 @@ async def test_user_form(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) with ( - patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ), patch( 
"homeassistant.components.doorbird.async_setup", return_value=True ) as mock_setup, @@ -80,6 +61,9 @@ async def test_user_form(hass: HomeAssistant) -> None: "password": "password", "username": "friend", } + assert result2["options"] == { + CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT] + } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -172,39 +156,30 @@ async def test_form_zeroconf_non_ipv4_ignored(hass: HomeAssistant) -> None: assert result["reason"] == "not_ipv4_address" -async def test_form_zeroconf_correct_oui(hass: HomeAssistant) -> None: +async def test_form_zeroconf_correct_oui( + hass: HomeAssistant, doorbird_api: DoorBird +) -> None: """Test we can setup from zeroconf with the correct OUI source.""" - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) - with patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.5"), - ip_addresses=[ip_address("192.168.1.5")], - hostname="mock_hostname", - name="Doorstation - abc123._axis-video._tcp.local.", - port=None, - properties={"macaddress": "1CCAE3DOORBIRD"}, - type="mock_type", - ), - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.5"), + ip_addresses=[ip_address("192.168.1.5")], + hostname="mock_hostname", + name="Doorstation - abc123._axis-video._tcp.local.", + port=None, + properties={"macaddress": "1CCAE3DOORBIRD"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} with ( 
- patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ), patch("homeassistant.components.logbook.async_setup", return_value=True), patch( "homeassistant.components.doorbird.async_setup", return_value=True @@ -234,19 +209,19 @@ async def test_form_zeroconf_correct_oui(hass: HomeAssistant) -> None: @pytest.mark.parametrize( "doorbell_state_side_effect", [ - requests.exceptions.HTTPError(response=Mock(status_code=404)), + aiohttp.ClientResponseError(request_info=Mock(), history=Mock(), status=404), OSError, None, ], ) async def test_form_zeroconf_correct_oui_wrong_device( - hass: HomeAssistant, doorbell_state_side_effect + hass: HomeAssistant, + doorbird_api: DoorBird, + doorbell_state_side_effect: Exception | None, ) -> None: """Test we can setup from zeroconf with the correct OUI source but not a doorstation.""" - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) - type(doorbirdapi).doorbell_state = MagicMock(side_effect=doorbell_state_side_effect) + doorbirdapi = get_mock_doorbird_api(info={"WIFI_MAC_ADDR": "macaddr"}) + type(doorbirdapi).doorbell_state = AsyncMock(side_effect=doorbell_state_side_effect) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", @@ -276,7 +251,7 @@ async def test_form_user_cannot_connect(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - doorbirdapi = _get_mock_doorbirdapi_side_effects(ready=OSError) + doorbirdapi = get_mock_doorbird_api(info_side_effect=OSError) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", return_value=doorbirdapi, @@ -296,8 +271,8 @@ async def test_form_user_invalid_auth(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_error = requests.exceptions.HTTPError(response=Mock(status_code=401)) - doorbirdapi = _get_mock_doorbirdapi_side_effects(ready=mock_error) + mock_error = 
mock_unauthorized_exception() + doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", return_value=doorbirdapi, @@ -311,6 +286,100 @@ async def test_form_user_invalid_auth(hass: HomeAssistant) -> None: assert result2["errors"] == {"base": "invalid_auth"} +async def test_form_user_doorbird_not_found( + doorbird_api: DoorBird, hass: HomeAssistant +) -> None: + """Test handling unable to connect to the device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + mock_error = mock_not_found_exception() + doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) + with patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + + with ( + patch( + "homeassistant.components.doorbird.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.doorbird.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], VALID_CONFIG + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "1.2.3.4" + assert result3["data"] == { + "host": "1.2.3.4", + "name": "mydoorbird", + "password": "password", + "username": "friend", + } + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_user_doorbird_unknown_exception( + doorbird_api: DoorBird, hass: HomeAssistant +) -> None: + """Test handling unable an unknown exception.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + 
doorbirdapi = get_mock_doorbird_api(info_side_effect=ValueError) + with patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} + + with ( + patch( + "homeassistant.components.doorbird.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.doorbird.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], VALID_CONFIG + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "1.2.3.4" + assert result3["data"] == { + "host": "1.2.3.4", + "name": "mydoorbird", + "password": "password", + "username": "friend", + } + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_options_flow(hass: HomeAssistant) -> None: """Test config flow options.""" @@ -336,3 +405,67 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert config_entry.options == {CONF_EVENTS: ["eventa", "eventc", "eventq"]} + + +async def test_reauth(hass: HomeAssistant) -> None: + """Test reauth flow.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "DoorBird", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + config_entry.add_to_hass(hass) + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) + assert len(flows) == 1 + flow = flows[0] + + mock_error = mock_unauthorized_exception() + doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) + with patch( + 
"homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ): + result2 = await hass.config_entries.flow.async_configure( + flow["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} + + doorbirdapi = get_mock_doorbird_api(info={"WIFI_MAC_ADDR": "macaddr"}) + with ( + patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ), + patch( + "homeassistant.components.doorbird.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.doorbird.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + flow["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_setup.mock_calls) == 1 diff --git a/tests/components/doorbird/test_device.py b/tests/components/doorbird/test_device.py new file mode 100644 index 00000000000..cf3beae5e68 --- /dev/null +++ b/tests/components/doorbird/test_device.py @@ -0,0 +1,59 @@ +"""Test DoorBird device.""" + +from copy import deepcopy +from http import HTTPStatus + +from doorbirdpy import DoorBirdScheduleEntry +import pytest + +from homeassistant.components.doorbird.const import CONF_EVENTS +from homeassistant.core import HomeAssistant + +from .conftest import DoorbirdMockerType + + +async def test_no_configured_events( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test a doorbird with no events configured.""" + await doorbird_mocker(options={CONF_EVENTS: []}) + assert not hass.states.async_all("event") + + +async def test_change_schedule_success( + 
doorbird_mocker: DoorbirdMockerType, + doorbird_schedule_wrong_param: list[DoorBirdScheduleEntry], + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a doorbird when change_schedule fails.""" + schedule_copy = deepcopy(doorbird_schedule_wrong_param) + mock_doorbird = await doorbird_mocker(schedule=schedule_copy) + assert "Unable to update schedule entry mydoorbird" not in caplog.text + assert mock_doorbird.api.change_schedule.call_count == 1 + new_schedule: list[DoorBirdScheduleEntry] = ( + mock_doorbird.api.change_schedule.call_args[0] + ) + # Ensure the attempt to update the schedule to fix the incorrect + # param is made + assert new_schedule[-1].output[-1].param == "1" + + +async def test_change_schedule_fails( + doorbird_mocker: DoorbirdMockerType, + doorbird_schedule_wrong_param: list[DoorBirdScheduleEntry], + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a doorbird when change_schedule fails.""" + schedule_copy = deepcopy(doorbird_schedule_wrong_param) + mock_doorbird = await doorbird_mocker( + schedule=schedule_copy, change_schedule=(False, HTTPStatus.UNAUTHORIZED) + ) + assert "Unable to update schedule entry mydoorbird" in caplog.text + assert mock_doorbird.api.change_schedule.call_count == 1 + new_schedule: list[DoorBirdScheduleEntry] = ( + mock_doorbird.api.change_schedule.call_args[0] + ) + # Ensure the attempt to update the schedule to fix the incorrect + # param is made + assert new_schedule[-1].output[-1].param == "1" diff --git a/tests/components/doorbird/test_event.py b/tests/components/doorbird/test_event.py new file mode 100644 index 00000000000..11e0f3a306d --- /dev/null +++ b/tests/components/doorbird/test_event.py @@ -0,0 +1,37 @@ +"""Test DoorBird events.""" + +from homeassistant.const import STATE_UNKNOWN +from homeassistant.core import HomeAssistant + +from . 
import mock_webhook_call +from .conftest import DoorbirdMockerType + +from tests.typing import ClientSessionGenerator + + +async def test_doorbell_ring_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test a doorbell ring event.""" + doorbird_entry = await doorbird_mocker() + relay_1_entity_id = "event.mydoorbird_doorbell" + assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN + client = await hass_client() + await mock_webhook_call(doorbird_entry.entry, client, "mydoorbird_doorbell") + assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN + + +async def test_motion_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test a doorbell motion event.""" + doorbird_entry = await doorbird_mocker() + relay_1_entity_id = "event.mydoorbird_motion" + assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN + client = await hass_client() + await mock_webhook_call(doorbird_entry.entry, client, "mydoorbird_motion") + assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN diff --git a/tests/components/doorbird/test_init.py b/tests/components/doorbird/test_init.py new file mode 100644 index 00000000000..31266c4acf0 --- /dev/null +++ b/tests/components/doorbird/test_init.py @@ -0,0 +1,93 @@ +"""Test DoorBird init.""" + +import pytest + +from homeassistant.components.doorbird.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import mock_not_found_exception, mock_unauthorized_exception +from .conftest import DoorbirdMockerType + + +async def test_basic_setup( + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test basic setup.""" + doorbird_entry = await doorbird_mocker() + entry = doorbird_entry.entry + assert entry.state is ConfigEntryState.LOADED + + +async def test_auth_fails( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test basic setup with an auth failure.""" + doorbird_entry = await doorbird_mocker( + info_side_effect=mock_unauthorized_exception() + ) + entry = doorbird_entry.entry + assert entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress(DOMAIN) + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" + + +@pytest.mark.parametrize( + "side_effect", + [OSError, mock_not_found_exception()], +) +async def test_http_info_request_fails( + doorbird_mocker: DoorbirdMockerType, side_effect: Exception +) -> None: + """Test basic setup with an http failure.""" + doorbird_entry = await doorbird_mocker(info_side_effect=side_effect) + assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_http_favorites_request_fails( + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test basic setup with an http failure.""" + doorbird_entry = await doorbird_mocker( + favorites_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_http_schedule_api_missing( + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test missing the schedule API is non-fatal as not all models support it.""" + doorbird_entry = await doorbird_mocker( + schedule_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.LOADED + + +async def test_events_changed( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test basic setup.""" + doorbird_entry = 
await doorbird_mocker() + entry = doorbird_entry.entry + assert entry.state is ConfigEntryState.LOADED + api = doorbird_entry.api + api.favorites.reset_mock() + api.change_favorite.reset_mock() + api.schedule.reset_mock() + + hass.config_entries.async_update_entry(entry, options={"events": ["xyz"]}) + await hass.async_block_till_done() + assert len(api.favorites.mock_calls) == 2 + assert len(api.schedule.mock_calls) == 1 + + assert len(api.change_favorite.mock_calls) == 1 + favorite_type, title, url = api.change_favorite.mock_calls[0][1] + assert favorite_type == "http" + assert title == "Home Assistant (mydoorbird_xyz)" + assert url == ( + f"http://10.10.10.10:8123/api/doorbird/mydoorbird_xyz?token={entry.entry_id}" + ) diff --git a/tests/components/doorbird/test_repairs.py b/tests/components/doorbird/test_repairs.py new file mode 100644 index 00000000000..7449250b718 --- /dev/null +++ b/tests/components/doorbird/test_repairs.py @@ -0,0 +1,61 @@ +"""Test repairs for doorbird.""" + +from __future__ import annotations + +from http import HTTPStatus + +from homeassistant.components.doorbird.const import DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from . 
import mock_not_found_exception +from .conftest import DoorbirdMockerType + +from tests.typing import ClientSessionGenerator + + +async def test_change_schedule_fails( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, + hass_client: ClientSessionGenerator, +) -> None: + """Test a doorbird when change_schedule fails.""" + assert await async_setup_component(hass, "repairs", {}) + doorbird_entry = await doorbird_mocker( + favorites_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY + issue_reg = ir.async_get(hass) + assert len(issue_reg.issues) == 1 + issue = list(issue_reg.issues.values())[0] + issue_id = issue.issue_id + assert issue.domain == DOMAIN + + await async_process_repairs_platforms(hass) + client = await hass_client() + + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + placeholders = data["description_placeholders"] + assert "404" in placeholders["error"] + assert data["step_id"] == "confirm" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "create_entry" diff --git a/tests/components/doorbird/test_view.py b/tests/components/doorbird/test_view.py new file mode 100644 index 00000000000..9d2b53714b6 --- /dev/null +++ b/tests/components/doorbird/test_view.py @@ -0,0 +1,21 @@ +"""Test DoorBird view.""" + +from http import HTTPStatus + +from homeassistant.components.doorbird.const import API_URL + +from .conftest import DoorbirdMockerType + +from tests.typing import ClientSessionGenerator + + +async def test_non_webhook_with_wrong_token( + hass_client: ClientSessionGenerator, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test calling the webhook with the wrong token.""" + await 
doorbird_mocker() + client = await hass_client() + + response = await client.get(f"{API_URL}/doorbell?token=wrong") + assert response.status == HTTPStatus.UNAUTHORIZED diff --git a/tests/components/drop_connect/test_sensor.py b/tests/components/drop_connect/test_sensor.py index 4873d1edbd1..cb56522a09d 100644 --- a/tests/components/drop_connect/test_sensor.py +++ b/tests/components/drop_connect/test_sensor.py @@ -47,7 +47,7 @@ from tests.typing import MqttMockHAClient @pytest.fixture(autouse=True) -def only_sensor_platform() -> Generator[[], None]: +def only_sensor_platform() -> Generator[None]: """Only setup the DROP sensor platform.""" with patch("homeassistant.components.drop_connect.PLATFORMS", [Platform.SENSOR]): yield diff --git a/tests/components/dsmr/conftest.py b/tests/components/dsmr/conftest.py index 05881d9c877..2301b9dfc80 100644 --- a/tests/components/dsmr/conftest.py +++ b/tests/components/dsmr/conftest.py @@ -1,6 +1,7 @@ """Common test tools.""" import asyncio +from collections.abc import Generator from unittest.mock import MagicMock, patch from dsmr_parser.clients.protocol import DSMRProtocol @@ -18,7 +19,7 @@ import pytest @pytest.fixture -async def dsmr_connection_fixture(hass): +def dsmr_connection_fixture() -> Generator[tuple[MagicMock, MagicMock, MagicMock]]: """Fixture that mocks serial connection.""" transport = MagicMock(spec=asyncio.Transport) @@ -44,7 +45,9 @@ async def dsmr_connection_fixture(hass): @pytest.fixture -async def rfxtrx_dsmr_connection_fixture(hass): +def rfxtrx_dsmr_connection_fixture() -> ( + Generator[tuple[MagicMock, MagicMock, MagicMock]] +): """Fixture that mocks RFXtrx connection.""" transport = MagicMock(spec=asyncio.Transport) @@ -70,7 +73,9 @@ async def rfxtrx_dsmr_connection_fixture(hass): @pytest.fixture -async def dsmr_connection_send_validate_fixture(hass): +def dsmr_connection_send_validate_fixture() -> ( + Generator[tuple[MagicMock, MagicMock, MagicMock]] +): """Fixture that mocks serial connection.""" 
transport = MagicMock(spec=asyncio.Transport) @@ -151,7 +156,9 @@ async def dsmr_connection_send_validate_fixture(hass): @pytest.fixture -async def rfxtrx_dsmr_connection_send_validate_fixture(hass): +def rfxtrx_dsmr_connection_send_validate_fixture() -> ( + Generator[tuple[MagicMock, MagicMock, MagicMock]] +): """Fixture that mocks serial connection.""" transport = MagicMock(spec=asyncio.Transport) diff --git a/tests/components/dsmr/snapshots/test_diagnostics.ambr b/tests/components/dsmr/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..ec2dc274efa --- /dev/null +++ b/tests/components/dsmr/snapshots/test_diagnostics.ambr @@ -0,0 +1,29 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'data': dict({ + 'CURRENT_ELECTRICITY_USAGE': dict({ + 'unit': 'W', + 'value': 0.0, + }), + 'ELECTRICITY_ACTIVE_TARIFF': dict({ + 'unit': '', + 'value': '0001', + }), + 'GAS_METER_READING': dict({ + 'datetime': '2019-03-03T19:43:33+00:00', + 'unit': 'm³', + 'value': 745.695, + }), + }), + 'entry': dict({ + 'data': dict({ + 'dsmr_version': '2.2', + 'port': '/dev/ttyUSB0', + 'serial_id': '1234', + 'serial_id_gas': '5678', + }), + 'unique_id': '/dev/ttyUSB0', + }), + }) +# --- diff --git a/tests/components/dsmr/test_config_flow.py b/tests/components/dsmr/test_config_flow.py index 711b29f4ae0..91adf38eacf 100644 --- a/tests/components/dsmr/test_config_flow.py +++ b/tests/components/dsmr/test_config_flow.py @@ -10,7 +10,8 @@ import serial import serial.tools.list_ports from homeassistant import config_entries -from homeassistant.components.dsmr import DOMAIN, config_flow +from homeassistant.components.dsmr import config_flow +from homeassistant.components.dsmr.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -32,7 +33,8 @@ def com_port(): async def test_setup_network( - hass: HomeAssistant, dsmr_connection_send_validate_fixture + hass: HomeAssistant, + 
dsmr_connection_send_validate_fixture: tuple[MagicMock, MagicMock, MagicMock], ) -> None: """Test we can setup network.""" result = await hass.config_entries.flow.async_init( @@ -77,8 +79,10 @@ async def test_setup_network( async def test_setup_network_rfxtrx( hass: HomeAssistant, - dsmr_connection_send_validate_fixture, - rfxtrx_dsmr_connection_send_validate_fixture, + dsmr_connection_send_validate_fixture: tuple[MagicMock, MagicMock, MagicMock], + rfxtrx_dsmr_connection_send_validate_fixture: tuple[ + MagicMock, MagicMock, MagicMock + ], ) -> None: """Test we can setup network.""" (connection_factory, transport, protocol) = dsmr_connection_send_validate_fixture @@ -185,7 +189,7 @@ async def test_setup_network_rfxtrx( async def test_setup_serial( com_mock, hass: HomeAssistant, - dsmr_connection_send_validate_fixture, + dsmr_connection_send_validate_fixture: tuple[MagicMock, MagicMock, MagicMock], version: str, entry_data: dict[str, Any], ) -> None: @@ -225,8 +229,10 @@ async def test_setup_serial( async def test_setup_serial_rfxtrx( com_mock, hass: HomeAssistant, - dsmr_connection_send_validate_fixture, - rfxtrx_dsmr_connection_send_validate_fixture, + dsmr_connection_send_validate_fixture: tuple[MagicMock, MagicMock, MagicMock], + rfxtrx_dsmr_connection_send_validate_fixture: tuple[ + MagicMock, MagicMock, MagicMock + ], ) -> None: """Test we can setup serial.""" (connection_factory, transport, protocol) = dsmr_connection_send_validate_fixture @@ -273,7 +279,9 @@ async def test_setup_serial_rfxtrx( @patch("serial.tools.list_ports.comports", return_value=[com_port()]) async def test_setup_serial_manual( - com_mock, hass: HomeAssistant, dsmr_connection_send_validate_fixture + com_mock, + hass: HomeAssistant, + dsmr_connection_send_validate_fixture: tuple[MagicMock, MagicMock, MagicMock], ) -> None: """Test we can setup serial with manual entry.""" result = await hass.config_entries.flow.async_init( @@ -321,7 +329,9 @@ async def test_setup_serial_manual( 
@patch("serial.tools.list_ports.comports", return_value=[com_port()]) async def test_setup_serial_fail( - com_mock, hass: HomeAssistant, dsmr_connection_send_validate_fixture + com_mock, + hass: HomeAssistant, + dsmr_connection_send_validate_fixture: tuple[MagicMock, MagicMock, MagicMock], ) -> None: """Test failed serial connection.""" (connection_factory, transport, protocol) = dsmr_connection_send_validate_fixture @@ -369,8 +379,10 @@ async def test_setup_serial_fail( async def test_setup_serial_timeout( com_mock, hass: HomeAssistant, - dsmr_connection_send_validate_fixture, - rfxtrx_dsmr_connection_send_validate_fixture, + dsmr_connection_send_validate_fixture: tuple[MagicMock, MagicMock, MagicMock], + rfxtrx_dsmr_connection_send_validate_fixture: tuple[ + MagicMock, MagicMock, MagicMock + ], ) -> None: """Test failed serial connection.""" (connection_factory, transport, protocol) = dsmr_connection_send_validate_fixture @@ -425,8 +437,10 @@ async def test_setup_serial_timeout( async def test_setup_serial_wrong_telegram( com_mock, hass: HomeAssistant, - dsmr_connection_send_validate_fixture, - rfxtrx_dsmr_connection_send_validate_fixture, + dsmr_connection_send_validate_fixture: tuple[MagicMock, MagicMock, MagicMock], + rfxtrx_dsmr_connection_send_validate_fixture: tuple[ + MagicMock, MagicMock, MagicMock + ], ) -> None: """Test failed telegram data.""" (connection_factory, transport, protocol) = dsmr_connection_send_validate_fixture diff --git a/tests/components/dsmr/test_diagnostics.py b/tests/components/dsmr/test_diagnostics.py new file mode 100644 index 00000000000..8fc996f6e34 --- /dev/null +++ b/tests/components/dsmr/test_diagnostics.py @@ -0,0 +1,82 @@ +"""Test DSMR diagnostics.""" + +import datetime +from decimal import Decimal +from unittest.mock import MagicMock + +from dsmr_parser.obis_references import ( + CURRENT_ELECTRICITY_USAGE, + ELECTRICITY_ACTIVE_TARIFF, + GAS_METER_READING, +) +from dsmr_parser.objects import CosemObject, MBusObject, Telegram 
+from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + entry_data = { + "port": "/dev/ttyUSB0", + "dsmr_version": "2.2", + "serial_id": "1234", + "serial_id_gas": "5678", + } + entry_options = { + "time_between_update": 0, + } + + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), + [{"value": Decimal("0.0"), "unit": "W"}], + ), + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "m³"}, + ], + ), + "GAS_METER_READING", + ) + + mock_entry = MockConfigEntry( + domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options + ) + + mock_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) + assert result == snapshot diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py index 284a0001b89..7c7d182aa97 100644 --- a/tests/components/dsmr/test_mbus_migration.py +++ 
b/tests/components/dsmr/test_mbus_migration.py @@ -2,13 +2,14 @@ import datetime from decimal import Decimal +from unittest.mock import MagicMock from dsmr_parser.obis_references import ( - BELGIUM_MBUS1_DEVICE_TYPE, - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS1_METER_READING2, + MBUS_DEVICE_TYPE, + MBUS_EQUIPMENT_IDENTIFIER, + MBUS_METER_READING, ) -from dsmr_parser.objects import CosemObject, MBusObject +from dsmr_parser.objects import CosemObject, MBusObject, Telegram from homeassistant.components.dsmr.const import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN @@ -22,7 +23,7 @@ async def test_migrate_gas_to_mbus( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - dsmr_connection_fixture, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], ) -> None: """Test migration of unique_id.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -64,22 +65,31 @@ async def test_migrate_gas_to_mbus( assert entity.unique_id == old_unique_id await hass.async_block_till_done() - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - } + "MBUS_METER_READING", + ) assert await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() @@ -109,11 
+119,206 @@ async def test_migrate_gas_to_mbus( ) +async def test_migrate_hourly_gas_to_mbus( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], +) -> None: + """Test migration of unique_id.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="/dev/ttyUSB0", + data={ + "port": "/dev/ttyUSB0", + "dsmr_version": "5", + "serial_id": "1234", + "serial_id_gas": "4730303738353635363037343639323231", + }, + options={ + "time_between_update": 0, + }, + ) + + mock_entry.add_to_hass(hass) + + old_unique_id = "4730303738353635363037343639323231_hourly_gas_meter_reading" + + device = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, mock_entry.entry_id)}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device.id, + unique_id=old_unique_id, + config_entry=mock_entry, + ) + assert entity.unique_id == old_unique_id + await hass.async_block_till_done() + + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), + [{"value": "4730303738353635363037343639323231", "unit": ""}], + ), + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), + [ + {"value": datetime.datetime.fromtimestamp(1722749707)}, + {"value": Decimal(778.963), "unit": "m3"}, + ], + ), + "MBUS_METER_READING", + ) + + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + 
# simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + dev_entities = er.async_entries_for_device( + entity_registry, device.id, include_disabled_entities=True + ) + assert not dev_entities + + assert ( + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + is None + ) + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, DOMAIN, "4730303738353635363037343639323231" + ) + == "sensor.gas_meter_reading" + ) + + +async def test_migrate_gas_with_devid_to_mbus( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], +) -> None: + """Test migration of unique_id.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="/dev/ttyUSB0", + data={ + "port": "/dev/ttyUSB0", + "dsmr_version": "5B", + "serial_id": "1234", + "serial_id_gas": "37464C4F32313139303333373331", + }, + options={ + "time_between_update": 0, + }, + ) + + mock_entry.add_to_hass(hass) + + old_unique_id = "37464C4F32313139303333373331_belgium_5min_gas_meter_reading" + + device = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, "37464C4F32313139303333373331")}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device.id, + unique_id=old_unique_id, + config_entry=mock_entry, + ) + assert entity.unique_id == old_unique_id + await hass.async_block_till_done() + + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + 
"MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), + [{"value": "37464C4F32313139303333373331", "unit": ""}], + ), + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "m3"}, + ], + ), + "MBUS_METER_READING", + ) + + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + assert ( + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + is None + ) + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, DOMAIN, "37464C4F32313139303333373331" + ) + == "sensor.gas_meter_reading" + ) + + async def test_migrate_gas_to_mbus_exists( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - dsmr_connection_fixture, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], ) -> None: """Test migration of unique_id.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -172,22 +377,31 @@ async def test_migrate_gas_to_mbus_exists( ) await hass.async_block_till_done() - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - 
BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - } + "MBUS_METER_READING", + ) assert await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index e014fdb68f2..c2c6d48b007 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -11,35 +11,24 @@ from decimal import Decimal from itertools import chain, repeat from unittest.mock import DEFAULT, MagicMock +from dsmr_parser import obis_references from dsmr_parser.obis_references import ( BELGIUM_CURRENT_AVERAGE_DEMAND, BELGIUM_MAXIMUM_DEMAND_MONTH, - BELGIUM_MBUS1_DEVICE_TYPE, - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS1_METER_READING1, - BELGIUM_MBUS1_METER_READING2, - BELGIUM_MBUS2_DEVICE_TYPE, - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS2_METER_READING1, - BELGIUM_MBUS2_METER_READING2, - BELGIUM_MBUS3_DEVICE_TYPE, - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS3_METER_READING1, - BELGIUM_MBUS3_METER_READING2, - BELGIUM_MBUS4_DEVICE_TYPE, - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS4_METER_READING1, - BELGIUM_MBUS4_METER_READING2, CURRENT_ELECTRICITY_USAGE, ELECTRICITY_ACTIVE_TARIFF, ELECTRICITY_EXPORTED_TOTAL, ELECTRICITY_IMPORTED_TOTAL, GAS_METER_READING, HOURLY_GAS_METER_READING, + MBUS_DEVICE_TYPE, + MBUS_EQUIPMENT_IDENTIFIER, + MBUS_METER_READING, ) -from dsmr_parser.objects import CosemObject, MBusObject +from dsmr_parser.objects import CosemObject, MBusObject, Telegram import pytest +from homeassistant.components.dsmr.sensor import SENSORS, SENSORS_MBUS_DEVICE_TYPE from homeassistant.components.sensor import ( ATTR_OPTIONS, ATTR_STATE_CLASS, @@ -63,7 +52,9 @@ from tests.common import 
MockConfigEntry, patch async def test_default_setup( - hass: HomeAssistant, entity_registry: er.EntityRegistry, dsmr_connection_fixture + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], ) -> None: """Test the default setup.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -78,22 +69,31 @@ async def test_default_setup( "time_between_update": 0, } - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("0.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - GAS_METER_READING: MBusObject( - GAS_METER_READING, + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": UnitOfVolume.CUBIC_METERS}, ], ), - } + "GAS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -132,22 +132,31 @@ async def test_default_setup( ) assert power_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "W" - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - GAS_METER_READING: MBusObject( - GAS_METER_READING, + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": 
"0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642214)}, {"value": Decimal(745.701), "unit": UnitOfVolume.CUBIC_METERS}, ], ), - } + "GAS_METER_READING", + ) # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser telegram_callback(telegram) @@ -191,7 +200,9 @@ async def test_default_setup( async def test_setup_only_energy( - hass: HomeAssistant, entity_registry: er.EntityRegistry, dsmr_connection_fixture + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], ) -> None: """Test the default setup.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -205,15 +216,20 @@ async def test_setup_only_energy( "time_between_update": 0, } - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -240,7 +256,9 @@ async def test_setup_only_energy( assert not entry -async def test_v4_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_v4_meter( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """Test if v4 meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -254,18 +272,23 @@ async def test_v4_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: 
"time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -319,7 +342,10 @@ async def test_v4_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: ], ) async def test_v5_meter( - hass: HomeAssistant, dsmr_connection_fixture, value: Decimal, state: str + hass: HomeAssistant, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], + value: Decimal, + state: str, ) -> None: """Test if v5 meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -334,18 +360,23 @@ async def test_v5_meter( "time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": value, "unit": "m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -386,7 +417,9 @@ async def test_v5_meter( ) -async def test_luxembourg_meter(hass: HomeAssistant, 
dsmr_connection_fixture) -> None: +async def test_luxembourg_meter( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """Test if v5 meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -400,23 +433,34 @@ async def test_luxembourg_meter(hass: HomeAssistant, dsmr_connection_fixture) -> "time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(123.456), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(654.321), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -468,7 +512,9 @@ async def test_luxembourg_meter(hass: HomeAssistant, dsmr_connection_fixture) -> ) -async def test_belgian_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_belgian_meter( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """Test if Belgian meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -482,78 +528,127 @@ async def test_belgian_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No "time_between_update": 0, } - telegram = { - BELGIUM_CURRENT_AVERAGE_DEMAND: CosemObject( - 
BELGIUM_CURRENT_AVERAGE_DEMAND, + telegram = Telegram() + telegram.add( + BELGIUM_CURRENT_AVERAGE_DEMAND, + CosemObject( + (0, 0), [{"value": Decimal(1.75), "unit": "kW"}], ), - BELGIUM_MAXIMUM_DEMAND_MONTH: MBusObject( - BELGIUM_MAXIMUM_DEMAND_MONTH, + "BELGIUM_CURRENT_AVERAGE_DEMAND", + ) + telegram.add( + BELGIUM_MAXIMUM_DEMAND_MONTH, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(4.11), "unit": "kW"}, ], ), - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + "BELGIUM_MAXIMUM_DEMAND_MONTH", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 2), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS2_METER_READING1: MBusObject( - BELGIUM_MBUS2_METER_READING1, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 2), [ {"value": datetime.datetime.fromtimestamp(1551642214)}, {"value": Decimal(678.695), "unit": "m3"}, ], ), - 
BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 3), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING2: MBusObject( - BELGIUM_MBUS3_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642215)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 4), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 4), [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - BELGIUM_MBUS4_METER_READING1: MBusObject( - BELGIUM_MBUS4_METER_READING1, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642216)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "MBUS_METER_READING", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -651,7 +746,9 @@ async def test_belgian_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No ) -async def 
test_belgian_meter_alt(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_belgian_meter_alt( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """Test if Belgian meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -665,64 +762,103 @@ async def test_belgian_meter_alt(hass: HomeAssistant, dsmr_connection_fixture) - "time_between_update": 0, } - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING1: MBusObject( - BELGIUM_MBUS1_METER_READING1, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642215)}, {"value": Decimal(123.456), "unit": "m3"}, ], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 2), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS2_METER_READING2: MBusObject( - BELGIUM_MBUS2_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 2), [ {"value": datetime.datetime.fromtimestamp(1551642216)}, {"value": Decimal(678.901), "unit": "m3"}, ], ), - 
BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 3), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING1: MBusObject( - BELGIUM_MBUS3_METER_READING1, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642217)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 4), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 4), [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - BELGIUM_MBUS4_METER_READING2: MBusObject( - BELGIUM_MBUS4_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - } + "MBUS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -798,7 +934,9 @@ async def test_belgian_meter_alt(hass: HomeAssistant, dsmr_connection_fixture) - ) -async def test_belgian_meter_mbus(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_belgian_meter_mbus( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """Test if Belgian meter is 
correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -812,49 +950,78 @@ async def test_belgian_meter_mbus(hass: HomeAssistant, dsmr_connection_fixture) "time_between_update": 0, } - telegram = { - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0003", "unit": ""}] - ), - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "006", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0003", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "006", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 2), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 3), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING2: MBusObject( - BELGIUM_MBUS3_METER_READING2, + 
"MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642217)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS4_METER_READING1: MBusObject( - BELGIUM_MBUS4_METER_READING1, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 4), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - } + "MBUS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -877,20 +1044,32 @@ async def test_belgian_meter_mbus(hass: HomeAssistant, dsmr_connection_fixture) active_tariff = hass.states.get("sensor.electricity_meter_active_tariff") assert active_tariff.state == "unknown" - # check if gas consumption mbus2 is parsed correctly + # check if gas consumption mbus1 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption") assert gas_consumption is None - # check if water usage mbus3 is parsed correctly - water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") - assert water_consumption is None - - # check if gas consumption mbus4 is parsed correctly + # check if gas consumption mbus2 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption_2") assert gas_consumption is None - # check if gas consumption mbus4 is parsed correctly + # check if water usage mbus3 is parsed correctly water_consumption = hass.states.get("sensor.water_meter_water_consumption") + assert water_consumption + assert water_consumption.state == "12.12" + assert ( + water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + ) + 
assert ( + water_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + + # check if gas consumption mbus4 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") assert water_consumption.state == "13.13" assert ( water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER @@ -905,7 +1084,9 @@ async def test_belgian_meter_mbus(hass: HomeAssistant, dsmr_connection_fixture) ) -async def test_belgian_meter_low(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_belgian_meter_low( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """Test if Belgian meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -919,11 +1100,12 @@ async def test_belgian_meter_low(hass: HomeAssistant, dsmr_connection_fixture) - "time_between_update": 0, } - telegram = { - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0002", "unit": ""}] - ) - } + telegram = Telegram() + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0002", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -951,7 +1133,9 @@ async def test_belgian_meter_low(hass: HomeAssistant, dsmr_connection_fixture) - assert active_tariff.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None -async def test_swedish_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_swedish_meter( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """Test if v5 meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -965,16 +1149,23 @@ async def 
test_swedish_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No "time_between_update": 0, } - telegram = { - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + telegram = Telegram() + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(123.456), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(654.321), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1017,7 +1208,9 @@ async def test_swedish_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No ) -async def test_easymeter(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_easymeter( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """Test if Q3D meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -1031,16 +1224,23 @@ async def test_easymeter(hass: HomeAssistant, dsmr_connection_fixture) -> None: "time_between_update": 0, } - telegram = { - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + telegram = Telegram() + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(54184.6316), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(19981.1069), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", @@ -1086,7 +1286,9 @@ async def test_easymeter(hass: HomeAssistant, dsmr_connection_fixture) 
-> None: ) -async def test_tcp(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_tcp( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """If proper config provided TCP connection should be made.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -1112,7 +1314,10 @@ async def test_tcp(hass: HomeAssistant, dsmr_connection_fixture) -> None: assert connection_factory.call_args_list[0][0][1] == "1234" -async def test_rfxtrx_tcp(hass: HomeAssistant, rfxtrx_dsmr_connection_fixture) -> None: +async def test_rfxtrx_tcp( + hass: HomeAssistant, + rfxtrx_dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], +) -> None: """If proper config provided RFXtrx TCP connection should be made.""" (connection_factory, transport, protocol) = rfxtrx_dsmr_connection_fixture @@ -1140,7 +1345,7 @@ async def test_rfxtrx_tcp(hass: HomeAssistant, rfxtrx_dsmr_connection_fixture) - @patch("homeassistant.components.dsmr.sensor.DEFAULT_RECONNECT_INTERVAL", 0) async def test_connection_errors_retry( - hass: HomeAssistant, dsmr_connection_fixture + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] ) -> None: """Connection should be retried on error during setup.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -1177,7 +1382,9 @@ async def test_connection_errors_retry( @patch("homeassistant.components.dsmr.sensor.DEFAULT_RECONNECT_INTERVAL", 0) -async def test_reconnect(hass: HomeAssistant, dsmr_connection_fixture) -> None: +async def test_reconnect( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: """If transport disconnects, the connection should be retried.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -1192,15 +1399,20 @@ async def test_reconnect(hass: HomeAssistant, dsmr_connection_fixture) -> None: "time_between_update": 0, } - telegram = { - 
CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) # mock waiting coroutine while connection lasts closed = asyncio.Event() @@ -1255,7 +1467,7 @@ async def test_reconnect(hass: HomeAssistant, dsmr_connection_fixture) -> None: async def test_gas_meter_providing_energy_reading( - hass: HomeAssistant, dsmr_connection_fixture + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] ) -> None: """Test that gas providing energy readings use the correct device class.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -1270,15 +1482,18 @@ async def test_gas_meter_providing_energy_reading( "time_between_update": 0, } - telegram = { - GAS_METER_READING: MBusObject( - GAS_METER_READING, + telegram = Telegram() + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(123.456), "unit": UnitOfEnergy.GIGA_JOULE}, ], ), - } + "GAS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1304,3 +1519,13 @@ async def test_gas_meter_providing_energy_reading( gas_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.GIGA_JOULE ) + + +def test_all_obis_references_exists() -> None: + """Verify that all attributes exist by name in database.""" + for sensor in SENSORS: + assert hasattr(obis_references, sensor.obis_reference) + + for sensors in SENSORS_MBUS_DEVICE_TYPE.values(): + for sensor in sensors: + assert 
hasattr(obis_references, sensor.obis_reference) diff --git a/tests/components/dsmr_reader/test_diagnostics.py b/tests/components/dsmr_reader/test_diagnostics.py index 553efd0b38b..793fe1362b0 100644 --- a/tests/components/dsmr_reader/test_diagnostics.py +++ b/tests/components/dsmr_reader/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.dsmr_reader.const import DOMAIN from homeassistant.core import HomeAssistant @@ -36,4 +37,4 @@ async def test_get_config_entry_diagnostics( diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) - assert diagnostics == snapshot + assert diagnostics == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/duotecno/conftest.py b/tests/components/duotecno/conftest.py index 1b6ba8f65e5..1bdd26bab9c 100644 --- a/tests/components/duotecno/conftest.py +++ b/tests/components/duotecno/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the duotecno tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/dwd_weather_warnings/conftest.py b/tests/components/dwd_weather_warnings/conftest.py index 40c8bf3cfa0..50c0fe51024 100644 --- a/tests/components/dwd_weather_warnings/conftest.py +++ b/tests/components/dwd_weather_warnings/conftest.py @@ -1,9 +1,9 @@ """Configuration for Deutscher Wetterdienst (DWD) Weather Warnings tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.dwd_weather_warnings.const import ( ADVANCE_WARNING_SENSOR, diff --git a/tests/components/easyenergy/conftest.py b/tests/components/easyenergy/conftest.py index 96d356b8906..ffe0e36f3d2 100644 --- 
a/tests/components/easyenergy/conftest.py +++ b/tests/components/easyenergy/conftest.py @@ -1,11 +1,11 @@ """Fixtures for easyEnergy integration tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from easyenergy import Electricity, Gas import pytest -from typing_extensions import Generator from homeassistant.components.easyenergy.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/ecobee/conftest.py b/tests/components/ecobee/conftest.py index d9583e15986..01f249bea15 100644 --- a/tests/components/ecobee/conftest.py +++ b/tests/components/ecobee/conftest.py @@ -1,10 +1,10 @@ """Fixtures for tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from requests_mock import Mocker -from typing_extensions import Generator from homeassistant.components.ecobee import ECOBEE_API_KEY, ECOBEE_REFRESH_TOKEN diff --git a/tests/components/ecobee/test_climate.py b/tests/components/ecobee/test_climate.py index ae53132fe46..1c9dcec0ad2 100644 --- a/tests/components/ecobee/test_climate.py +++ b/tests/components/ecobee/test_climate.py @@ -195,7 +195,7 @@ async def test_hvac_mode(ecobee_fixture, thermostat) -> None: async def test_hvac_modes(thermostat) -> None: """Test operation list property.""" - assert ["heat_cool", "heat", "cool", "off"] == thermostat.hvac_modes + assert thermostat.hvac_modes == ["heat_cool", "heat", "cool", "off"] async def test_hvac_mode2(ecobee_fixture, thermostat) -> None: @@ -208,51 +208,51 @@ async def test_hvac_mode2(ecobee_fixture, thermostat) -> None: async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: """Test device state attributes property.""" ecobee_fixture["equipmentStatus"] = "heatPump2" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "heatPump2", - } == 
thermostat.extra_state_attributes + } ecobee_fixture["equipmentStatus"] = "auxHeat2" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "auxHeat2", - } == thermostat.extra_state_attributes + } ecobee_fixture["equipmentStatus"] = "compCool1" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "compCool1", - } == thermostat.extra_state_attributes + } ecobee_fixture["equipmentStatus"] = "" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "", - } == thermostat.extra_state_attributes + } ecobee_fixture["equipmentStatus"] = "Unknown" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "Unknown", - } == thermostat.extra_state_attributes + } ecobee_fixture["program"]["currentClimateRef"] = "c2" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate2", "fan_min_on_time": 10, "equipment_running": "Unknown", - } == thermostat.extra_state_attributes + } async def test_is_aux_heat_on(hass: HomeAssistant) -> None: diff --git a/tests/components/ecoforest/conftest.py b/tests/components/ecoforest/conftest.py index 3eb13e58aee..85bfff08bdf 100644 --- a/tests/components/ecoforest/conftest.py +++ b/tests/components/ecoforest/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Ecoforest tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from pyecoforest.models.device import Alarm, Device, OperationMode, State import pytest -from typing_extensions import Generator from homeassistant.components.ecoforest import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME diff --git 
a/tests/components/ecovacs/conftest.py b/tests/components/ecovacs/conftest.py index 8d0033a6bc9..22039d6c0bc 100644 --- a/tests/components/ecovacs/conftest.py +++ b/tests/components/ecovacs/conftest.py @@ -1,5 +1,6 @@ """Common fixtures for the Ecovacs tests.""" +from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -9,7 +10,7 @@ from deebot_client.device import Device from deebot_client.exceptions import ApiError from deebot_client.models import Credentials import pytest -from typing_extensions import AsyncGenerator, Generator +from sucks import EventEmitter from homeassistant.components.ecovacs import PLATFORMS from homeassistant.components.ecovacs.const import DOMAIN @@ -117,6 +118,27 @@ def mock_mqtt_client(mock_authenticator: Mock) -> Generator[Mock]: yield client +@pytest.fixture +def mock_vacbot(device_fixture: str) -> Generator[Mock]: + """Mock the legacy VacBot.""" + with patch( + "homeassistant.components.ecovacs.controller.VacBot", + autospec=True, + ) as mock: + vacbot = mock.return_value + vacbot.vacuum = load_json_object_fixture( + f"devices/{device_fixture}/device.json", DOMAIN + ) + vacbot.statusEvents = EventEmitter() + vacbot.batteryEvents = EventEmitter() + vacbot.lifespanEvents = EventEmitter() + vacbot.errorEvents = EventEmitter() + vacbot.battery_status = None + vacbot.fan_speed = None + vacbot.components = {} + yield vacbot + + @pytest.fixture def mock_device_execute() -> Generator[AsyncMock]: """Mock the device execute function.""" @@ -154,7 +176,7 @@ async def init_integration( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) yield mock_config_entry diff --git a/tests/components/ecovacs/fixtures/devices/123/device.json b/tests/components/ecovacs/fixtures/devices/123/device.json new file mode 100644 index 
00000000000..07bdf01b156 --- /dev/null +++ b/tests/components/ecovacs/fixtures/devices/123/device.json @@ -0,0 +1,23 @@ +{ + "did": "E1234567890000000003", + "name": "E1234567890000000003", + "class": "123", + "resource": "atom", + "company": "eco-legacy", + "deviceName": "DEEBOT Slim2 Series", + "icon": "https://portal-ww.ecouser.net/api/pim/file/get/5d2c150dba13eb00013feaae", + "ota": false, + "UILogicId": "ECO_INTL_123", + "materialNo": "110-1639-0102", + "pid": "5cae9b201285190001685977", + "product_category": "DEEBOT", + "model": "Slim2", + "updateInfo": { + "needUpdate": false, + "changeLog": "" + }, + "nick": null, + "homeSort": 9999, + "status": 2, + "otaUpgrade": {} +} diff --git a/tests/components/ecovacs/snapshots/test_button.ambr b/tests/components/ecovacs/snapshots/test_button.ambr index d250a60a35f..efae8896962 100644 --- a/tests/components/ecovacs/snapshots/test_button.ambr +++ b/tests/components/ecovacs/snapshots/test_button.ambr @@ -229,7 +229,7 @@ 'state': '2024-01-01T00:00:00+00:00', }) # --- -# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brushes_lifespan:entity-registry] +# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brush_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -241,7 +241,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': , - 'entity_id': 'button.ozmo_950_reset_side_brushes_lifespan', + 'entity_id': 'button.ozmo_950_reset_side_brush_lifespan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -253,7 +253,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Reset side brushes lifespan', + 'original_name': 'Reset side brush lifespan', 'platform': 'ecovacs', 'previous_unique_id': None, 'supported_features': 0, @@ -262,13 +262,13 @@ 'unit_of_measurement': None, }) # --- -# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brushes_lifespan:state] +# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brush_lifespan:state] 
StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Ozmo 950 Reset side brushes lifespan', + 'friendly_name': 'Ozmo 950 Reset side brush lifespan', }), 'context': , - 'entity_id': 'button.ozmo_950_reset_side_brushes_lifespan', + 'entity_id': 'button.ozmo_950_reset_side_brush_lifespan', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/ecovacs/snapshots/test_init.ambr b/tests/components/ecovacs/snapshots/test_init.ambr index f47e747b1cf..9113445cc31 100644 --- a/tests/components/ecovacs/snapshots/test_init.ambr +++ b/tests/components/ecovacs/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'Ecovacs', 'model': 'DEEBOT OZMO 950 Series', + 'model_id': 'yna5xi', 'name': 'Ozmo 950', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'E1234567890000000001', 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/ecovacs/snapshots/test_number.ambr b/tests/components/ecovacs/snapshots/test_number.ambr index da8406491b4..c80132784e1 100644 --- a/tests/components/ecovacs/snapshots/test_number.ambr +++ b/tests/components/ecovacs/snapshots/test_number.ambr @@ -1,4 +1,115 @@ # serializer version: 1 +# name: test_number_entities[5xu9h3][number.goat_g1_cut_direction:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 180, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.goat_g1_cut_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cut direction', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cut_direction', + 'unique_id': 
'8516fbb1-17f1-4194-0000000_cut_direction', + 'unit_of_measurement': '°', + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_cut_direction:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Cut direction', + 'max': 180, + 'min': 0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'number.goat_g1_cut_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45', + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_volume:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.goat_g1_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '8516fbb1-17f1-4194-0000000_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_volume:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Volume', + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.goat_g1_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- # name: test_number_entities[yna5x1][number.ozmo_950_volume:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index e2cee3d410f..659edfde2cf 100644 --- 
a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -1,4 +1,152 @@ # serializer version: 1 +# name: test_legacy_sensors[123][sensor.e1234567890000000003_filter_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_filter_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Filter lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_filter', + 'unique_id': 'E1234567890000000003_lifespan_filter', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_filter_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Filter lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_filter_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_main_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_main_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Main brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_main_brush', + 'unique_id': 'E1234567890000000003_lifespan_main_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_main_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Main brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_main_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_side_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_side_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Side brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_side_brush', + 'unique_id': 'E1234567890000000003_lifespan_side_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_side_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Side brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_side_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_legacy_sensors[123][states] + list([ + 
'sensor.e1234567890000000003_main_brush_lifespan', + 'sensor.e1234567890000000003_side_brush_lifespan', + 'sensor.e1234567890000000003_filter_lifespan', + ]) +# --- # name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -910,7 +1058,7 @@ 'state': '80', }) # --- -# name: test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_side_brush_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -922,7 +1070,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.ozmo_950_side_brushes_lifespan', + 'entity_id': 'sensor.ozmo_950_side_brush_lifespan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -934,7 +1082,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Side brushes lifespan', + 'original_name': 'Side brush lifespan', 'platform': 'ecovacs', 'previous_unique_id': None, 'supported_features': 0, @@ -943,14 +1091,14 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_side_brush_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Ozmo 950 Side brushes lifespan', + 'friendly_name': 'Ozmo 950 Side brush lifespan', 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.ozmo_950_side_brushes_lifespan', + 'entity_id': 'sensor.ozmo_950_side_brush_lifespan', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/ecovacs/test_button.py b/tests/components/ecovacs/test_button.py index 08d53f3e93d..4b3068f6cda 100644 --- a/tests/components/ecovacs/test_button.py +++ b/tests/components/ecovacs/test_button.py @@ -42,7 +42,7 @@ def platforms() -> Platform | list[Platform]: ResetLifeSpan(LifeSpan.FILTER), ), ( - "button.ozmo_950_reset_side_brushes_lifespan", + 
"button.ozmo_950_reset_side_brush_lifespan", ResetLifeSpan(LifeSpan.SIDE_BRUSH), ), ], @@ -107,7 +107,7 @@ async def test_buttons( [ "button.ozmo_950_reset_main_brush_lifespan", "button.ozmo_950_reset_filter_lifespan", - "button.ozmo_950_reset_side_brushes_lifespan", + "button.ozmo_950_reset_side_brush_lifespan", ], ), ( diff --git a/tests/components/ecovacs/test_config_flow.py b/tests/components/ecovacs/test_config_flow.py index 0a161f88baa..5bf1144db0b 100644 --- a/tests/components/ecovacs/test_config_flow.py +++ b/tests/components/ecovacs/test_config_flow.py @@ -11,28 +11,23 @@ from deebot_client.mqtt_client import create_mqtt_config import pytest from homeassistant.components.ecovacs.const import ( - CONF_CONTINENT, CONF_OVERRIDE_MQTT_URL, CONF_OVERRIDE_REST_URL, CONF_VERIFY_MQTT_CERTIFICATE, DOMAIN, InstanceMode, ) -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_COUNTRY, CONF_MODE, CONF_USERNAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_MODE, CONF_USERNAME +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir from .const import ( - IMPORT_DATA, VALID_ENTRY_DATA_CLOUD, VALID_ENTRY_DATA_SELF_HOSTED, VALID_ENTRY_DATA_SELF_HOSTED_WITH_VALIDATE_CERT, ) -from tests.common import MockConfigEntry - _USER_STEP_SELF_HOSTED = {CONF_MODE: InstanceMode.SELF_HOSTED} _TEST_FN_AUTH_ARG = "user_input_auth" @@ -303,116 +298,3 @@ async def test_user_flow_self_hosted_error( mock_setup_entry.assert_called() mock_authenticator_authenticate.assert_called() mock_mqtt_client.verify_config.assert_called() - - -async def test_import_flow( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_setup_entry: AsyncMock, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, -) 
-> None: - """Test importing yaml config.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=IMPORT_DATA.copy(), - ) - mock_authenticator_authenticate.assert_called() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == VALID_ENTRY_DATA_CLOUD[CONF_USERNAME] - assert result["data"] == VALID_ENTRY_DATA_CLOUD - assert (HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}") in issue_registry.issues - mock_setup_entry.assert_called() - mock_mqtt_client.verify_config.assert_called() - - -async def test_import_flow_already_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test importing yaml config where entry already configured.""" - entry = MockConfigEntry(domain=DOMAIN, data=VALID_ENTRY_DATA_CLOUD) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=IMPORT_DATA.copy(), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert (HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}") in issue_registry.issues - - -@pytest.mark.parametrize("show_advanced_options", [True, False]) -@pytest.mark.parametrize( - ("side_effect", "reason"), - [ - (ClientError, "cannot_connect"), - (InvalidAuthenticationError, "invalid_auth"), - (Exception, "unknown"), - ], -) -async def test_import_flow_error( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, - side_effect: Exception, - reason: str, - show_advanced_options: bool, -) -> None: - """Test handling invalid connection.""" - mock_authenticator_authenticate.side_effect = side_effect - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_IMPORT, - "show_advanced_options": show_advanced_options, - }, - data=IMPORT_DATA.copy(), - ) - assert result["type"] is 
FlowResultType.ABORT - assert result["reason"] == reason - assert ( - DOMAIN, - f"deprecated_yaml_import_issue_{reason}", - ) in issue_registry.issues - mock_authenticator_authenticate.assert_called() - - -@pytest.mark.parametrize("show_advanced_options", [True, False]) -@pytest.mark.parametrize( - ("reason", "user_input"), - [ - ("invalid_country_length", IMPORT_DATA | {CONF_COUNTRY: "too_long"}), - ("invalid_country_length", IMPORT_DATA | {CONF_COUNTRY: "a"}), # too short - ("invalid_continent_length", IMPORT_DATA | {CONF_CONTINENT: "too_long"}), - ("invalid_continent_length", IMPORT_DATA | {CONF_CONTINENT: "a"}), # too short - ("continent_not_match", IMPORT_DATA | {CONF_CONTINENT: "AA"}), - ], -) -async def test_import_flow_invalid_data( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - reason: str, - user_input: dict[str, Any], - show_advanced_options: bool, -) -> None: - """Test handling invalid connection.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_IMPORT, - "show_advanced_options": show_advanced_options, - }, - data=user_input, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - assert ( - DOMAIN, - f"deprecated_yaml_import_issue_{reason}", - ) in issue_registry.issues diff --git a/tests/components/ecovacs/test_diagnostics.py b/tests/components/ecovacs/test_diagnostics.py index b025db43cc0..6e4dcd5f677 100644 --- a/tests/components/ecovacs/test_diagnostics.py +++ b/tests/components/ecovacs/test_diagnostics.py @@ -28,4 +28,4 @@ async def test_diagnostics( """Test diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, init_integration - ) == snapshot(exclude=props("entry_id")) + ) == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/ecovacs/test_init.py b/tests/components/ecovacs/test_init.py index 27d00a2d023..2185ae4c9eb 100644 --- a/tests/components/ecovacs/test_init.py +++ 
b/tests/components/ecovacs/test_init.py @@ -1,7 +1,6 @@ """Test init of ecovacs.""" -from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import Mock, patch from deebot_client.exceptions import DeebotError, InvalidAuthenticationError import pytest @@ -12,9 +11,6 @@ from homeassistant.components.ecovacs.controller import EcovacsController from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.setup import async_setup_component - -from .const import IMPORT_DATA from tests.common import MockConfigEntry @@ -88,32 +84,6 @@ async def test_invalid_auth( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR -@pytest.mark.parametrize( - ("config", "config_entries_expected"), - [ - ({}, 0), - ({DOMAIN: IMPORT_DATA.copy()}, 1), - ], - ids=["no_config", "import_config"], -) -async def test_async_setup_import( - hass: HomeAssistant, - config: dict[str, Any], - config_entries_expected: int, - mock_setup_entry: AsyncMock, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, -) -> None: - """Test async_setup config import.""" - assert len(hass.config_entries.async_entries(DOMAIN)) == 0 - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(DOMAIN)) == config_entries_expected - assert mock_setup_entry.call_count == config_entries_expected - assert mock_authenticator_authenticate.call_count == config_entries_expected - assert mock_mqtt_client.verify_config.call_count == config_entries_expected - - async def test_devices_in_dr( device_registry: dr.DeviceRegistry, controller: EcovacsController, @@ -129,12 +99,15 @@ async def test_devices_in_dr( assert device_entry == snapshot(name=device.device_info["did"]) -@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") 
+@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_vacbot", "init_integration" +) @pytest.mark.parametrize( ("device_fixture", "entities"), [ ("yna5x1", 26), - ("5xu9h3", 24), + ("5xu9h3", 25), + ("123", 1), ], ) async def test_all_entities_loaded( diff --git a/tests/components/ecovacs/test_number.py b/tests/components/ecovacs/test_number.py index d444d6510a8..a735863d40a 100644 --- a/tests/components/ecovacs/test_number.py +++ b/tests/components/ecovacs/test_number.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from deebot_client.command import Command -from deebot_client.commands.json import SetVolume -from deebot_client.events import Event, VolumeEvent +from deebot_client.commands.json import SetCutDirection, SetVolume +from deebot_client.events import CutDirectionEvent, Event, VolumeEvent import pytest from syrupy import SnapshotAssertion @@ -53,8 +53,23 @@ class NumberTestCase: ), ], ), + ( + "5xu9h3", + [ + NumberTestCase( + "number.goat_g1_volume", VolumeEvent(3, 11), "3", 7, SetVolume(7) + ), + NumberTestCase( + "number.goat_g1_cut_direction", + CutDirectionEvent(45), + "45", + 97, + SetCutDirection(97), + ), + ], + ), ], - ids=["yna5x1"], + ids=["yna5x1", "5xu9h3"], ) async def test_number_entities( hass: HomeAssistant, @@ -107,8 +122,12 @@ async def test_number_entities( "yna5x1", ["number.ozmo_950_volume"], ), + ( + "5xu9h3", + ["number.goat_g1_cut_direction", "number.goat_g1_volume"], + ), ], - ids=["yna5x1"], + ids=["yna5x1", "5xu9h3"], ) async def test_disabled_by_default_number_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, entity_ids: list[str] @@ -125,6 +144,7 @@ async def test_disabled_by_default_number_entities( @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize(("device_fixture"), ["yna5x1"]) async def test_volume_maximum( hass: HomeAssistant, controller: EcovacsController, diff --git a/tests/components/ecovacs/test_sensor.py 
b/tests/components/ecovacs/test_sensor.py index 005d10bffbd..53c57999776 100644 --- a/tests/components/ecovacs/test_sensor.py +++ b/tests/components/ecovacs/test_sensor.py @@ -1,5 +1,7 @@ """Tests for Ecovacs sensors.""" +from unittest.mock import Mock + from deebot_client.event_bus import EventBus from deebot_client.events import ( BatteryEvent, @@ -64,7 +66,7 @@ async def notify_events(hass: HomeAssistant, event_bus: EventBus): "sensor.ozmo_950_wi_fi_ssid", "sensor.ozmo_950_main_brush_lifespan", "sensor.ozmo_950_filter_lifespan", - "sensor.ozmo_950_side_brushes_lifespan", + "sensor.ozmo_950_side_brush_lifespan", "sensor.ozmo_950_error", ], ), @@ -152,3 +154,34 @@ async def test_disabled_by_default_sensors( ), f"Entity registry entry for {entity_id} is missing" assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_vacbot", "init_integration" +) +@pytest.mark.parametrize(("device_fixture"), ["123"]) +async def test_legacy_sensors( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_vacbot: Mock, +) -> None: + """Test that sensor entity snapshots match.""" + mock_vacbot.components = {"main_brush": 0.8, "side_brush": 0.6, "filter": 0.4} + mock_vacbot.lifespanEvents.notify("dummy_data") + await hass.async_block_till_done(wait_background_tasks=True) + + states = hass.states.async_entity_ids() + assert snapshot(name="states") == states + + for entity_id in hass.states.async_entity_ids(): + assert (state := hass.states.get(entity_id)), f"State of {entity_id} is missing" + assert snapshot(name=f"{entity_id}:state") == state + + assert (entity_entry := entity_registry.async_get(state.entity_id)) + assert snapshot(name=f"{entity_id}:entity-registry") == entity_entry + + assert entity_entry.device_id + assert (device_entry := device_registry.async_get(entity_entry.device_id)) + 
assert device_entry.identifiers == {(DOMAIN, "E1234567890000000003")} diff --git a/tests/components/ecovacs/test_services.py b/tests/components/ecovacs/test_services.py index 973c63782ec..6fd10cde6d9 100644 --- a/tests/components/ecovacs/test_services.py +++ b/tests/components/ecovacs/test_services.py @@ -16,9 +16,7 @@ pytestmark = [pytest.mark.usefixtures("init_integration")] @pytest.fixture -def mock_device_execute_response( - data: dict[str, Any], -) -> Generator[dict[str, Any], None, None]: +def mock_device_execute_response(data: dict[str, Any]) -> Generator[dict[str, Any]]: """Mock the device execute function response.""" response = { @@ -73,7 +71,7 @@ def mock_device_execute_response( ) async def test_get_positions_service( hass: HomeAssistant, - mock_device_execute_response: dict[str], + mock_device_execute_response: dict[str, Any], entity_id: str, ) -> None: """Test that get_positions service response snapshots match.""" diff --git a/tests/components/edl21/conftest.py b/tests/components/edl21/conftest.py index b6af4ea9cef..1b14e3366d8 100644 --- a/tests/components/edl21/conftest.py +++ b/tests/components/edl21/conftest.py @@ -1,9 +1,9 @@ """Define test fixtures for EDL21.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/electric_kiwi/conftest.py b/tests/components/electric_kiwi/conftest.py index c9f9c7e04f0..010efcb7b5f 100644 --- a/tests/components/electric_kiwi/conftest.py +++ b/tests/components/electric_kiwi/conftest.py @@ -2,13 +2,12 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from time import time from unittest.mock import AsyncMock, patch from electrickiwi_api.model import AccountBalance, Hop, HopIntervals import pytest -from typing_extensions import Generator from homeassistant.components.application_credentials 
import ( ClientCredential, diff --git a/tests/components/elevenlabs/__init__.py b/tests/components/elevenlabs/__init__.py new file mode 100644 index 00000000000..261286f04f7 --- /dev/null +++ b/tests/components/elevenlabs/__init__.py @@ -0,0 +1 @@ +"""Tests for the ElevenLabs integration.""" diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py new file mode 100644 index 00000000000..c4d9a87b5ad --- /dev/null +++ b/tests/components/elevenlabs/conftest.py @@ -0,0 +1,65 @@ +"""Common fixtures for the ElevenLabs text-to-speech tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from elevenlabs.core import ApiError +from elevenlabs.types import GetVoicesResponse +import pytest + +from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE +from homeassistant.const import CONF_API_KEY + +from .const import MOCK_MODELS, MOCK_VOICES + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.elevenlabs.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_async_client() -> Generator[AsyncMock]: + """Override async ElevenLabs client.""" + client_mock = AsyncMock() + client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) + client_mock.models.get_all.return_value = MOCK_MODELS + with patch( + "elevenlabs.client.AsyncElevenLabs", return_value=client_mock + ) as mock_async_client: + yield mock_async_client + + +@pytest.fixture +def mock_async_client_fail() -> Generator[AsyncMock]: + """Override async ElevenLabs client.""" + with patch( + "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", + return_value=AsyncMock(), + ) as mock_async_client: + mock_async_client.side_effect = ApiError + yield mock_async_client + + +@pytest.fixture +def 
mock_entry() -> MockConfigEntry: + """Mock a config entry.""" + entry = MockConfigEntry( + domain="elevenlabs", + data={ + CONF_API_KEY: "api_key", + }, + options={CONF_MODEL: "model1", CONF_VOICE: "voice1"}, + ) + entry.models = { + "model1": "model1", + } + + entry.voices = {"voice1": "voice1"} + return entry diff --git a/tests/components/elevenlabs/const.py b/tests/components/elevenlabs/const.py new file mode 100644 index 00000000000..e16e1fd1334 --- /dev/null +++ b/tests/components/elevenlabs/const.py @@ -0,0 +1,52 @@ +"""Constants for the Testing of the ElevenLabs text-to-speech integration.""" + +from elevenlabs.types import LanguageResponse, Model, Voice + +from homeassistant.components.elevenlabs.const import DEFAULT_MODEL + +MOCK_VOICES = [ + Voice( + voice_id="voice1", + name="Voice 1", + ), + Voice( + voice_id="voice2", + name="Voice 2", + ), +] + +MOCK_MODELS = [ + Model( + model_id="model1", + name="Model 1", + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), + Model( + model_id="model2", + name="Model 2", + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), + Model( + model_id=DEFAULT_MODEL, + name=DEFAULT_MODEL, + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), +] diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py new file mode 100644 index 
00000000000..853c49d48ff --- /dev/null +++ b/tests/components/elevenlabs/test_config_flow.py @@ -0,0 +1,94 @@ +"""Test the ElevenLabs text-to-speech config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.elevenlabs.const import ( + CONF_MODEL, + CONF_VOICE, + DEFAULT_MODEL, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_user_step( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_async_client: AsyncMock, +) -> None: + """Test user step create entry result.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "api_key", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "ElevenLabs" + assert result["data"] == { + "api_key": "api_key", + } + assert result["options"] == {CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: "voice1"} + + mock_setup_entry.assert_called_once() + + +async def test_invalid_api_key( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_async_client_fail: AsyncMock +) -> None: + """Test user step with invalid api key.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "api_key", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] + + mock_setup_entry.assert_not_called() + + +async def test_options_flow_init( + hass: HomeAssistant, + mock_setup_entry: 
AsyncMock, + mock_async_client: AsyncMock, + mock_entry: MockConfigEntry, +) -> None: + """Test options flow init.""" + mock_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(mock_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_MODEL: "model1", CONF_VOICE: "voice1"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_entry.options == {CONF_MODEL: "model1", CONF_VOICE: "voice1"} + + mock_setup_entry.assert_called_once() diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py new file mode 100644 index 00000000000..8b14ab26487 --- /dev/null +++ b/tests/components/elevenlabs/test_tts.py @@ -0,0 +1,270 @@ +"""Tests for the ElevenLabs TTS entity.""" + +from __future__ import annotations + +from http import HTTPStatus +from pathlib import Path +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +from elevenlabs.core import ApiError +from elevenlabs.types import GetVoicesResponse +import pytest + +from homeassistant.components import tts +from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE, DOMAIN +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + DOMAIN as DOMAIN_MP, + SERVICE_PLAY_MEDIA, +) +from homeassistant.config import async_process_ha_core_config +from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY +from homeassistant.core import HomeAssistant, ServiceCall + +from .const import MOCK_MODELS, MOCK_VOICES + +from tests.common import MockConfigEntry, async_mock_service +from tests.components.tts.common import retrieve_media +from tests.typing import ClientSessionGenerator + + +@pytest.fixture(autouse=True) +def 
tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: + """Mock writing tags.""" + + +@pytest.fixture(autouse=True) +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: + """Mock the TTS cache dir with empty dir.""" + + +@pytest.fixture +async def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Mock media player calls.""" + return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) + + +@pytest.fixture(autouse=True) +async def setup_internal_url(hass: HomeAssistant) -> None: + """Set up internal url.""" + await async_process_ha_core_config( + hass, {"internal_url": "http://example.local:8123"} + ) + + +@pytest.fixture(name="setup") +async def setup_fixture( + hass: HomeAssistant, + config_data: dict[str, Any], + config_options: dict[str, Any], + request: pytest.FixtureRequest, + mock_async_client: AsyncMock, +) -> AsyncMock: + """Set up the test environment.""" + if request.param == "mock_config_entry_setup": + await mock_config_entry_setup(hass, config_data, config_options) + else: + raise RuntimeError("Invalid setup fixture") + + await hass.async_block_till_done() + return mock_async_client + + +@pytest.fixture(name="config_data") +def config_data_fixture() -> dict[str, Any]: + """Return config data.""" + return {} + + +@pytest.fixture(name="config_options") +def config_options_fixture() -> dict[str, Any]: + """Return config options.""" + return {} + + +async def mock_config_entry_setup( + hass: HomeAssistant, config_data: dict[str, Any], config_options: dict[str, Any] +) -> None: + """Mock config entry setup.""" + default_config_data = { + CONF_API_KEY: "api_key", + } + default_config_options = { + CONF_VOICE: "voice1", + CONF_MODEL: "model1", + } + config_entry = MockConfigEntry( + domain=DOMAIN, + data=default_config_data | config_data, + options=default_config_options | config_options, + ) + config_entry.add_to_hass(hass) + client_mock = AsyncMock() + client_mock.voices.get_all.return_value = 
GetVoicesResponse(voices=MOCK_VOICES) + client_mock.models.get_all.return_value = MOCK_MODELS + with patch( + "homeassistant.components.elevenlabs.AsyncElevenLabs", return_value=client_mock + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + +@pytest.mark.parametrize( + "config_data", + [ + {}, + {tts.CONF_LANG: "de"}, + {tts.CONF_LANG: "en"}, + {tts.CONF_LANG: "ja"}, + {tts.CONF_LANG: "es"}, + ], +) +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice2"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test tts service.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice2", model="model1" + ) + + +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_LANGUAGE: "de", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + 
tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_LANGUAGE: "es", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_lang_config( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test service call say with other langcodes in the config.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice1", model="model1" + ) + + +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_error( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test service call say with http response 400.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + tts_entity._client.generate.side_effect = ApiError + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await 
retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.NOT_FOUND + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice1", model="model1" + ) diff --git a/tests/components/elgato/conftest.py b/tests/components/elgato/conftest.py index aaaed0dc8da..73b09421576 100644 --- a/tests/components/elgato/conftest.py +++ b/tests/components/elgato/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Elgato integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from elgato import BatteryInfo, ElgatoNoBatteryError, Info, Settings, State import pytest -from typing_extensions import Generator from homeassistant.components.elgato.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT diff --git a/tests/components/elgato/snapshots/test_button.ambr b/tests/components/elgato/snapshots/test_button.ambr index e7477540f46..dcf9d1c87d0 100644 --- a/tests/components/elgato/snapshots/test_button.ambr +++ b/tests/components/elgato/snapshots/test_button.ambr @@ -72,8 +72,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', @@ -153,8 +155,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', diff --git a/tests/components/elgato/snapshots/test_light.ambr b/tests/components/elgato/snapshots/test_light.ambr index e2f663d294b..c3ab076ded2 100644 --- a/tests/components/elgato/snapshots/test_light.ambr +++ b/tests/components/elgato/snapshots/test_light.ambr @@ -104,8 +104,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light', + 
'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'CN11A1A00001', 'suggested_area': None, 'sw_version': '1.0.3 (192)', @@ -219,8 +221,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Light Strip', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'CN11A1A00001', 'suggested_area': None, 'sw_version': '1.0.3 (192)', @@ -334,8 +338,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Light Strip', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'CN11A1A00001', 'suggested_area': None, 'sw_version': '1.0.3 (192)', diff --git a/tests/components/elgato/snapshots/test_sensor.ambr b/tests/components/elgato/snapshots/test_sensor.ambr index 2b52d6b9f23..be0ec0a56c5 100644 --- a/tests/components/elgato/snapshots/test_sensor.ambr +++ b/tests/components/elgato/snapshots/test_sensor.ambr @@ -79,8 +79,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', @@ -170,8 +172,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', @@ -261,8 +265,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', @@ -349,8 +355,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', @@ -440,8 
+448,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', diff --git a/tests/components/elgato/snapshots/test_switch.ambr b/tests/components/elgato/snapshots/test_switch.ambr index 41f3a8f3aaf..ba95160d28a 100644 --- a/tests/components/elgato/snapshots/test_switch.ambr +++ b/tests/components/elgato/snapshots/test_switch.ambr @@ -71,8 +71,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', @@ -151,8 +153,10 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GW24L1A02987', 'suggested_area': None, 'sw_version': '1.0.4 (229)', diff --git a/tests/components/elmax/conftest.py b/tests/components/elmax/conftest.py index 552aa138f1b..f92fc2f1827 100644 --- a/tests/components/elmax/conftest.py +++ b/tests/components/elmax/conftest.py @@ -1,5 +1,6 @@ """Configuration for Elmax tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, patch @@ -12,7 +13,6 @@ from elmax_api.constants import ( from httpx import Response import pytest import respx -from typing_extensions import Generator from . 
import ( MOCK_DIRECT_HOST, diff --git a/tests/components/elmax/snapshots/test_alarm_control_panel.ambr b/tests/components/elmax/snapshots/test_alarm_control_panel.ambr index f09ba6752c5..f175fc707bb 100644 --- a/tests/components/elmax/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/elmax/snapshots/test_alarm_control_panel.ambr @@ -46,7 +46,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disarmed', }) # --- # name: test_alarm_control_panels[alarm_control_panel.direct_panel_https_1_1_1_1_443_api_v2_area_2-entry] @@ -96,7 +96,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disarmed', }) # --- # name: test_alarm_control_panels[alarm_control_panel.direct_panel_https_1_1_1_1_443_api_v2_area_3-entry] @@ -146,6 +146,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disarmed', }) # --- diff --git a/tests/components/elmax/test_alarm_control_panel.py b/tests/components/elmax/test_alarm_control_panel.py index 6e4f09710fc..76dc8845662 100644 --- a/tests/components/elmax/test_alarm_control_panel.py +++ b/tests/components/elmax/test_alarm_control_panel.py @@ -1,9 +1,11 @@ """Tests for the Elmax alarm control panels.""" +from datetime import timedelta from unittest.mock import patch from syrupy import SnapshotAssertion +from homeassistant.components.elmax import POLLING_SECONDS from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -12,6 +14,8 @@ from . 
import init_integration from tests.common import snapshot_platform +WAIT = timedelta(seconds=POLLING_SECONDS) + async def test_alarm_control_panels( hass: HomeAssistant, diff --git a/tests/components/elvia/conftest.py b/tests/components/elvia/conftest.py index 0708e5c698a..13955db49d5 100644 --- a/tests/components/elvia/conftest.py +++ b/tests/components/elvia/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Elvia tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/emoncms/__init__.py b/tests/components/emoncms/__init__.py new file mode 100644 index 00000000000..ecf3c54e9ed --- /dev/null +++ b/tests/components/emoncms/__init__.py @@ -0,0 +1 @@ +"""Tests for the emoncms component.""" diff --git a/tests/components/emoncms/conftest.py b/tests/components/emoncms/conftest.py new file mode 100644 index 00000000000..500fff228e9 --- /dev/null +++ b/tests/components/emoncms/conftest.py @@ -0,0 +1,47 @@ +"""Fixtures for emoncms integration tests.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +import pytest + +UNITS = ["kWh", "Wh", "W", "V", "A", "VA", "°C", "°F", "K", "Hz", "hPa", ""] + + +def get_feed( + number: int, unit: str = "W", value: int = 18.04, timestamp: int = 1665509570 +): + """Generate feed details.""" + return { + "id": str(number), + "userid": "1", + "name": f"parameter {number}", + "tag": "tag", + "size": "35809224", + "unit": unit, + "time": timestamp, + "value": value, + } + + +FEEDS = [get_feed(i + 1, unit=unit) for i, unit in enumerate(UNITS)] + + +EMONCMS_FAILURE = {"success": False, "message": "failure"} + + +@pytest.fixture +async def emoncms_client() -> AsyncGenerator[AsyncMock]: + """Mock pyemoncms success response.""" + with ( + patch( + "homeassistant.components.emoncms.sensor.EmoncmsClient", autospec=True + ) as mock_client, + patch( + 
"homeassistant.components.emoncms.coordinator.EmoncmsClient", + new=mock_client, + ), + ): + client = mock_client.return_value + client.async_request.return_value = {"success": True, "message": FEEDS} + yield client diff --git a/tests/components/emoncms/snapshots/test_sensor.ambr b/tests/components/emoncms/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..62c85aaba01 --- /dev/null +++ b/tests/components/emoncms/snapshots/test_sensor.ambr @@ -0,0 +1,24 @@ +# serializer version: 1 +# name: test_coordinator_update[sensor.emoncms_parameter_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'FeedId': '1', + 'FeedName': 'parameter 1', + 'LastUpdated': 1665509570, + 'LastUpdatedStr': '2022-10-11T10:32:50-07:00', + 'Size': '35809224', + 'Tag': 'tag', + 'UserId': '1', + 'device_class': 'temperature', + 'friendly_name': 'EmonCMS parameter 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.emoncms_parameter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '18.04', + }) +# --- diff --git a/tests/components/emoncms/test_sensor.py b/tests/components/emoncms/test_sensor.py new file mode 100644 index 00000000000..a039239077e --- /dev/null +++ b/tests/components/emoncms/test_sensor.py @@ -0,0 +1,90 @@ +"""Test emoncms sensor.""" + +from typing import Any +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_PLATFORM, CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_setup_component + +from .conftest import EMONCMS_FAILURE, FEEDS, get_feed + +from tests.common import async_fire_time_changed 
+ +YAML = { + CONF_PLATFORM: "emoncms", + CONF_API_KEY: "my_api_key", + CONF_ID: 1, + CONF_URL: "http://1.1.1.1", + CONF_ONLY_INCLUDE_FEEDID: [1, 2], + "scan_interval": 30, +} + + +@pytest.fixture +def emoncms_yaml_config() -> ConfigType: + """Mock emoncms configuration from yaml.""" + return {"sensor": YAML} + + +def get_entity_ids(feeds: list[dict[str, Any]]) -> list[str]: + """Get emoncms entity ids.""" + return [ + f"{SENSOR_DOMAIN}.{DOMAIN}_{feed["name"].replace(' ', '_')}" for feed in feeds + ] + + +def get_feeds(nbs: list[int]) -> list[dict[str, Any]]: + """Get feeds.""" + return [feed for feed in FEEDS if feed["id"] in str(nbs)] + + +async def test_coordinator_update( + hass: HomeAssistant, + emoncms_yaml_config: ConfigType, + snapshot: SnapshotAssertion, + emoncms_client: AsyncMock, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator update.""" + emoncms_client.async_request.return_value = { + "success": True, + "message": [get_feed(1, unit="°C")], + } + await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config) + await hass.async_block_till_done() + feeds = get_feeds([1]) + for entity_id in get_entity_ids(feeds): + state = hass.states.get(entity_id) + assert state == snapshot(name=entity_id) + + async def skip_time() -> None: + freezer.tick(60) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + emoncms_client.async_request.return_value = { + "success": True, + "message": [get_feed(1, unit="°C", value=24.04, timestamp=1665509670)], + } + + await skip_time() + + for entity_id in get_entity_ids(feeds): + state = hass.states.get(entity_id) + assert state.attributes["LastUpdated"] == 1665509670 + assert state.state == "24.04" + + emoncms_client.async_request.return_value = EMONCMS_FAILURE + + await skip_time() + + assert f"Error fetching {DOMAIN}_coordinator data" in caplog.text diff --git a/tests/components/emulated_hue/test_hue_api.py 
b/tests/components/emulated_hue/test_hue_api.py index 4edd52b812d..28e269fdaeb 100644 --- a/tests/components/emulated_hue/test_hue_api.py +++ b/tests/components/emulated_hue/test_hue_api.py @@ -1,11 +1,14 @@ """The tests for the emulated Hue component.""" +from __future__ import annotations + import asyncio +from collections.abc import Generator from datetime import timedelta from http import HTTPStatus from ipaddress import ip_address import json -from unittest.mock import patch +from unittest.mock import AsyncMock, _patch, patch from aiohttp.hdrs import CONTENT_TYPE from aiohttp.test_utils import TestClient @@ -56,6 +59,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util +from homeassistant.util.json import JsonObjectType from tests.common import ( async_fire_time_changed, @@ -104,14 +108,14 @@ ENTITY_IDS_BY_NUMBER = { ENTITY_NUMBERS_BY_ID = {v: k for k, v in ENTITY_IDS_BY_NUMBER.items()} -def patch_upnp(): +def patch_upnp() -> _patch[AsyncMock]: """Patch async_create_upnp_datagram_endpoint.""" return patch( "homeassistant.components.emulated_hue.async_create_upnp_datagram_endpoint" ) -async def async_get_lights(client): +async def async_get_lights(client: TestClient) -> JsonObjectType: """Get lights with the hue client.""" result = await client.get("/api/username/lights") assert result.status == HTTPStatus.OK @@ -131,7 +135,7 @@ async def _async_setup_emulated_hue(hass: HomeAssistant, conf: ConfigType) -> No @pytest.fixture -async def base_setup(hass): +async def base_setup(hass: HomeAssistant) -> None: """Set up homeassistant and http.""" await asyncio.gather( setup.async_setup_component(hass, "homeassistant", {}), @@ -142,7 +146,7 @@ async def base_setup(hass): @pytest.fixture(autouse=True) -async def wanted_platforms_only() -> None: +def wanted_platforms_only() -> Generator[None]: """Enable only the wanted demo platforms.""" 
with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -159,7 +163,7 @@ async def wanted_platforms_only() -> None: @pytest.fixture -async def demo_setup(hass, wanted_platforms_only): +async def demo_setup(hass: HomeAssistant, wanted_platforms_only: None) -> None: """Fixture to setup demo platforms.""" # We need to do this to get access to homeassistant/turn_(on,off) setups = [ @@ -211,7 +215,9 @@ async def demo_setup(hass, wanted_platforms_only): @pytest.fixture -async def hass_hue(hass, base_setup, demo_setup): +async def hass_hue( + hass: HomeAssistant, base_setup: None, demo_setup: None +) -> HomeAssistant: """Set up a Home Assistant instance for these tests.""" await _async_setup_emulated_hue( hass, @@ -245,7 +251,7 @@ def _mock_hue_endpoints( @pytest.fixture async def hue_client( - hass_hue, hass_client_no_auth: ClientSessionGenerator + hass_hue: HomeAssistant, hass_client_no_auth: ClientSessionGenerator ) -> TestClient: """Create web client for emulated hue api.""" _mock_hue_endpoints( @@ -285,7 +291,7 @@ async def hue_client( return await hass_client_no_auth() -async def test_discover_lights(hass: HomeAssistant, hue_client) -> None: +async def test_discover_lights(hass: HomeAssistant, hue_client: TestClient) -> None: """Test the discovery of lights.""" result = await hue_client.get("/api/username/lights") @@ -343,7 +349,8 @@ async def test_discover_lights(hass: HomeAssistant, hue_client) -> None: assert device["state"][HUE_API_STATE_ON] is False -async def test_light_without_brightness_supported(hass_hue, hue_client) -> None: +@pytest.mark.usefixtures("hass_hue") +async def test_light_without_brightness_supported(hue_client: TestClient) -> None: """Test that light without brightness is supported.""" light_without_brightness_json = await perform_get_light_state( hue_client, "light.no_brightness", HTTPStatus.OK @@ -382,7 +389,9 @@ async def test_lights_all_dimmable( ) -async def 
test_light_without_brightness_can_be_turned_off(hass_hue, hue_client) -> None: +async def test_light_without_brightness_can_be_turned_off( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test that light without brightness can be turned off.""" hass_hue.states.async_set("light.no_brightness", "on", {}) turn_off_calls = [] @@ -417,7 +426,9 @@ async def test_light_without_brightness_can_be_turned_off(hass_hue, hue_client) assert "light.no_brightness" in call.data[ATTR_ENTITY_ID] -async def test_light_without_brightness_can_be_turned_on(hass_hue, hue_client) -> None: +async def test_light_without_brightness_can_be_turned_on( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test that light without brightness can be turned on.""" hass_hue.states.async_set("light.no_brightness", "off", {}) @@ -467,7 +478,9 @@ async def test_light_without_brightness_can_be_turned_on(hass_hue, hue_client) - (const.STATE_UNKNOWN, True), ], ) -async def test_reachable_for_state(hass_hue, hue_client, state, is_reachable) -> None: +async def test_reachable_for_state( + hass_hue: HomeAssistant, hue_client: TestClient, state: str, is_reachable: bool +) -> None: """Test that an entity is reported as unreachable if in unavailable state.""" entity_id = "light.ceiling_lights" @@ -478,7 +491,7 @@ async def test_reachable_for_state(hass_hue, hue_client, state, is_reachable) -> assert state_json["state"]["reachable"] == is_reachable, state_json -async def test_discover_full_state(hue_client) -> None: +async def test_discover_full_state(hue_client: TestClient) -> None: """Test the discovery of full state.""" result = await hue_client.get(f"/api/{HUE_API_USERNAME}") @@ -529,7 +542,7 @@ async def test_discover_full_state(hue_client) -> None: assert config_json["linkbutton"] is True -async def test_discover_config(hue_client) -> None: +async def test_discover_config(hue_client: TestClient) -> None: """Test the discovery of configuration.""" result = await 
hue_client.get(f"/api/{HUE_API_USERNAME}/config") @@ -587,7 +600,7 @@ async def test_discover_config(hue_client) -> None: assert "error" not in config_json -async def test_get_light_state(hass_hue, hue_client) -> None: +async def test_get_light_state(hass_hue: HomeAssistant, hue_client: TestClient) -> None: """Test the getting of light state.""" # Turn ceiling lights on and set to 127 brightness, and set light color await hass_hue.services.async_call( @@ -648,7 +661,9 @@ async def test_get_light_state(hass_hue, hue_client) -> None: ) -async def test_put_light_state(hass: HomeAssistant, hass_hue, hue_client) -> None: +async def test_put_light_state( + hass: HomeAssistant, hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test the setting of light states.""" await perform_put_test_on_ceiling_lights(hass_hue, hue_client) @@ -818,7 +833,7 @@ async def test_put_light_state(hass: HomeAssistant, hass_hue, hue_client) -> Non async def test_put_light_state_script( - hass: HomeAssistant, hass_hue, hue_client + hass: HomeAssistant, hass_hue: HomeAssistant, hue_client: TestClient ) -> None: """Test the setting of script variables.""" # Turn the kitchen light off first @@ -834,7 +849,7 @@ async def test_put_light_state_script( brightness = round(level * 254 / 100) script_result = await perform_put_light_state( - hass_hue, hue_client, "script.set_kitchen_light", True, brightness + hass_hue, hue_client, "script.set_kitchen_light", True, brightness=brightness ) script_result_json = await script_result.json() @@ -851,13 +866,15 @@ async def test_put_light_state_script( ) -async def test_put_light_state_climate_set_temperature(hass_hue, hue_client) -> None: +async def test_put_light_state_climate_set_temperature( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test setting climate temperature.""" brightness = 19 temperature = round(brightness / 254 * 100) hvac_result = await perform_put_light_state( - hass_hue, hue_client, "climate.hvac", True, 
brightness + hass_hue, hue_client, "climate.hvac", True, brightness=brightness ) hvac_result_json = await hvac_result.json() @@ -876,7 +893,9 @@ async def test_put_light_state_climate_set_temperature(hass_hue, hue_client) -> assert ecobee_result.status == HTTPStatus.UNAUTHORIZED -async def test_put_light_state_humidifier_set_humidity(hass_hue, hue_client) -> None: +async def test_put_light_state_humidifier_set_humidity( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test setting humidifier target humidity.""" # Turn the humidifier off first await hass_hue.services.async_call( @@ -890,7 +909,7 @@ async def test_put_light_state_humidifier_set_humidity(hass_hue, hue_client) -> humidity = round(brightness / 254 * 100) humidifier_result = await perform_put_light_state( - hass_hue, hue_client, "humidifier.humidifier", True, brightness + hass_hue, hue_client, "humidifier.humidifier", True, brightness=brightness ) humidifier_result_json = await humidifier_result.json() @@ -909,7 +928,9 @@ async def test_put_light_state_humidifier_set_humidity(hass_hue, hue_client) -> assert hygrostat_result.status == HTTPStatus.UNAUTHORIZED -async def test_put_light_state_media_player(hass_hue, hue_client) -> None: +async def test_put_light_state_media_player( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test turning on media player and setting volume.""" # Turn the music player off first await hass_hue.services.async_call( @@ -924,7 +945,7 @@ async def test_put_light_state_media_player(hass_hue, hue_client) -> None: brightness = round(level * 254) mp_result = await perform_put_light_state( - hass_hue, hue_client, "media_player.walkman", True, brightness + hass_hue, hue_client, "media_player.walkman", True, brightness=brightness ) mp_result_json = await mp_result.json() @@ -937,7 +958,9 @@ async def test_put_light_state_media_player(hass_hue, hue_client) -> None: assert walkman.attributes[media_player.ATTR_MEDIA_VOLUME_LEVEL] == level -async def 
test_open_cover_without_position(hass_hue, hue_client) -> None: +async def test_open_cover_without_position( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test opening cover .""" cover_id = "cover.living_room_window" # Close cover first @@ -1000,7 +1023,9 @@ async def test_open_cover_without_position(hass_hue, hue_client) -> None: assert cover_test_2.attributes.get("current_position") == 0 -async def test_set_position_cover(hass_hue, hue_client) -> None: +async def test_set_position_cover( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test setting position cover .""" cover_id = "cover.living_room_window" cover_number = ENTITY_NUMBERS_BY_ID[cover_id] @@ -1034,7 +1059,7 @@ async def test_set_position_cover(hass_hue, hue_client) -> None: # Go through the API to open cover_result = await perform_put_light_state( - hass_hue, hue_client, cover_id, False, brightness + hass_hue, hue_client, cover_id, False, brightness=brightness ) assert cover_result.status == HTTPStatus.OK @@ -1057,7 +1082,9 @@ async def test_set_position_cover(hass_hue, hue_client) -> None: assert cover_test_2.attributes.get("current_position") == level -async def test_put_light_state_fan(hass_hue, hue_client) -> None: +async def test_put_light_state_fan( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test turning on fan and setting speed.""" # Turn the fan off first await hass_hue.services.async_call( @@ -1072,7 +1099,7 @@ async def test_put_light_state_fan(hass_hue, hue_client) -> None: brightness = round(level * 254 / 100) fan_result = await perform_put_light_state( - hass_hue, hue_client, "fan.living_room_fan", True, brightness + hass_hue, hue_client, "fan.living_room_fan", True, brightness=brightness ) fan_result_json = await fan_result.json() @@ -1166,7 +1193,9 @@ async def test_put_light_state_fan(hass_hue, hue_client) -> None: assert fan_json["state"][HUE_API_STATE_BRI] == 1 -async def test_put_with_form_urlencoded_content_type(hass_hue, 
hue_client) -> None: +async def test_put_with_form_urlencoded_content_type( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test the form with urlencoded content.""" entity_number = ENTITY_NUMBERS_BY_ID["light.ceiling_lights"] # Needed for Alexa @@ -1185,7 +1214,7 @@ async def test_put_with_form_urlencoded_content_type(hass_hue, hue_client) -> No assert result.status == HTTPStatus.BAD_REQUEST -async def test_entity_not_found(hue_client) -> None: +async def test_entity_not_found(hue_client: TestClient) -> None: """Test for entity which are not found.""" result = await hue_client.get("/api/username/lights/98") @@ -1196,7 +1225,7 @@ async def test_entity_not_found(hue_client) -> None: assert result.status == HTTPStatus.NOT_FOUND -async def test_allowed_methods(hue_client) -> None: +async def test_allowed_methods(hue_client: TestClient) -> None: """Test the allowed methods.""" result = await hue_client.get( "/api/username/lights/ENTITY_NUMBERS_BY_ID[light.ceiling_lights]/state" @@ -1215,7 +1244,7 @@ async def test_allowed_methods(hue_client) -> None: assert result.status == HTTPStatus.METHOD_NOT_ALLOWED -async def test_proper_put_state_request(hue_client) -> None: +async def test_proper_put_state_request(hue_client: TestClient) -> None: """Test the request to set the state.""" # Test proper on value parsing result = await hue_client.put( @@ -1238,7 +1267,7 @@ async def test_proper_put_state_request(hue_client) -> None: assert result.status == HTTPStatus.BAD_REQUEST -async def test_get_empty_groups_state(hue_client) -> None: +async def test_get_empty_groups_state(hue_client: TestClient) -> None: """Test the request to get groups endpoint.""" # Test proper on value parsing result = await hue_client.get("/api/username/groups") @@ -1251,7 +1280,9 @@ async def test_get_empty_groups_state(hue_client) -> None: async def perform_put_test_on_ceiling_lights( - hass_hue, hue_client, content_type=CONTENT_TYPE_JSON + hass_hue: HomeAssistant, + hue_client: 
TestClient, + content_type: str = CONTENT_TYPE_JSON, ): """Test the setting of a light.""" # Turn the office light off first @@ -1267,7 +1298,12 @@ async def perform_put_test_on_ceiling_lights( # Go through the API to turn it on office_result = await perform_put_light_state( - hass_hue, hue_client, "light.ceiling_lights", True, 56, content_type + hass_hue, + hue_client, + "light.ceiling_lights", + True, + brightness=56, + content_type=content_type, ) assert office_result.status == HTTPStatus.OK @@ -1283,7 +1319,9 @@ async def perform_put_test_on_ceiling_lights( assert ceiling_lights.attributes[light.ATTR_BRIGHTNESS] == 56 -async def perform_get_light_state_by_number(client, entity_number, expected_status): +async def perform_get_light_state_by_number( + client: TestClient, entity_number: int | str, expected_status: HTTPStatus +) -> JsonObjectType | None: """Test the getting of a light state.""" result = await client.get(f"/api/username/lights/{entity_number}") @@ -1297,7 +1335,9 @@ async def perform_get_light_state_by_number(client, entity_number, expected_stat return None -async def perform_get_light_state(client, entity_id, expected_status): +async def perform_get_light_state( + client: TestClient, entity_id: str, expected_status: HTTPStatus +) -> JsonObjectType | None: """Test the getting of a light state.""" entity_number = ENTITY_NUMBERS_BY_ID[entity_id] return await perform_get_light_state_by_number( @@ -1306,18 +1346,19 @@ async def perform_get_light_state(client, entity_id, expected_status): async def perform_put_light_state( - hass_hue, - client, - entity_id, - is_on, - brightness=None, - content_type=CONTENT_TYPE_JSON, - hue=None, - saturation=None, - color_temp=None, - with_state=True, - xy=None, - transitiontime=None, + hass_hue: HomeAssistant, + client: TestClient, + entity_id: str, + is_on: bool, + *, + brightness: int | None = None, + content_type: str = CONTENT_TYPE_JSON, + hue: int | None = None, + saturation: int | None = None, + color_temp: int | 
None = None, + with_state: bool = True, + xy: tuple[float, float] | None = None, + transitiontime: int | None = None, ): """Test the setting of a light state.""" req_headers = {"Content-Type": content_type} @@ -1353,7 +1394,7 @@ async def perform_put_light_state( return result -async def test_external_ip_blocked(hue_client) -> None: +async def test_external_ip_blocked(hue_client: TestClient) -> None: """Test external IP blocked.""" getUrls = [ "/api/username/groups", @@ -1391,7 +1432,7 @@ async def test_external_ip_blocked(hue_client) -> None: _remote_is_allowed.cache_clear() -async def test_unauthorized_user_blocked(hue_client) -> None: +async def test_unauthorized_user_blocked(hue_client: TestClient) -> None: """Test unauthorized_user blocked.""" getUrls = [ "/api/wronguser", @@ -1405,7 +1446,7 @@ async def test_unauthorized_user_blocked(hue_client) -> None: async def test_put_then_get_cached_properly( - hass: HomeAssistant, hass_hue, hue_client + hass: HomeAssistant, hass_hue: HomeAssistant, hue_client: TestClient ) -> None: """Test the setting of light states and an immediate readback reads the same values.""" @@ -1530,7 +1571,7 @@ async def test_put_then_get_cached_properly( async def test_put_than_get_when_service_call_fails( - hass: HomeAssistant, hass_hue, hue_client + hass: HomeAssistant, hass_hue: HomeAssistant, hue_client: TestClient ) -> None: """Test putting and getting the light state when the service call fails.""" @@ -1581,14 +1622,17 @@ async def test_put_than_get_when_service_call_fails( assert ceiling_json["state"][HUE_API_STATE_ON] is False -async def test_get_invalid_entity(hass: HomeAssistant, hass_hue, hue_client) -> None: +@pytest.mark.usefixtures("hass_hue") +async def test_get_invalid_entity(hue_client: TestClient) -> None: """Test the setting of light states and an immediate readback reads the same values.""" # Check that we get an error with an invalid entity number. 
await perform_get_light_state_by_number(hue_client, 999, HTTPStatus.NOT_FOUND) -async def test_put_light_state_scene(hass: HomeAssistant, hass_hue, hue_client) -> None: +async def test_put_light_state_scene( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test the setting of scene variables.""" # Turn the kitchen lights off first await hass_hue.services.async_call( @@ -1630,7 +1674,9 @@ async def test_put_light_state_scene(hass: HomeAssistant, hass_hue, hue_client) assert hass_hue.states.get("light.kitchen_lights").state == STATE_OFF -async def test_only_change_contrast(hass: HomeAssistant, hass_hue, hue_client) -> None: +async def test_only_change_contrast( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test when only changing the contrast of a light state.""" # Turn the kitchen lights off first @@ -1661,7 +1707,7 @@ async def test_only_change_contrast(hass: HomeAssistant, hass_hue, hue_client) - async def test_only_change_hue_or_saturation( - hass: HomeAssistant, hass_hue, hue_client + hass_hue: HomeAssistant, hue_client: TestClient ) -> None: """Test setting either the hue or the saturation but not both.""" @@ -1700,8 +1746,9 @@ async def test_only_change_hue_or_saturation( ] == (0, 3) +@pytest.mark.usefixtures("base_setup") async def test_specificly_exposed_entities( - hass: HomeAssistant, base_setup, hass_client_no_auth: ClientSessionGenerator + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator ) -> None: """Test specific entities with expose by default off.""" conf = { @@ -1731,7 +1778,9 @@ async def test_specificly_exposed_entities( assert "1" in result_json -async def test_get_light_state_when_none(hass_hue: HomeAssistant, hue_client) -> None: +async def test_get_light_state_when_none( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test the getting of light state when brightness is None.""" hass_hue.states.async_set( "light.ceiling_lights", diff --git 
a/tests/components/emulated_hue/test_upnp.py b/tests/components/emulated_hue/test_upnp.py index 3522f7e8047..b16fda536c6 100644 --- a/tests/components/emulated_hue/test_upnp.py +++ b/tests/components/emulated_hue/test_upnp.py @@ -1,6 +1,7 @@ """The tests for the emulated Hue component.""" from asyncio import AbstractEventLoop +from collections.abc import Generator from http import HTTPStatus import json import unittest @@ -10,7 +11,6 @@ from aiohttp import web from aiohttp.test_utils import TestClient import defusedxml.ElementTree as ET import pytest -from typing_extensions import Generator from homeassistant import setup from homeassistant.components import emulated_hue diff --git a/tests/components/energenie_power_sockets/conftest.py b/tests/components/energenie_power_sockets/conftest.py index 64eb8bbd2a8..c142e436fd3 100644 --- a/tests/components/energenie_power_sockets/conftest.py +++ b/tests/components/energenie_power_sockets/conftest.py @@ -1,11 +1,11 @@ """Configure tests for Energenie-Power-Sockets.""" +from collections.abc import Generator from typing import Final from unittest.mock import MagicMock, patch from pyegps.fakes.powerstrip import FakePowerStrip import pytest -from typing_extensions import Generator from homeassistant.components.energenie_power_sockets.const import ( CONF_DEVICE_API_ID, diff --git a/tests/components/energyzero/conftest.py b/tests/components/energyzero/conftest.py index 49f6c18b09e..d42283c0d4b 100644 --- a/tests/components/energyzero/conftest.py +++ b/tests/components/energyzero/conftest.py @@ -1,11 +1,11 @@ """Fixtures for EnergyZero integration tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from energyzero import Electricity, Gas import pytest -from typing_extensions import Generator from homeassistant.components.energyzero.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/energyzero/snapshots/test_sensor.ambr 
b/tests/components/energyzero/snapshots/test_sensor.ambr index 23b232379df..3a66f25fd32 100644 --- a/tests/components/energyzero/snapshots/test_sensor.ambr +++ b/tests/components/energyzero/snapshots/test_sensor.ambr @@ -62,8 +62,10 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -136,8 +138,10 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -207,8 +211,10 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -278,8 +284,10 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -349,8 +357,10 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -423,8 +433,10 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Gas market price', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/enigma2/conftest.py b/tests/components/enigma2/conftest.py index f879fb327d7..6c024ebf66a 100644 --- a/tests/components/enigma2/conftest.py +++ b/tests/components/enigma2/conftest.py @@ -1,5 +1,7 @@ """Test the Enigma2 config flow.""" +from openwebif.api import OpenWebIfServiceEvent, OpenWebIfStatus + from 
homeassistant.components.enigma2.const import ( CONF_DEEP_STANDBY, CONF_MAC_ADDRESS, @@ -66,7 +68,11 @@ class MockDevice: mac_address: str | None = "12:34:56:78:90:ab" _base = "http://1.1.1.1" - async def _call_api(self, url: str) -> dict: + def __init__(self) -> None: + """Initialize the mock Enigma2 device.""" + self.status = OpenWebIfStatus(currservice=OpenWebIfServiceEvent()) + + async def _call_api(self, url: str) -> dict | None: if url.endswith("/api/about"): return { "info": { @@ -74,11 +80,14 @@ class MockDevice: { "mac": self.mac_address, } - ] + ], + "model": "Mock Enigma2", + "brand": "Enigma2", } } + return None - def get_version(self): + def get_version(self) -> str | None: """Return the version.""" return None @@ -97,5 +106,8 @@ class MockDevice: ] } + async def update(self) -> None: + """Mock update.""" + async def close(self): """Mock close.""" diff --git a/tests/components/enigma2/test_init.py b/tests/components/enigma2/test_init.py index 93a130eef54..ab19c2ce51a 100644 --- a/tests/components/enigma2/test_init.py +++ b/tests/components/enigma2/test_init.py @@ -15,7 +15,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" with ( patch( - "homeassistant.components.enigma2.OpenWebIfDevice.__new__", + "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", return_value=MockDevice(), ), patch( diff --git a/tests/components/enphase_envoy/__init__.py b/tests/components/enphase_envoy/__init__.py index 6c6293ab76b..f69ab8e44f2 100644 --- a/tests/components/enphase_envoy/__init__.py +++ b/tests/components/enphase_envoy/__init__.py @@ -1 +1,13 @@ """Tests for the Enphase Envoy integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 965af3b40fc..ab6e0e4f097 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -1,51 +1,60 @@ """Define test fixtures for Enphase Envoy.""" +from collections.abc import AsyncGenerator, Generator +from typing import Any from unittest.mock import AsyncMock, Mock, patch import jwt from pyenphase import ( - Envoy, EnvoyData, + EnvoyEncharge, + EnvoyEnchargeAggregate, + EnvoyEnchargePower, + EnvoyEnpower, EnvoyInverter, EnvoySystemConsumption, EnvoySystemProduction, EnvoyTokenAuth, ) -from pyenphase.const import PhaseNames, SupportedFeatures -from pyenphase.models.meters import ( - CtMeterStatus, - CtState, - CtStatusFlags, - CtType, - EnvoyMeterData, - EnvoyPhaseMode, -) +from pyenphase.const import SupportedFeatures +from pyenphase.models.dry_contacts import EnvoyDryContactSettings, EnvoyDryContactStatus +from pyenphase.models.meters import EnvoyMeterData +from pyenphase.models.tariff import EnvoyStorageSettings, EnvoyTariff import pytest from homeassistant.components.enphase_envoy import DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.enphase_envoy.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry @pytest.fixture(name="config_entry") -def config_entry_fixture(hass: HomeAssistant, config, serial_number): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, str] +) -> 
MockConfigEntry: """Define a config entry fixture.""" - entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title=f"Envoy {serial_number}" if serial_number else "Envoy", - unique_id=serial_number, + title="Envoy 1234", + unique_id="1234", data=config, ) - entry.add_to_hass(hass) - return entry @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, str]: """Define a config entry data fixture.""" return { CONF_HOST: "1.1.1.1", @@ -55,333 +64,173 @@ def config_fixture(): } -@pytest.fixture(name="mock_envoy") -def mock_envoy_fixture( - serial_number, - mock_authenticate, - mock_setup, - mock_auth, -): +@pytest.fixture +async def mock_envoy( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncMock]: """Define a mocked Envoy fixture.""" - mock_envoy = Mock(spec=Envoy) - mock_envoy.serial_number = serial_number - mock_envoy.firmware = "7.1.2" - mock_envoy.part_number = "123456789" - mock_envoy.envoy_model = "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT" - mock_envoy.authenticate = mock_authenticate - mock_envoy.setup = mock_setup - mock_envoy.auth = mock_auth - mock_envoy.supported_features = SupportedFeatures( - SupportedFeatures.INVERTERS - | SupportedFeatures.PRODUCTION - | SupportedFeatures.PRODUCTION - | SupportedFeatures.METERING - | SupportedFeatures.THREEPHASE - | SupportedFeatures.CTMETERS - ) - mock_envoy.phase_mode = EnvoyPhaseMode.THREE - mock_envoy.phase_count = 3 - mock_envoy.active_phase_count = 3 - mock_envoy.ct_meter_count = 3 - mock_envoy.consumption_meter_type = CtType.NET_CONSUMPTION - mock_envoy.production_meter_type = CtType.PRODUCTION - mock_envoy.storage_meter_type = CtType.STORAGE - mock_envoy.data = EnvoyData( - system_consumption=EnvoySystemConsumption( - watt_hours_last_7_days=1234, - watt_hours_lifetime=1234, - watt_hours_today=1234, - watts_now=1234, - ), - system_production=EnvoySystemProduction( - 
watt_hours_last_7_days=1234, - watt_hours_lifetime=1234, - watt_hours_today=1234, - watts_now=1234, - ), - system_consumption_phases={ - PhaseNames.PHASE_1: EnvoySystemConsumption( - watt_hours_last_7_days=1321, - watt_hours_lifetime=1322, - watt_hours_today=1323, - watts_now=1324, - ), - PhaseNames.PHASE_2: EnvoySystemConsumption( - watt_hours_last_7_days=2321, - watt_hours_lifetime=2322, - watt_hours_today=2323, - watts_now=2324, - ), - PhaseNames.PHASE_3: EnvoySystemConsumption( - watt_hours_last_7_days=3321, - watt_hours_lifetime=3322, - watt_hours_today=3323, - watts_now=3324, - ), - }, - system_production_phases={ - PhaseNames.PHASE_1: EnvoySystemProduction( - watt_hours_last_7_days=1231, - watt_hours_lifetime=1232, - watt_hours_today=1233, - watts_now=1234, - ), - PhaseNames.PHASE_2: EnvoySystemProduction( - watt_hours_last_7_days=2231, - watt_hours_lifetime=2232, - watt_hours_today=2233, - watts_now=2234, - ), - PhaseNames.PHASE_3: EnvoySystemProduction( - watt_hours_last_7_days=3231, - watt_hours_lifetime=3232, - watt_hours_today=3233, - watts_now=3234, - ), - }, - ctmeter_production=EnvoyMeterData( - eid="100000010", - timestamp=1708006110, - energy_delivered=11234, - energy_received=12345, - active_power=100, - power_factor=0.11, - voltage=111, - current=0.2, - frequency=50.1, - state=CtState.ENABLED, - measurement_type=CtType.PRODUCTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[ - CtStatusFlags.PODUCTION_IMBALANCE, - CtStatusFlags.POWER_ON_UNUSED_PHASE, - ], - ), - ctmeter_consumption=EnvoyMeterData( - eid="100000020", - timestamp=1708006120, - energy_delivered=21234, - energy_received=22345, - active_power=101, - power_factor=0.21, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.NET_CONSUMPTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - ctmeter_storage=EnvoyMeterData( - eid="100000030", - timestamp=1708006120, - energy_delivered=31234, - energy_received=32345, - 
active_power=103, - power_factor=0.23, - voltage=113, - current=0.4, - frequency=50.3, - state=CtState.ENABLED, - measurement_type=CtType.STORAGE, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - ctmeter_production_phases={ - PhaseNames.PHASE_1: EnvoyMeterData( - eid="100000011", - timestamp=1708006111, - energy_delivered=112341, - energy_received=123451, - active_power=20, - power_factor=0.12, - voltage=111, - current=0.2, - frequency=50.1, - state=CtState.ENABLED, - measurement_type=CtType.PRODUCTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[CtStatusFlags.PODUCTION_IMBALANCE], - ), - PhaseNames.PHASE_2: EnvoyMeterData( - eid="100000012", - timestamp=1708006112, - energy_delivered=112342, - energy_received=123452, - active_power=30, - power_factor=0.13, - voltage=111, - current=0.2, - frequency=50.1, - state=CtState.ENABLED, - measurement_type=CtType.PRODUCTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[CtStatusFlags.POWER_ON_UNUSED_PHASE], - ), - PhaseNames.PHASE_3: EnvoyMeterData( - eid="100000013", - timestamp=1708006113, - energy_delivered=112343, - energy_received=123453, - active_power=50, - power_factor=0.14, - voltage=111, - current=0.2, - frequency=50.1, - state=CtState.ENABLED, - measurement_type=CtType.PRODUCTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - }, - ctmeter_consumption_phases={ - PhaseNames.PHASE_1: EnvoyMeterData( - eid="100000021", - timestamp=1708006121, - energy_delivered=212341, - energy_received=223451, - active_power=21, - power_factor=0.22, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.NET_CONSUMPTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - PhaseNames.PHASE_2: EnvoyMeterData( - eid="100000022", - timestamp=1708006122, - energy_delivered=212342, - energy_received=223452, - active_power=31, - power_factor=0.23, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - 
measurement_type=CtType.NET_CONSUMPTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - PhaseNames.PHASE_3: EnvoyMeterData( - eid="100000023", - timestamp=1708006123, - energy_delivered=212343, - energy_received=223453, - active_power=51, - power_factor=0.24, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.NET_CONSUMPTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - }, - ctmeter_storage_phases={ - PhaseNames.PHASE_1: EnvoyMeterData( - eid="100000031", - timestamp=1708006121, - energy_delivered=312341, - energy_received=323451, - active_power=22, - power_factor=0.32, - voltage=113, - current=0.4, - frequency=50.3, - state=CtState.ENABLED, - measurement_type=CtType.STORAGE, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - PhaseNames.PHASE_2: EnvoyMeterData( - eid="100000032", - timestamp=1708006122, - energy_delivered=312342, - energy_received=323452, - active_power=33, - power_factor=0.23, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.STORAGE, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - PhaseNames.PHASE_3: EnvoyMeterData( - eid="100000033", - timestamp=1708006123, - energy_delivered=312343, - energy_received=323453, - active_power=53, - power_factor=0.24, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.STORAGE, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - }, - inverters={ - "1": EnvoyInverter( - serial_number="1", - last_report_date=1, - last_report_watts=1, - max_report_watts=1, - ) - }, - raw={"varies_by": "firmware_version"}, - ) - mock_envoy.update = AsyncMock(return_value=mock_envoy.data) - return mock_envoy - - -@pytest.fixture(name="setup_enphase_envoy") -async def setup_enphase_envoy_fixture(hass: HomeAssistant, config, mock_envoy): - """Define a fixture to set up Enphase Envoy.""" with ( patch( 
"homeassistant.components.enphase_envoy.config_flow.Envoy", - return_value=mock_envoy, - ), + autospec=True, + ) as mock_client, patch( "homeassistant.components.enphase_envoy.Envoy", - return_value=mock_envoy, + new=mock_client, ), ): - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - yield + mock_envoy = mock_client.return_value + # Add the fixtures specified + token = jwt.encode( + payload={"name": "envoy", "exp": 1907837780}, + key="secret", + algorithm="HS256", + ) + mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") + mock_envoy.serial_number = "1234" + mock = Mock() + mock.status_code = 200 + mock.text = "Testing request \nreplies." + mock.headers = {"Hello": "World"} + mock_envoy.request.return_value = mock + + # determine fixture file name, default envoy if no request passed + fixture_name = "envoy" + if hasattr(request, "param"): + fixture_name = request.param + + # Load envoy model from fixture + load_envoy_fixture(mock_envoy, fixture_name) + mock_envoy.update.return_value = mock_envoy.data + + yield mock_envoy -@pytest.fixture(name="mock_authenticate") -def mock_authenticate(): - """Define a mocked Envoy.authenticate fixture.""" - return AsyncMock() +def load_envoy_fixture(mock_envoy: AsyncMock, fixture_name: str) -> None: + """Load envoy model from fixture.""" - -@pytest.fixture(name="mock_auth") -def mock_auth(serial_number): - """Define a mocked EnvoyAuth fixture.""" - token = jwt.encode( - payload={"name": "envoy", "exp": 1907837780}, key="secret", algorithm="HS256" + json_fixture: dict[str, Any] = load_json_object_fixture( + f"{fixture_name}.json", DOMAIN ) - return EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial=serial_number) + + mock_envoy.firmware = json_fixture["firmware"] + mock_envoy.part_number = json_fixture["part_number"] + mock_envoy.envoy_model = json_fixture["envoy_model"] + mock_envoy.supported_features = SupportedFeatures( + 
json_fixture["supported_features"] + ) + mock_envoy.phase_mode = json_fixture["phase_mode"] + mock_envoy.phase_count = json_fixture["phase_count"] + mock_envoy.active_phase_count = json_fixture["active_phase_count"] + mock_envoy.ct_meter_count = json_fixture["ct_meter_count"] + mock_envoy.consumption_meter_type = json_fixture["consumption_meter_type"] + mock_envoy.production_meter_type = json_fixture["production_meter_type"] + mock_envoy.storage_meter_type = json_fixture["storage_meter_type"] + + mock_envoy.data = EnvoyData() + _load_json_2_production_data(mock_envoy.data, json_fixture) + _load_json_2_meter_data(mock_envoy.data, json_fixture) + _load_json_2_inverter_data(mock_envoy.data, json_fixture) + _load_json_2_encharge_enpower_data(mock_envoy.data, json_fixture) + _load_json_2_raw_data(mock_envoy.data, json_fixture) -@pytest.fixture(name="mock_setup") -def mock_setup(): - """Define a mocked Envoy.setup fixture.""" - return AsyncMock() +def _load_json_2_production_data( + mocked_data: EnvoyData, json_fixture: dict[str, Any] +) -> None: + """Fill envoy production data from fixture.""" + if item := json_fixture["data"].get("system_consumption"): + mocked_data.system_consumption = EnvoySystemConsumption(**item) + if item := json_fixture["data"].get("system_production"): + mocked_data.system_production = EnvoySystemProduction(**item) + if item := json_fixture["data"].get("system_consumption_phases"): + mocked_data.system_consumption_phases = {} + for sub_item, item_data in item.items(): + mocked_data.system_consumption_phases[sub_item] = EnvoySystemConsumption( + **item_data + ) + if item := json_fixture["data"].get("system_production_phases"): + mocked_data.system_production_phases = {} + for sub_item, item_data in item.items(): + mocked_data.system_production_phases[sub_item] = EnvoySystemProduction( + **item_data + ) -@pytest.fixture(name="serial_number") -def serial_number_fixture(): - """Define a serial number fixture.""" - return "1234" +def 
_load_json_2_meter_data( + mocked_data: EnvoyData, json_fixture: dict[str, Any] +) -> None: + """Fill envoy meter data from fixture.""" + if item := json_fixture["data"].get("ctmeter_production"): + mocked_data.ctmeter_production = EnvoyMeterData(**item) + if item := json_fixture["data"].get("ctmeter_consumption"): + mocked_data.ctmeter_consumption = EnvoyMeterData(**item) + if item := json_fixture["data"].get("ctmeter_storage"): + mocked_data.ctmeter_storage = EnvoyMeterData(**item) + if item := json_fixture["data"].get("ctmeter_production_phases"): + mocked_data.ctmeter_production_phases = {} + for sub_item, item_data in item.items(): + mocked_data.ctmeter_production_phases[sub_item] = EnvoyMeterData( + **item_data + ) + if item := json_fixture["data"].get("ctmeter_consumption_phases"): + mocked_data.ctmeter_consumption_phases = {} + for sub_item, item_data in item.items(): + mocked_data.ctmeter_consumption_phases[sub_item] = EnvoyMeterData( + **item_data + ) + if item := json_fixture["data"].get("ctmeter_storage_phases"): + mocked_data.ctmeter_storage_phases = {} + for sub_item, item_data in item.items(): + mocked_data.ctmeter_storage_phases[sub_item] = EnvoyMeterData(**item_data) + + +def _load_json_2_inverter_data( + mocked_data: EnvoyData, json_fixture: dict[str, Any] +) -> None: + """Fill envoy inverter data from fixture.""" + if item := json_fixture["data"].get("inverters"): + mocked_data.inverters = {} + for sub_item, item_data in item.items(): + mocked_data.inverters[sub_item] = EnvoyInverter(**item_data) + + +def _load_json_2_encharge_enpower_data( + mocked_data: EnvoyData, json_fixture: dict[str, Any] +) -> None: + """Fill envoy encharge/enpower data from fixture.""" + if item := json_fixture["data"].get("encharge_inventory"): + mocked_data.encharge_inventory = {} + for sub_item, item_data in item.items(): + mocked_data.encharge_inventory[sub_item] = EnvoyEncharge(**item_data) + if item := json_fixture["data"].get("enpower"): + mocked_data.enpower = 
EnvoyEnpower(**item) + if item := json_fixture["data"].get("encharge_aggregate"): + mocked_data.encharge_aggregate = EnvoyEnchargeAggregate(**item) + if item := json_fixture["data"].get("encharge_power"): + mocked_data.encharge_power = {} + for sub_item, item_data in item.items(): + mocked_data.encharge_power[sub_item] = EnvoyEnchargePower(**item_data) + if item := json_fixture["data"].get("tariff"): + mocked_data.tariff = EnvoyTariff(**item) + mocked_data.tariff.storage_settings = EnvoyStorageSettings( + **item["storage_settings"] + ) + if item := json_fixture["data"].get("dry_contact_status"): + mocked_data.dry_contact_status = {} + for sub_item, item_data in item.items(): + mocked_data.dry_contact_status[sub_item] = EnvoyDryContactStatus( + **item_data + ) + if item := json_fixture["data"].get("dry_contact_settings"): + mocked_data.dry_contact_settings = {} + for sub_item, item_data in item.items(): + mocked_data.dry_contact_settings[sub_item] = EnvoyDryContactSettings( + **item_data + ) + + +def _load_json_2_raw_data(mocked_data: EnvoyData, json_fixture: dict[str, Any]) -> None: + """Fill envoy raw data from fixture.""" + if item := json_fixture["data"].get("raw"): + mocked_data.raw = item diff --git a/tests/components/enphase_envoy/fixtures/envoy.json b/tests/components/enphase_envoy/fixtures/envoy.json new file mode 100644 index 00000000000..8c9be429931 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy.json @@ -0,0 +1,49 @@ +{ + "serial_number": "1234", + "firmware": "7.6.175", + "part_number": "123456789", + "envoy_model": "Envoy", + "supported_features": 65, + "phase_mode": null, + "phase_count": 1, + "active_phase_count": 0, + "ct_meter_count": 0, + "consumption_meter_type": null, + "production_meter_type": null, + "storage_meter_type": null, + "data": { + "encharge_inventory": null, + "encharge_power": null, + "encharge_aggregate": null, + "enpower": null, + "system_consumption": null, + "system_production": { + "watt_hours_lifetime": 
1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "ctmeter_production": null, + "ctmeter_consumption": null, + "ctmeter_storage": null, + "ctmeter_production_phases": null, + "ctmeter_consumption_phases": null, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": null, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json new file mode 100644 index 00000000000..e72829280da --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json @@ -0,0 +1,130 @@ +{ + "serial_number": "1234", + "firmware": "7.6.175", + "part_number": "123456789", + "envoy_model": "Envoy, phases: 1, phase mode: three, net-consumption CT, production CT", + "supported_features": 1231, + "phase_mode": "three", + "phase_count": 1, + "active_phase_count": 0, + "ct_meter_count": 2, + "consumption_meter_type": "net-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": null, + "encharge_power": null, + "encharge_aggregate": null, + "enpower": null, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + 
"voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": null, + "ctmeter_consumption_phases": null, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1695744220", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 15.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1695598084" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "season_1", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 444, + "must_charge_duration": 35, + "must_charge_mode": "CG", + "enable_discharge_to_grid": true, + "periods": [ + { + "id": "period_1", + "start": 480, + "rate": 0.1898 + }, + { + "id": "filler", + "start": 1320, + "rate": 0.1034 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json new file mode 100644 index 00000000000..72b510e2328 --- /dev/null +++ 
b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json @@ -0,0 +1,445 @@ +{ + "serial_number": "1234", + "firmware": "7.1.2", + "part_number": "123456789", + "envoy_model": "Envoy, phases: 3, phase mode: split, net-consumption CT, production CT, storage CT", + "supported_features": 1659, + "phase_mode": "three", + "phase_count": 3, + "active_phase_count": 3, + "ct_meter_count": 2, + "consumption_meter_type": "net-consumption", + "production_meter_type": "production", + "storage_meter_type": "storage", + "data": { + "encharge_inventory": { + "123456": { + "admin_state": 6, + "admin_state_str": "ENCHG_STATE_READY", + "bmu_firmware_version": "2.1.34", + "comm_level_2_4_ghz": 4, + "comm_level_sub_ghz": 4, + "communicating": true, + "dc_switch_off": false, + "encharge_capacity": 3500, + "encharge_revision": 2, + "firmware_loaded_date": 1695330323, + "firmware_version": "2.6.5973_rel/22.11", + "installed_date": 1695330323, + "last_report_date": 1695769447, + "led_status": 17, + "max_cell_temp": 30, + "operating": true, + "part_number": "830-01760-r37", + "percent_full": 15, + "serial_number": "123456", + "temperature": 29, + "temperature_unit": "C", + "zigbee_dongle_fw_version": "100F" + } + }, + "encharge_power": { + "123456": { + "apparent_power_mva": 0, + "real_power_mw": 0, + "soc": 15 + } + }, + "encharge_aggregate": { + "available_energy": 525, + "backup_reserve": 526, + "state_of_charge": 15, + "reserve_state_of_charge": 15, + "configured_reserve_state_of_charge": 15, + "max_available_capacity": 3500 + }, + "enpower": { + "grid_mode": "multimode-ongrid", + "admin_state": 24, + "admin_state_str": "ENPWR_STATE_OPER_CLOSED", + "comm_level_2_4_ghz": 5, + "comm_level_sub_ghz": 5, + "communicating": true, + "firmware_loaded_date": 1695330323, + "firmware_version": "1.2.2064_release/20.34", + "installed_date": 1695330323, + "last_report_date": 1695769447, + "mains_admin_state": "closed", + "mains_oper_state": "closed", + "operating": true, + 
"part_number": "830-01760-r37", + "serial_number": "654321", + "temperature": 79, + "temperature_unit": "F", + "zigbee_dongle_fw_version": "1009" + }, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": { + "L1": { + "watt_hours_lifetime": 1322, + "watt_hours_last_7_days": 1321, + "watt_hours_today": 1323, + "watts_now": 1324 + }, + "L2": { + "watt_hours_lifetime": 2322, + "watt_hours_last_7_days": 2321, + "watt_hours_today": 2323, + "watts_now": 2324 + }, + "L3": { + "watt_hours_lifetime": 3322, + "watt_hours_last_7_days": 3321, + "watt_hours_today": 3323, + "watts_now": 3324 + } + }, + "system_production_phases": { + "L1": { + "watt_hours_lifetime": 1232, + "watt_hours_last_7_days": 1231, + "watt_hours_today": 1233, + "watts_now": 1234 + }, + "L2": { + "watt_hours_lifetime": 2232, + "watt_hours_last_7_days": 2231, + "watt_hours_today": 2233, + "watts_now": 2234 + }, + "L3": { + "watt_hours_lifetime": 3232, + "watt_hours_last_7_days": 3231, + "watt_hours_today": 3233, + "watts_now": 3234 + } + }, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": 
"normal", + "status_flags": [] + }, + "ctmeter_storage": { + "eid": "100000030", + "timestamp": 1708006120, + "energy_delivered": 31234, + "energy_received": 32345, + "active_power": 103, + "power_factor": 0.23, + "voltage": 113, + "current": 0.4, + "frequency": 50.3, + "state": "enabled", + "measurement_type": "storage", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_production_phases": { + "L1": { + "eid": "100000011", + "timestamp": 1708006111, + "energy_delivered": 112341, + "energy_received": 123451, + "active_power": 20, + "power_factor": 0.12, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance"] + }, + "L2": { + "eid": "100000012", + "timestamp": 1708006112, + "energy_delivered": 112342, + "energy_received": 123452, + "active_power": 30, + "power_factor": 0.13, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["power-on-unused-phase"] + }, + "L3": { + "eid": "100000013", + "timestamp": 1708006113, + "energy_delivered": 112343, + "energy_received": 123453, + "active_power": 50, + "power_factor": 0.14, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_consumption_phases": { + "L1": { + "eid": "100000021", + "timestamp": 1708006121, + "energy_delivered": 212341, + "energy_received": 223451, + "active_power": 21, + "power_factor": 0.22, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000022", + "timestamp": 1708006122, + "energy_delivered": 212342, + "energy_received": 223452, + "active_power": 31, + 
"power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000023", + "timestamp": 1708006123, + "energy_delivered": 212343, + "energy_received": 223453, + "active_power": 51, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_storage_phases": { + "L1": { + "eid": "100000031", + "timestamp": 1708006121, + "energy_delivered": 312341, + "energy_received": 323451, + "active_power": 22, + "power_factor": 0.32, + "voltage": 113, + "current": 0.4, + "frequency": 50.3, + "state": "enabled", + "measurement_type": "storage", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000032", + "timestamp": 1708006122, + "energy_delivered": 312342, + "energy_received": 323452, + "active_power": 33, + "power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "storage", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000033", + "timestamp": 1708006123, + "energy_delivered": 312343, + "energy_received": 323453, + "active_power": 53, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "storage", + "metering_status": "normal", + "status_flags": [] + } + }, + "dry_contact_status": { + "NC1": { + "id": "NC1", + "status": "open" + }, + "NC2": { + "id": "NC2", + "status": "closed" + }, + "NC3": { + "id": "NC3", + "status": "open" + } + }, + "dry_contact_settings": { + "NC1": { + "id": "NC1", + "black_start": 5.0, + "essential_end_time": 32400.0, + "essential_start_time": 57600.0, + "generator_action": "shed", + "grid_action": "shed", + "load_name": "NC1 Fixture", + 
"manual_override": true, + "micro_grid_action": "shed", + "mode": "manual", + "override": true, + "priority": 1.0, + "pv_serial_nb": [], + "soc_high": 70.0, + "soc_low": 25.0, + "type": "LOAD" + }, + "NC2": { + "id": "NC2", + "black_start": 5.0, + "essential_end_time": 57600.0, + "essential_start_time": 32400.0, + "generator_action": "shed", + "grid_action": "apply", + "load_name": "NC2 Fixture", + "manual_override": true, + "micro_grid_action": "shed", + "mode": "manual", + "override": true, + "priority": 2.0, + "pv_serial_nb": [], + "soc_high": 70.0, + "soc_low": 30.0, + "type": "LOAD" + }, + "NC3": { + "id": "NC3", + "black_start": 5.0, + "essential_end_time": 57600.0, + "essential_start_time": 32400.0, + "generator_action": "apply", + "grid_action": "shed", + "load_name": "NC3 Fixture", + "manual_override": true, + "micro_grid_action": "apply", + "mode": "manual", + "override": true, + "priority": 3.0, + "pv_serial_nb": [], + "soc_high": 70.0, + "soc_low": 30.0, + "type": "NONE" + } + }, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1695744220", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 15.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1695598084" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "season_1", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 444, + "must_charge_duration": 35, + "must_charge_mode": "CG", + "enable_discharge_to_grid": true, + "periods": [ + { + "id": "period_1", + "start": 480, + "rate": 0.1898 + }, + { + "id": "filler", + "start": 1320, + "rate": 0.1034 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff 
--git a/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json new file mode 100644 index 00000000000..f9b6ae31196 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json @@ -0,0 +1,260 @@ +{ + "serial_number": "1234", + "firmware": "7.6.175", + "part_number": "123456789", + "envoy_model": "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT", + "supported_features": 1743, + "phase_mode": "three", + "phase_count": 3, + "active_phase_count": 3, + "ct_meter_count": 2, + "consumption_meter_type": "net-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": null, + "encharge_power": null, + "encharge_aggregate": null, + "enpower": null, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": { + "L1": { + "watt_hours_lifetime": 1322, + "watt_hours_last_7_days": 1321, + "watt_hours_today": 1323, + "watts_now": 1324 + }, + "L2": { + "watt_hours_lifetime": 2322, + "watt_hours_last_7_days": 2321, + "watt_hours_today": 2323, + "watts_now": 2324 + }, + "L3": { + "watt_hours_lifetime": 3322, + "watt_hours_last_7_days": 3321, + "watt_hours_today": 3323, + "watts_now": 3324 + } + }, + "system_production_phases": { + "L1": { + "watt_hours_lifetime": 1232, + "watt_hours_last_7_days": 1231, + "watt_hours_today": 1233, + "watts_now": 1234 + }, + "L2": { + "watt_hours_lifetime": 2232, + "watt_hours_last_7_days": 2231, + "watt_hours_today": 2233, + "watts_now": 2234 + }, + "L3": { + "watt_hours_lifetime": 3232, + "watt_hours_last_7_days": 3231, + "watt_hours_today": 3233, + "watts_now": 3234 + } + }, + "ctmeter_production": { + 
"eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": { + "L1": { + "eid": "100000011", + "timestamp": 1708006111, + "energy_delivered": 112341, + "energy_received": 123451, + "active_power": 20, + "power_factor": 0.12, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance"] + }, + "L2": { + "eid": "100000012", + "timestamp": 1708006112, + "energy_delivered": 112342, + "energy_received": 123452, + "active_power": 30, + "power_factor": 0.13, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["power-on-unused-phase"] + }, + "L3": { + "eid": "100000013", + "timestamp": 1708006113, + "energy_delivered": 112343, + "energy_received": 123453, + "active_power": 50, + "power_factor": 0.14, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_consumption_phases": { + "L1": { + "eid": "100000021", + "timestamp": 1708006121, + "energy_delivered": 212341, + "energy_received": 223451, + 
"active_power": 21, + "power_factor": 0.22, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000022", + "timestamp": 1708006122, + "energy_delivered": 212342, + "energy_received": 223452, + "active_power": 31, + "power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000023", + "timestamp": 1708006123, + "energy_delivered": 212343, + "energy_received": 223453, + "active_power": 51, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1695744220", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 15.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1695598084" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "season_1", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 444, + "must_charge_duration": 35, + "must_charge_mode": "CG", + "enable_discharge_to_grid": true, + "periods": [ + { + "id": "period_1", + "start": 480, + "rate": 0.1898 + }, + { + "id": "filler", + "start": 1320, + "rate": 0.1034 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git 
a/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json new file mode 100644 index 00000000000..ca2a976b6d1 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json @@ -0,0 +1,125 @@ +{ + "serial_number": "1234", + "firmware": "7.6.175", + "part_number": "123456789", + "envoy_model": "Envoy, phases: 1, phase mode: three, total-consumption CT, production CT", + "supported_features": 1217, + "phase_mode": "three", + "phase_count": 1, + "active_phase_count": 0, + "ct_meter_count": 2, + "consumption_meter_type": "total-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": null, + "encharge_power": null, + "encharge_aggregate": null, + "enpower": null, + "system_consumption": null, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "total-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": null, + "ctmeter_consumption_phases": null, + "ctmeter_storage_phases": null, + 
"dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1695744220", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 15.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1695598084" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "season_1", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 444, + "must_charge_duration": 35, + "must_charge_mode": "CG", + "enable_discharge_to_grid": true, + "periods": [ + { + "id": "period_1", + "start": 480, + "rate": 0.1898 + }, + { + "id": "filler", + "start": 1320, + "rate": 0.1034 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..84401c7566b --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr @@ -0,0 +1,188 @@ +# serializer version: 1 +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_communicating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.encharge_123456_communicating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Communicating', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'communicating', + 'unique_id': '123456_communicating', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_communicating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Encharge 123456 Communicating', + }), + 'context': , + 'entity_id': 'binary_sensor.encharge_123456_communicating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_dc_switch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.encharge_123456_dc_switch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DC switch', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_switch', + 'unique_id': '123456_dc_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_dc_switch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Encharge 123456 DC switch', + }), + 'context': , + 'entity_id': 'binary_sensor.encharge_123456_dc_switch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_communicating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.enpower_654321_communicating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Communicating', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'communicating', + 'unique_id': '654321_communicating', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_communicating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Enpower 654321 Communicating', + }), + 'context': , + 'entity_id': 'binary_sensor.enpower_654321_communicating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_grid_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.enpower_654321_grid_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:transmission-tower', + 'original_name': 'Grid status', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_status', + 'unique_id': '654321_mains_oper_state', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_grid_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Enpower 654321 Grid status', + 'icon': 'mdi:transmission-tower', + }), + 'context': , + 'entity_id': 'binary_sensor.enpower_654321_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/enphase_envoy/snapshots/test_config_flow.ambr b/tests/components/enphase_envoy/snapshots/test_config_flow.ambr deleted file mode 100644 index b83d4e811f8..00000000000 --- a/tests/components/enphase_envoy/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,10 +0,0 @@ -# serializer version: 1 -# name: test_platforms - list([ - , - , - , - , - , - ]) -# --- diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index c2ab51a7dbd..e849ab6ee43 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -45,12 +45,14 @@ 'labels': list([ ]), 'manufacturer': 'Enphase', - 'model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', + 'model': 'Envoy', + 'model_id': None, 'name': 'Envoy <>', 'name_by_user': None, + 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', 'serial_number': '<>', 'suggested_area': None, - 'sw_version': '7.1.2', + 'sw_version': '7.6.175', }), 'entities': list([ dict({ @@ -258,3493 +260,6 @@ 'state': '0.00<>', }), }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '<>_consumption', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current power consumption', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_power_consumption', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '<>_daily_consumption', - 'unit_of_measurement': 'kWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Energy consumption today', - 'icon': 'mdi:flash', - 
'state_class': 'total_increasing', - 'unit_of_measurement': 'kWh', - }), - 'entity_id': 'sensor.envoy_<>_energy_consumption_today', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '<>_seven_days_consumption', - 'unit_of_measurement': 'kWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': 'kWh', - }), - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 
'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '<>_lifetime_consumption', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime energy consumption', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', - 'state': '0.00<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 
'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l1', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), 
- 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 
'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 
'mdi:flash', - 'original_name': 'Energy production today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l3', - 'platform': 'enphase_envoy', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l1', - 
'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ 
- 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 
'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 
'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 
None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '<>_lifetime_net_consumption', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime net energy consumption', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', - 'state': '0.02<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': 
'<>_lifetime_net_production', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime net energy production', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', - 'state': '0.022345', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '<>_net_consumption', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', - 'state': '0.101', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 
'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 'unique_id': '<>_frequency', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '<>_voltage', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 
'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '<>_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '<>_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 
'original_name': 'Lifetime net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power 
consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l1', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': 
'<>_voltage_l1', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 
'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 
'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l2', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 
'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l2', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l3', - 'has_entity_name': True, 
- 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l3', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l3', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 
'Metering status net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'production_ct_metering_status', - 'unique_id': '<>_production_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '<>_production_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 
'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 
'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_<>_meter_status_flags_active_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged', - 'unique_id': '<>_lifetime_battery_discharged', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime battery energy discharged', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', - 'state': '0.03<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, 
- 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged', - 'unique_id': '<>_lifetime_battery_charged', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime battery energy charged', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', - 'state': '0.032345', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current 
battery discharge', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge', - 'unique_id': '<>_battery_discharge', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current battery discharge', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_battery_discharge', - 'state': '0.103', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage', - 'unique_id': '<>_storage_voltage', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 
'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status', - 'unique_id': '<>_storage_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags', - 'unique_id': '<>_storage_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l1', - 'platform': 
'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': 
None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l1', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l1', 
- 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': 
None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 
'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l2', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', 
- 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_<>_lifetime_battery_energy_charged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l3', - 'has_entity_name': 
True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l3', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 
'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), ]), }), dict({ @@ -3770,8 +285,10 @@ ]), 'manufacturer': 'Enphase', 'model': 'Inverter', + 'model_id': None, 'name': 'Inverter 1', 'name_by_user': None, + 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -3861,60 +378,12 @@ }), ]), 'envoy_model_data': dict({ - 'ctmeter_consumption': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000020', timestamp=1708006120, energy_delivered=21234, energy_received=22345, active_power=101, power_factor=0.21, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'ctmeter_consumption_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000021', timestamp=1708006121, energy_delivered=212341, energy_received=223451, active_power=21, power_factor=0.22, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000022', timestamp=1708006122, energy_delivered=212342, energy_received=223452, active_power=31, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000023', timestamp=1708006123, energy_delivered=212343, energy_received=223453, active_power=51, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), - 'ctmeter_production': dict({ - '__type': "", - 'repr': 
"EnvoyMeterData(eid='100000010', timestamp=1708006110, energy_delivered=11234, energy_received=12345, active_power=100, power_factor=0.11, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[, ])", - }), - 'ctmeter_production_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000011', timestamp=1708006111, energy_delivered=112341, energy_received=123451, active_power=20, power_factor=0.12, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000012', timestamp=1708006112, energy_delivered=112342, energy_received=123452, active_power=30, power_factor=0.13, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000013', timestamp=1708006113, energy_delivered=112343, energy_received=123453, active_power=50, power_factor=0.14, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), - 'ctmeter_storage': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000030', timestamp=1708006120, energy_delivered=31234, energy_received=32345, active_power=103, power_factor=0.23, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'ctmeter_storage_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000031', timestamp=1708006121, energy_delivered=312341, energy_received=323451, active_power=22, power_factor=0.32, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000032', timestamp=1708006122, energy_delivered=312342, energy_received=323452, active_power=33, power_factor=0.23, 
voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000033', timestamp=1708006123, energy_delivered=312343, energy_received=323453, active_power=53, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), + 'ctmeter_consumption': None, + 'ctmeter_consumption_phases': None, + 'ctmeter_production': None, + 'ctmeter_production_phases': None, + 'ctmeter_storage': None, + 'ctmeter_storage_phases': None, 'dry_contact_settings': dict({ }), 'dry_contact_status': dict({ @@ -3929,63 +398,938 @@ 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", }), }), - 'system_consumption': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', - }), - 'system_consumption_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1322, watt_hours_last_7_days=1321, watt_hours_today=1323, watts_now=1324)', - }), - 'L2': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=2322, watt_hours_last_7_days=2321, watt_hours_today=2323, watts_now=2324)', - }), - 'L3': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=3322, watt_hours_last_7_days=3321, watt_hours_today=3323, watts_now=3324)', - }), - }), + 'system_consumption': None, + 'system_consumption_phases': None, 'system_production': dict({ '__type': "", 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', }), - 'system_production_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1232, watt_hours_last_7_days=1231, watt_hours_today=1233, watts_now=1234)', - }), - 'L2': dict({ - 
'__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=2232, watt_hours_last_7_days=2231, watt_hours_today=2233, watts_now=2234)', - }), - 'L3': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=3232, watt_hours_last_7_days=3231, watt_hours_today=3233, watts_now=3234)', - }), - }), + 'system_production_phases': None, 'tariff': None, }), 'envoy_properties': dict({ - 'active_phasecount': 3, - 'ct_consumption_meter': 'net-consumption', - 'ct_count': 3, - 'ct_production_meter': 'production', - 'ct_storage_meter': 'storage', - 'envoy_firmware': '7.1.2', - 'envoy_model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', + 'active_phasecount': 0, + 'ct_consumption_meter': None, + 'ct_count': 0, + 'ct_production_meter': None, + 'ct_storage_meter': None, + 'envoy_firmware': '7.6.175', + 'envoy_model': 'Envoy', 'part_number': '123456789', - 'phase_count': 3, - 'phase_mode': 'three', + 'phase_count': 1, + 'phase_mode': None, 'supported_features': list([ 'INVERTERS', - 'METERING', 'PRODUCTION', - 'THREEPHASE', - 'CTMETERS', ]), }), + 'fixtures': dict({ + }), + 'raw_data': dict({ + 'varies_by': 'firmware_version', + }), + }) +# --- +# name: test_entry_diagnostics_with_fixtures + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '1.1.1.1', + 'name': '**REDACTED**', + 'password': '**REDACTED**', + 'token': '**REDACTED**', + 'username': '**REDACTED**', + }), + 'disabled_by': None, + 'domain': 'enphase_envoy', + 'entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'minor_version': 1, + 'options': dict({ + 'diagnostics_include_fixtures': True, + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': '**REDACTED**', + 'unique_id': '**REDACTED**', + 'version': 1, + }), + 'envoy_entities_by_device': list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + '45a36e55aaddb2007c5f6602e0c38e72', + ]), + 'configuration_url': None, + 
'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '<>56789', + 'identifiers': list([ + list([ + 'enphase_envoy', + '<>', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Enphase', + 'model': 'Envoy', + 'model_id': None, + 'name': 'Envoy <>', + 'name_by_user': None, + 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', + 'serial_number': '<>', + 'suggested_area': None, + 'sw_version': '7.6.175', + }), + 'entities': list([ + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '<>_production', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current power production', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_power_production', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 
'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '<>_daily_production', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy production today', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_production_today', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '<>_seven_days_production', + 'unit_of_measurement': 'kWh', 
+ }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '<>_lifetime_production', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production', + 'state': '0.00<>', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + '45a36e55aaddb2007c5f6602e0c38e72', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'identifiers': list([ + list([ + 'enphase_envoy', + '1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Enphase', 
+ 'model': 'Inverter', + 'model_id': None, + 'name': 'Inverter 1', + 'name_by_user': None, + 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + }), + 'entities': list([ + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': 'W', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'W', + }), + 'entity_id': 'sensor.inverter_1', + 'state': '1', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'timestamp', + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 
'unit_of_measurement': None, + }), + 'state': None, + }), + ]), + }), + ]), + 'envoy_model_data': dict({ + 'ctmeter_consumption': None, + 'ctmeter_consumption_phases': None, + 'ctmeter_production': None, + 'ctmeter_production_phases': None, + 'ctmeter_storage': None, + 'ctmeter_storage_phases': None, + 'dry_contact_settings': dict({ + }), + 'dry_contact_status': dict({ + }), + 'encharge_aggregate': None, + 'encharge_inventory': None, + 'encharge_power': None, + 'enpower': None, + 'inverters': dict({ + '1': dict({ + '__type': "", + 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", + }), + }), + 'system_consumption': None, + 'system_consumption_phases': None, + 'system_production': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', + }), + 'system_production_phases': None, + 'tariff': None, + }), + 'envoy_properties': dict({ + 'active_phasecount': 0, + 'ct_consumption_meter': None, + 'ct_count': 0, + 'ct_production_meter': None, + 'ct_storage_meter': None, + 'envoy_firmware': '7.6.175', + 'envoy_model': 'Envoy', + 'part_number': '123456789', + 'phase_count': 1, + 'phase_mode': None, + 'supported_features': list([ + 'INVERTERS', + 'PRODUCTION', + ]), + }), + 'fixtures': dict({ + '/admin/lib/tariff': 'Testing request replies.', + '/admin/lib/tariff_log': '{"headers":{"Hello":"World"},"code":200}', + '/api/v1/production': 'Testing request replies.', + '/api/v1/production/inverters': 'Testing request replies.', + '/api/v1/production/inverters_log': '{"headers":{"Hello":"World"},"code":200}', + '/api/v1/production_log': '{"headers":{"Hello":"World"},"code":200}', + '/info': 'Testing request replies.', + '/info_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ensemble/dry_contacts': 'Testing request replies.', + '/ivp/ensemble/dry_contacts_log': '{"headers":{"Hello":"World"},"code":200}', + 
'/ivp/ensemble/generator': 'Testing request replies.', + '/ivp/ensemble/generator_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ensemble/inventory': 'Testing request replies.', + '/ivp/ensemble/inventory_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ensemble/power': 'Testing request replies.', + '/ivp/ensemble/power_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ensemble/secctrl': 'Testing request replies.', + '/ivp/ensemble/secctrl_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ensemble/status': 'Testing request replies.', + '/ivp/ensemble/status_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/meters': 'Testing request replies.', + '/ivp/meters/readings': 'Testing request replies.', + '/ivp/meters/readings_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/meters_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/sc/pvlimit': 'Testing request replies.', + '/ivp/sc/pvlimit_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ss/dry_contact_settings': 'Testing request replies.', + '/ivp/ss/dry_contact_settings_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ss/gen_config': 'Testing request replies.', + '/ivp/ss/gen_config_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ss/gen_schedule': 'Testing request replies.', + '/ivp/ss/gen_schedule_log': '{"headers":{"Hello":"World"},"code":200}', + '/ivp/ss/pel_settings': 'Testing request replies.', + '/ivp/ss/pel_settings_log': '{"headers":{"Hello":"World"},"code":200}', + '/production': 'Testing request replies.', + '/production.json': 'Testing request replies.', + '/production.json?details=1': 'Testing request replies.', + '/production.json?details=1_log': '{"headers":{"Hello":"World"},"code":200}', + '/production.json_log': '{"headers":{"Hello":"World"},"code":200}', + '/production_log': '{"headers":{"Hello":"World"},"code":200}', + }), + 'raw_data': dict({ + 'varies_by': 'firmware_version', + }), + }) +# --- +# name: 
test_entry_diagnostics_with_fixtures_with_error + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '1.1.1.1', + 'name': '**REDACTED**', + 'password': '**REDACTED**', + 'token': '**REDACTED**', + 'username': '**REDACTED**', + }), + 'disabled_by': None, + 'domain': 'enphase_envoy', + 'entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'minor_version': 1, + 'options': dict({ + 'diagnostics_include_fixtures': True, + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': '**REDACTED**', + 'unique_id': '**REDACTED**', + 'version': 1, + }), + 'envoy_entities_by_device': list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + '45a36e55aaddb2007c5f6602e0c38e72', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '<>56789', + 'identifiers': list([ + list([ + 'enphase_envoy', + '<>', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Enphase', + 'model': 'Envoy', + 'model_id': None, + 'name': 'Envoy <>', + 'name_by_user': None, + 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', + 'serial_number': '<>', + 'suggested_area': None, + 'sw_version': '7.6.175', + }), + 'entities': list([ + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kW', + }), + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 
'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '<>_production', + 'unit_of_measurement': 'kW', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Envoy <> Current power production', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 'unit_of_measurement': 'kW', + }), + 'entity_id': 'sensor.envoy_<>_current_power_production', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '<>_daily_production', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy production today', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_production_today', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 
'45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'kWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '<>_seven_days_production', + 'unit_of_measurement': 'kWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': 'kWh', + }), + 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days', + 'state': '1.234', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'total_increasing', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': 'MWh', + }), + }), + 'original_device_class': 'energy', + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'lifetime_production', + 'unique_id': '<>_lifetime_production', + 'unit_of_measurement': 'MWh', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy <> Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': 'total_increasing', + 'unit_of_measurement': 'MWh', + }), + 'entity_id': 'sensor.envoy_<>_lifetime_energy_production', + 'state': '0.00<>', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + '45a36e55aaddb2007c5f6602e0c38e72', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'identifiers': list([ + list([ + 'enphase_envoy', + '1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Enphase', + 'model': 'Inverter', + 'model_id': None, + 'name': 'Inverter 1', + 'name_by_user': None, + 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + }), + 'entities': list([ + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 'measurement', + }), + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'power', + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': 'W', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': 'measurement', + 
'unit_of_measurement': 'W', + }), + 'entity_id': 'sensor.inverter_1', + 'state': '1', + }), + }), + dict({ + 'entity': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', + 'device_class': None, + 'disabled_by': 'integration', + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 'timestamp', + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }), + 'state': None, + }), + ]), + }), + ]), + 'envoy_model_data': dict({ + 'ctmeter_consumption': None, + 'ctmeter_consumption_phases': None, + 'ctmeter_production': None, + 'ctmeter_production_phases': None, + 'ctmeter_storage': None, + 'ctmeter_storage_phases': None, + 'dry_contact_settings': dict({ + }), + 'dry_contact_status': dict({ + }), + 'encharge_aggregate': None, + 'encharge_inventory': None, + 'encharge_power': None, + 'enpower': None, + 'inverters': dict({ + '1': dict({ + '__type': "", + 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", + }), + }), + 'system_consumption': None, + 'system_consumption_phases': None, + 'system_production': dict({ + '__type': "", + 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', + }), + 'system_production_phases': None, + 'tariff': None, + }), + 'envoy_properties': dict({ + 'active_phasecount': 0, + 'ct_consumption_meter': None, + 'ct_count': 0, + 'ct_production_meter': None, + 'ct_storage_meter': None, + 'envoy_firmware': '7.6.175', + 
'envoy_model': 'Envoy', + 'part_number': '123456789', + 'phase_count': 1, + 'phase_mode': None, + 'supported_features': list([ + 'INVERTERS', + 'PRODUCTION', + ]), + }), + 'fixtures': dict({ + 'Error': "EnvoyError('Test')", + }), 'raw_data': dict({ 'varies_by': 'firmware_version', }), diff --git a/tests/components/enphase_envoy/snapshots/test_number.ambr b/tests/components/enphase_envoy/snapshots/test_number.ambr new file mode 100644 index 00000000000..6310911c27e --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_number.ambr @@ -0,0 +1,394 @@ +# serializer version: 1 +# name: test_number[envoy_metered_batt_relay][number.enpower_654321_reserve_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.enpower_654321_reserve_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reserve battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_soc', + 'unique_id': '654321_reserve_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.enpower_654321_reserve_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Enpower 654321 Reserve battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.enpower_654321_reserve_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15.0', + }) +# --- +# name: 
test_number[envoy_metered_batt_relay][number.nc1_fixture_cutoff_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc1_fixture_cutoff_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cutoff battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cutoff_battery_level', + 'unique_id': '654321_relay_NC1_soc_low', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_cutoff_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC1 Fixture Cutoff battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc1_fixture_cutoff_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_restore_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc1_fixture_restore_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Restore battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'restore_battery_level', + 'unique_id': '654321_relay_NC1_soc_high', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_restore_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC1 Fixture Restore battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc1_fixture_restore_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_cutoff_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc2_fixture_cutoff_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cutoff battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cutoff_battery_level', + 'unique_id': '654321_relay_NC2_soc_low', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_cutoff_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC2 Fixture Cutoff battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc2_fixture_cutoff_battery_level', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_restore_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc2_fixture_restore_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restore battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'restore_battery_level', + 'unique_id': '654321_relay_NC2_soc_high', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_restore_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC2 Fixture Restore battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc2_fixture_restore_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_cutoff_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc3_fixture_cutoff_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cutoff battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cutoff_battery_level', + 'unique_id': '654321_relay_NC3_soc_low', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_cutoff_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC3 Fixture Cutoff battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc3_fixture_cutoff_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_restore_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc3_fixture_restore_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restore battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'restore_battery_level', + 'unique_id': '654321_relay_NC3_soc_high', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_restore_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC3 Fixture Restore battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 
'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc3_fixture_restore_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70.0', + }) +# --- diff --git a/tests/components/enphase_envoy/snapshots/test_select.ambr b/tests/components/enphase_envoy/snapshots/test_select.ambr new file mode 100644 index 00000000000..10f15820ac4 --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_select.ambr @@ -0,0 +1,754 @@ +# serializer version: 1 +# name: test_select[envoy_metered_batt_relay][select.enpower_654321_storage_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'backup', + 'self_consumption', + 'savings', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.enpower_654321_storage_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Storage mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_mode', + 'unique_id': '654321_storage_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.enpower_654321_storage_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Enpower 654321 Storage mode', + 'options': list([ + 'backup', + 'self_consumption', + 'savings', + ]), + }), + 'context': , + 'entity_id': 'select.enpower_654321_storage_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'self_consumption', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_generator_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc1_fixture_generator_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Generator action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_generator_action', + 'unique_id': '654321_relay_NC1_generator_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_generator_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture Generator action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc1_fixture_generator_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_grid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc1_fixture_grid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'relay_grid_action', + 'unique_id': '654321_relay_NC1_grid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_grid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture Grid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc1_fixture_grid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_microgrid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc1_fixture_microgrid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Microgrid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_microgrid_action', + 'unique_id': '654321_relay_NC1_microgrid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_microgrid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture Microgrid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc1_fixture_microgrid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: 
test_select[envoy_metered_batt_relay][select.nc1_fixture_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc1_fixture_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_mode', + 'unique_id': '654321_relay_NC1_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture Mode', + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'context': , + 'entity_id': 'select.nc1_fixture_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'standard', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_generator_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc2_fixture_generator_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Generator action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'relay_generator_action', + 'unique_id': '654321_relay_NC2_generator_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_generator_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture Generator action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc2_fixture_generator_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_grid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc2_fixture_grid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_grid_action', + 'unique_id': '654321_relay_NC2_grid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_grid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture Grid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc2_fixture_grid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'powered', + }) +# --- +# name: 
test_select[envoy_metered_batt_relay][select.nc2_fixture_microgrid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc2_fixture_microgrid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Microgrid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_microgrid_action', + 'unique_id': '654321_relay_NC2_microgrid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_microgrid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture Microgrid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc2_fixture_microgrid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc2_fixture_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_mode', + 'unique_id': '654321_relay_NC2_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture Mode', + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'context': , + 'entity_id': 'select.nc2_fixture_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'standard', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_generator_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc3_fixture_generator_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Generator action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_generator_action', + 'unique_id': '654321_relay_NC3_generator_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_generator_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture Generator action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc3_fixture_generator_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'powered', + }) +# --- +# name: 
test_select[envoy_metered_batt_relay][select.nc3_fixture_grid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc3_fixture_grid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_grid_action', + 'unique_id': '654321_relay_NC3_grid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_grid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture Grid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc3_fixture_grid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_microgrid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc3_fixture_microgrid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Microgrid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_microgrid_action', + 'unique_id': '654321_relay_NC3_microgrid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_microgrid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture Microgrid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc3_fixture_microgrid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc3_fixture_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_mode', + 'unique_id': '654321_relay_NC3_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture Mode', + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'context': , + 'entity_id': 'select.nc3_fixture_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'standard', + }) +# --- diff --git 
a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index e403886b096..dde6a6add41 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -1,3429 +1,46 @@ # serializer version: 1 -# name: test_sensor - list([ - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production', - 'unique_id': '1234_production', - 'unit_of_measurement': , +# name: test_sensor[envoy][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 
'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production', - 'unique_id': '1234_daily_production', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production', - 'unique_id': '1234_seven_days_production', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'lifetime_production', - 'unique_id': '1234_lifetime_production', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '1234_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '1234_daily_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 
'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '1234_seven_days_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '1234_lifetime_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '1234_daily_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 
'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '1234_daily_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': 
None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 
'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '1234_daily_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , 
- 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l2', - 'platform': 'enphase_envoy', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '1234_lifetime_net_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ 
- 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': '1234_lifetime_net_production', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '1234_net_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 
'unique_id': '1234_frequency', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '1234_voltage', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '1234_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '1234_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 
'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l1', - 'unit_of_measurement': , - }), - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), 
- 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l2', - 'unit_of_measurement': , - }), - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), 
- 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l3', - 'unit_of_measurement': , - }), - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '1234_production_ct_metering_status', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 
'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '1234_production_ct_status_flags', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , 
- , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 
'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged', - 'unique_id': '1234_lifetime_battery_discharged', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged', - 'unique_id': '1234_lifetime_battery_charged', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge', - 'unique_id': '1234_battery_discharge', - 'unit_of_measurement': , - 
}), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage', - 'unique_id': '1234_storage_voltage', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status', - 'unique_id': '1234_storage_ct_metering_status', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', - 'has_entity_name': True, - 'hidden_by': 
None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags', - 'unique_id': '1234_storage_ct_status_flags', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l1', - 
'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': 
None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l2', - 
'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': 
, - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l3', - 'platform': 
'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': 
None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '1_last_reported', - 'unit_of_measurement': None, - }), - ]) -# --- -# name: test_sensor[sensor.envoy_1234_current_battery_discharge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current battery discharge', - 'icon': 'mdi:flash', + 'area_id': None, + 'capabilities': dict({ 'state_class': , - 'unit_of_measurement': , }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_battery_discharge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.103', + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.101', - }) -# --- -# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: 
test_sensor[sensor.envoy_1234_current_power_consumption_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_consumption_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_consumption_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_production-state] +# name: test_sensor[envoy][sensor.envoy_1234_current_power_production-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -3440,67 +57,46 @@ 'state': '1.234', }) # --- -# name: test_sensor[sensor.envoy_1234_current_power_production_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_production_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_production_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': , +# name: test_sensor[envoy][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last 
seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days-state] +# name: test_sensor[envoy][sensor.envoy_1234_energy_production_last_seven_days-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3516,16 +112,48 @@ 'state': '1.234', }) # --- -# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l1-state] - None +# name: test_sensor[envoy][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) # --- -# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_today-state] +# name: test_sensor[envoy][sensor.envoy_1234_energy_production_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3542,106 +170,48 @@ 'state': '1.234', }) # --- -# name: test_sensor[sensor.envoy_1234_energy_production_today_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_today_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_today_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy charged', - 'icon': 'mdi:flash', - 'state_class': , - 
'unit_of_measurement': , +# name: test_sensor[envoy][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.032345', + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.031234', - }) -# --- -# name: 
test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001234', - }) -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_production-state] +# name: test_sensor[envoy][sensor.envoy_1234_lifetime_energy_production-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3658,164 +228,42 @@ 'state': '0.001234', }) # --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , +# name: test_sensor[envoy][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'context': , - 'entity_id': 
'sensor.envoy_1234_lifetime_net_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.021234', + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.022345', - }) -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] - None -# --- -# name: 
test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_production_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct-state] - None -# --- -# name: 
test_sensor[sensor.envoy_1234_metering_status_storage_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_storage_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l3-state] - None -# --- -# name: test_sensor[sensor.inverter_1-state] +# name: test_sensor[envoy][sensor.inverter_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -3832,6 +280,11330 @@ 'state': '1', }) # --- -# name: test_sensor[sensor.inverter_1_last_reported-state] - None +# name: test_sensor[envoy][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 
'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_net_consumption_ct-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '1234_lifetime_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 
'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '1234_lifetime_net_production', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), 
+ 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_apparent_power-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_apparent_power_mva', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Encharge 123456 Apparent power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Encharge 123456 Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '123456_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Encharge 123456 Last reported', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-09-26T23:04:07+00:00', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_power', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_real_power_mw', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Encharge 123456 Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Encharge 123456 Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '29', 
+ }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.enpower_654321_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '654321_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Enpower 654321 Last reported', + }), + 'context': , + 'entity_id': 'sensor.enpower_654321_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-09-26T23:04:07+00:00', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.enpower_654321_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '654321_temperature', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Enpower 654321 Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.enpower_654321_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_available_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Available battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'available_energy', + 'unique_id': '1234_available_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_available_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Available battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '525', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Battery', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery capacity', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_capacity', + 'unique_id': '1234_max_capacity', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Battery capacity', + 'icon': 
'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3500', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge', + 'unique_id': '1234_battery_discharge', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current battery discharge', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_battery_discharge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.103', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current battery discharge l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 
'original_name': 'Current battery discharge l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current battery discharge l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.033', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 
Current battery discharge l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.053', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.031', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.051', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.324', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.324', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 
'unique_id': '1234_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.324', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l2', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'current_power_production_phase', + 'unique_id': '1234_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_energy_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l1', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l2', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 
'Energy consumption last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l3', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption 
today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.323', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.323', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 
'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.323', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l1', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.231', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l2-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l2', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.231', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), 
+ 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l3', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.231', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.233', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.233', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.233', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged', + 'unique_id': '1234_lifetime_battery_charged', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy charged', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.032345', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '0.323451', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.323452', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.323453', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', 
+ 'original_name': 'Lifetime battery energy discharged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged', + 'unique_id': '1234_lifetime_battery_discharged', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.031234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l1-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.312341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.312342', + }) +# 
--- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.312343', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '1234_lifetime_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l1', + 'platform': 'enphase_envoy', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001322', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l2', + 'icon': 'mdi:flash', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002322', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.003322', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 
'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001232', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l2-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002232', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.003232', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'has_entity_name': 
True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, 
+ 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212342', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', + 
'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212343', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '1234_lifetime_net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223451', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 
'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223452', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223453', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter 
status flags active net consumption CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags', + 'unique_id': '1234_storage_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active storage CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 
'Envoy 1234 Metering status net consumption CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_metering_status_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status', + 'unique_id': '1234_storage_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status storage CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status storage CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l2-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status storage CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering 
status storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status storage CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_energy', + 'unique_id': '1234_reserve_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Reserve battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '526', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_soc', + 'unique_id': '1234_reserve_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Reserve battery level', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': 
dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage', + 'unique_id': '1234_storage_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage storage CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '113', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'voltage', + 'friendly_name': 'Envoy 1234 Voltage storage CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '113', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage storage CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), 
+ 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage storage CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption 
l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption 
l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.031', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.051', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 
'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.324', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.324', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.324', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ 
+ }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l2-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_power_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l1', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l2', + 'unit_of_measurement': , + }) +# 
--- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l2', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_last_seven_days_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l3', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': '3.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.323', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': 
'1234_daily_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.323', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_consumption_today_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '3.323', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l1', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.231', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l2', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.231', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_last_seven_days_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l3', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + 
}), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.231', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_today_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_today_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.233', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_today_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l2', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_today_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.233', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_today_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_energy_production_today_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l3', + 'icon': 'mdi:flash', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.233', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': 
'1234_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'50.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '1234_lifetime_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001322', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002322', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.003322', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001232', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 
'Lifetime energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002232', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 
Lifetime energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.003232', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021234', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212341', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212342', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l3', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212343', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '1234_lifetime_net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223451', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223452', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 
'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223453', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 
'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# 
--- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ 
+ }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 
'friendly_name': 'Envoy 1234 Metering status production CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 
'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 
'Envoy 1234 Voltage net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 
'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) # --- diff --git a/tests/components/enphase_envoy/snapshots/test_switch.ambr b/tests/components/enphase_envoy/snapshots/test_switch.ambr new file mode 100644 index 00000000000..a5dafd735b5 --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_switch.ambr @@ -0,0 +1,231 @@ +# serializer version: 1 +# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_charge_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 
'entity_id': 'switch.enpower_654321_charge_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charge from grid', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_from_grid', + 'unique_id': '654321_charge_from_grid', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_charge_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Enpower 654321 Charge from grid', + }), + 'context': , + 'entity_id': 'switch.enpower_654321_charge_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_grid_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.enpower_654321_grid_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid enabled', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_enabled', + 'unique_id': '654321_mains_admin_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_grid_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Enpower 654321 Grid enabled', + }), + 'context': , + 'entity_id': 'switch.enpower_654321_grid_enabled', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc1_fixture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.nc1_fixture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '654321_relay_NC1_relay_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc1_fixture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture', + }), + 'context': , + 'entity_id': 'switch.nc1_fixture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc2_fixture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.nc2_fixture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '654321_relay_NC2_relay_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc2_fixture-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture', + }), + 'context': , + 'entity_id': 'switch.nc2_fixture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc3_fixture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.nc3_fixture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '654321_relay_NC3_relay_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc3_fixture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture', + }), + 'context': , + 'entity_id': 'switch.nc3_fixture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/enphase_envoy/test_binary_sensor.py b/tests/components/enphase_envoy/test_binary_sensor.py new file mode 100644 index 00000000000..883df4be6fc --- /dev/null +++ b/tests/components/enphase_envoy/test_binary_sensor.py @@ -0,0 +1,89 @@ +"""Test Enphase Envoy binary sensors.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.const import STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensor( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensor platform entities against snapshot.""" + with patch( + "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_no_binary_sensor( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch platform entities are not created.""" + with patch( + "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, config_entry) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_binary_sensor_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test binary sensor entities values and names.""" + with patch( + "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + entity_base = f"{Platform.BINARY_SENSOR}.enpower" + + assert (entity_state := hass.states.get(f"{entity_base}_{sn}_communicating")) + assert 
entity_state.state == STATE_ON + assert (entity_state := hass.states.get(f"{entity_base}_{sn}_grid_status")) + assert entity_state.state == STATE_ON + + entity_base = f"{Platform.BINARY_SENSOR}.encharge" + + for sn in mock_envoy.data.encharge_inventory: + assert (entity_state := hass.states.get(f"{entity_base}_{sn}_communicating")) + assert entity_state.state == STATE_ON + assert (entity_state := hass.states.get(f"{entity_base}_{sn}_dc_switch")) + assert entity_state.state == STATE_ON diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index 7e1808ffa52..c2cc02fcc7c 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -6,173 +6,144 @@ from unittest.mock import AsyncMock from pyenphase import EnvoyAuthenticationError, EnvoyError import pytest -from syrupy.assertion import SnapshotAssertion -from homeassistant import config_entries from homeassistant.components import zeroconf -from homeassistant.components.enphase_envoy.const import DOMAIN, PLATFORMS +from homeassistant.components.enphase_envoy.const import ( + DOMAIN, + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, +) +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_USER, + SOURCE_ZEROCONF, +) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . 
import setup_integration + from tests.common import MockConfigEntry _LOGGER = logging.getLogger(__name__) -async def test_form(hass: HomeAssistant, config, setup_enphase_envoy) -> None: +async def test_form( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy 1234" - assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy 1234", - "username": "test-username", - "password": "test-password", + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Envoy 1234" + assert result["data"] == { + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", } -@pytest.mark.parametrize("serial_number", [None]) async def test_user_no_serial_number( - hass: HomeAssistant, config, setup_enphase_envoy + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test user setup without a serial number.""" + mock_envoy.serial_number = None result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + 
result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy" - assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy", - "username": "test-username", - "password": "test-password", + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Envoy" + assert result["data"] == { + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", } -@pytest.mark.parametrize("serial_number", [None]) -async def test_user_fetching_serial_fails( - hass: HomeAssistant, setup_enphase_envoy +async def test_form_invalid_auth( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: - """Test user setup without a serial number.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", - }, - ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy" - assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy", - "username": "test-username", - "password": "test-password", - } - - -@pytest.mark.parametrize( - "mock_authenticate", - [ - AsyncMock(side_effect=EnvoyAuthenticationError("test")), - ], -) -async def test_form_invalid_auth(hass: HomeAssistant, setup_enphase_envoy) -> None: """Test we handle invalid auth.""" + mock_envoy.authenticate.side_effect = 
EnvoyAuthenticationError( + "fail authentication" + ) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "invalid_auth"} @pytest.mark.parametrize( - "mock_setup", - [AsyncMock(side_effect=EnvoyError)], + ("exception", "error"), + [ + (EnvoyError, "cannot_connect"), + (ValueError, "unknown"), + ], ) -async def test_form_cannot_connect(hass: HomeAssistant, setup_enphase_envoy) -> None: +async def test_form_cannot_connect( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, + exception: Exception, + error: str, +) -> None: """Test we handle cannot connect error.""" + mock_envoy.setup.side_effect = exception result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - -@pytest.mark.parametrize( - "mock_setup", - [AsyncMock(side_effect=ValueError)], -) -async def test_form_unknown_error(hass: HomeAssistant, setup_enphase_envoy) -> None: - """Test we handle unknown error.""" - result = await 
hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", - }, - ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "unknown"} + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} def _get_schema_default(schema, key_name): @@ -184,12 +155,14 @@ def _get_schema_default(schema, key_name): async def test_zeroconf_pre_token_firmware( - hass: HomeAssistant, setup_enphase_envoy + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -203,35 +176,38 @@ async def test_zeroconf_pre_token_firmware( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert _get_schema_default(result["data_schema"].schema, "username") == "installer" + assert ( + _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "installer" + ) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy 1234" - assert result2["result"].unique_id == "1234" - assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy 1234", - 
"username": "test-username", - "password": "test-password", + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Envoy 1234" + assert result["result"].unique_id == "1234" + assert result["data"] == { + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", } async def test_zeroconf_token_firmware( - hass: HomeAssistant, setup_enphase_envoy + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -244,102 +220,101 @@ async def test_zeroconf_token_firmware( ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert _get_schema_default(result["data_schema"].schema, "username") == "" + assert _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Envoy 1234" assert result2["result"].unique_id == "1234" assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy 1234", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", } -@pytest.mark.parametrize( - "mock_authenticate", - [ - AsyncMock( - side_effect=[ - None, - EnvoyAuthenticationError("fail authentication"), - None, - ] - ), - ], -) async def 
test_form_host_already_exists( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test changing credentials for existing host.""" + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} # existing config - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" + + mock_envoy.authenticate.side_effect = EnvoyAuthenticationError( + "fail authentication" + ) # mock failing authentication on first try - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "wrong-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "wrong-password", }, ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + mock_envoy.authenticate.side_effect = None # still original config after failure - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" # mock successful authentication 
and update of credentials - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "changed-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "changed-password", }, ) await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" # updated config with new ip and changed pw - assert config_entry.data["host"] == "1.1.1.2" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "changed-password" + assert config_entry.data[CONF_HOST] == "1.1.1.2" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "changed-password" async def test_zeroconf_serial_already_exists( hass: HomeAssistant, - config_entry, - setup_enphase_envoy, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, caplog: pytest.LogCaptureFixture, ) -> None: """Test serial number already exists from zeroconf.""" _LOGGER.setLevel(logging.DEBUG) + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("4.4.4.4"), ip_addresses=[ip_address("4.4.4.4")], @@ -350,21 +325,24 @@ async def test_zeroconf_serial_already_exists( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data["host"] == "4.4.4.4" + assert config_entry.data[CONF_HOST] == "4.4.4.4" assert "Zeroconf ip 4 processing 4.4.4.4, current hosts: {'1.1.1.1'}" in 
caplog.text async def test_zeroconf_serial_already_exists_ignores_ipv6( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test serial number already exists from zeroconf but the discovery is ipv6.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("fd00::b27c:63bb:cc85:4ea0"), ip_addresses=[ip_address("fd00::b27c:63bb:cc85:4ea0")], @@ -379,17 +357,21 @@ async def test_zeroconf_serial_already_exists_ignores_ipv6( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "not_ipv4_address" - assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.1" -@pytest.mark.parametrize("serial_number", [None]) async def test_zeroconf_host_already_exists( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test hosts already exists from zeroconf.""" + mock_envoy.serial_number = None + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -400,7 +382,6 @@ async def test_zeroconf_host_already_exists( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" @@ -409,17 +390,21 @@ async def test_zeroconf_host_already_exists( async def test_zero_conf_while_form( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: 
HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test zeroconf while form is active.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -430,26 +415,29 @@ async def test_zero_conf_while_form( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.1" assert config_entry.unique_id == "1234" assert config_entry.title == "Envoy 1234" async def test_zero_conf_second_envoy_while_form( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test zeroconf while form is active.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result2 = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("4.4.4.4"), ip_addresses=[ip_address("4.4.4.4")], @@ -460,50 +448,51 @@ async def test_zero_conf_second_envoy_while_form( type="mock_type", ), ) - await hass.async_block_till_done() - assert result2["type"] is 
FlowResultType.FORM - assert config_entry.data["host"] == "1.1.1.1" + assert result["type"] is FlowResultType.FORM + assert config_entry.data[CONF_HOST] == "1.1.1.1" assert config_entry.unique_id == "1234" assert config_entry.title == "Envoy 1234" - result3 = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result2["flow_id"], { - "host": "4.4.4.4", - "username": "test-username", - "password": "test-password", + CONF_HOST: "4.4.4.4", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Envoy 4321" - assert result3["result"].unique_id == "4321" + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Envoy 4321" + assert result2["result"].unique_id == "4321" result4 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result4["type"] is FlowResultType.ABORT async def test_zero_conf_malformed_serial_property( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test malformed zeroconf properties.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM with pytest.raises(KeyError) as ex: await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( 
ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -516,30 +505,33 @@ async def test_zero_conf_malformed_serial_property( ) assert "serialnum" in str(ex.value) - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT + assert result["type"] is FlowResultType.ABORT async def test_zero_conf_malformed_serial( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test malformed zeroconf properties.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - result2 = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -550,34 +542,36 @@ async def test_zero_conf_malformed_serial( type="mock_type", ), ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM + assert result["type"] is FlowResultType.FORM - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result = await hass.config_entries.flow.async_configure( + result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + 
CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Envoy 12%4" + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Envoy 12%4" async def test_zero_conf_malformed_fw_property( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test malformed zeroconf property.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -588,25 +582,26 @@ async def test_zero_conf_malformed_fw_property( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.1" assert config_entry.unique_id == "1234" assert config_entry.title == "Envoy 1234" async def test_zero_conf_old_blank_entry( - hass: HomeAssistant, setup_enphase_envoy + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test re-using old blank entry.""" entry = MockConfigEntry( domain=DOMAIN, data={ - "host": "1.1.1.1", - "username": "", - "password": "", - "name": "unknown", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_NAME: "unknown", }, unique_id=None, title="Envoy", @@ -614,7 +609,7 @@ async def test_zero_conf_old_blank_entry( entry.add_to_hass(hass) result 
= await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], @@ -625,21 +620,26 @@ async def test_zero_conf_old_blank_entry( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert entry.data["host"] == "1.1.1.1" + assert entry.data[CONF_HOST] == "1.1.1.1" assert entry.unique_id == "1234" assert entry.title == "Envoy 1234" -async def test_reauth(hass: HomeAssistant, config_entry, setup_enphase_envoy) -> None: +async def test_reauth( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: """Test we reauth auth.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_REAUTH, + "source": SOURCE_REAUTH, "unique_id": config_entry.unique_id, "entry_id": config_entry.entry_id, }, @@ -647,23 +647,69 @@ async def test_reauth(hass: HomeAssistant, config_entry, setup_enphase_envoy) -> result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", - "password": "test-password", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" +async def test_options_default( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: + """Test we can configure options.""" + await setup_integration(hass, config_entry) + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert 
result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert config_entry.options == { + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE + } + + +async def test_options_set( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: + """Test we can configure options.""" + await setup_integration(hass, config_entry) + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert config_entry.options == {OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True} + + async def test_reconfigure( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we can reconfiger the entry.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -672,36 +718,40 @@ async def test_reconfigure( assert result["errors"] == {} # original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == 
"test-password" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username2", - "password": "test-password2", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username2", + CONF_PASSWORD: "test-password2", }, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # changed entry - assert config_entry.data["host"] == "1.1.1.2" - assert config_entry.data["username"] == "test-username2" - assert config_entry.data["password"] == "test-password2" + assert config_entry.data[CONF_HOST] == "1.1.1.2" + assert config_entry.data[CONF_USERNAME] == "test-username2" + assert config_entry.data[CONF_PASSWORD] == "test-password2" async def test_reconfigure_nochange( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we get the reconfigure form and apply nochange.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -710,36 +760,40 @@ async def test_reconfigure_nochange( assert result["errors"] == {} # original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" - result2 = await hass.config_entries.flow.async_configure( + result = await 
hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # unchanged original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" async def test_reconfigure_otherenvoy( - hass: HomeAssistant, config_entry, setup_enphase_envoy, mock_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test entering ip of other envoy and prevent changing it based on serial.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -750,67 +804,67 @@ async def test_reconfigure_otherenvoy( # let mock return different serial from first time, sim it's other one on changed ip mock_envoy.serial_number = "45678" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "new-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "new-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert 
result2["errors"] == {"base": "unexpected_envoy"} + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unexpected_envoy"} # entry should still be original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" # set serial back to original to finsich flow mock_envoy.serial_number = "1234" - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result = await hass.config_entries.flow.async_configure( + result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "new-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "new-password", }, ) - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # updated original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "new-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "new-password" @pytest.mark.parametrize( - "mock_authenticate", + ("exception", "error"), [ - AsyncMock( - side_effect=[ - None, - EnvoyAuthenticationError("fail authentication"), - EnvoyError("cannot_connect"), - Exception("Unexpected exception"), - None, - ] - ), + (EnvoyAuthenticationError("fail authentication"), "invalid_auth"), + (EnvoyError, "cannot_connect"), + (Exception, "unknown"), ], ) async def test_reconfigure_auth_failure( - hass: HomeAssistant, config_entry, 
setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, + exception: Exception, + error: str, ) -> None: """Test changing credentials for existing host with auth failure.""" + await setup_integration(hass, config_entry) + result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -818,84 +872,51 @@ async def test_reconfigure_auth_failure( assert result["errors"] == {} # existing config - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" + + mock_envoy.authenticate.side_effect = exception # mock failing authentication on first try - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "wrong-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "wrong-password", }, ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} - - # still original config after failure - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" - - # mock failing authentication on first try - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.2", - "username": "new-username", - "password": "wrong-password", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - # 
still original config after failure - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" - - # mock failing authentication on first try - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.2", - "username": "other-username", - "password": "test-password", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "unknown"} - - # still original config after failure - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + mock_envoy.authenticate.side_effect = None # mock successful authentication and update of credentials - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "changed-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "changed-password", }, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # updated config with new ip and changed pw - assert config_entry.data["host"] == "1.1.1.2" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "changed-password" + assert config_entry.data[CONF_HOST] == "1.1.1.2" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "changed-password" async def test_reconfigure_change_ip_to_existing( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: 
HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test reconfiguration to existing entry with same ip does not harm existing one.""" + await setup_integration(hass, config_entry) other_entry = MockConfigEntry( domain=DOMAIN, entry_id="65432155aaddb2007c5f6602e0c38e72", @@ -911,14 +932,14 @@ async def test_reconfigure_change_ip_to_existing( other_entry.add_to_hass(hass) # original other entry - assert other_entry.data["host"] == "1.1.1.2" - assert other_entry.data["username"] == "other-username" - assert other_entry.data["password"] == "other-password" + assert other_entry.data[CONF_HOST] == "1.1.1.2" + assert other_entry.data[CONF_USERNAME] == "other-username" + assert other_entry.data[CONF_PASSWORD] == "other-password" result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -927,33 +948,27 @@ async def test_reconfigure_change_ip_to_existing( assert result["errors"] == {} # original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "test-password2", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password2", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == 
"reconfigure_successful" # updated entry - assert config_entry.data["host"] == "1.1.1.2" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password2" + assert config_entry.data[CONF_HOST] == "1.1.1.2" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password2" # unchanged other entry - assert other_entry.data["host"] == "1.1.1.2" - assert other_entry.data["username"] == "other-username" - assert other_entry.data["password"] == "other-password" - - -async def test_platforms(snapshot: SnapshotAssertion) -> None: - """Test if platform list changed and requires more tests.""" - assert snapshot == PLATFORMS + assert other_entry.data[CONF_HOST] == "1.1.1.2" + assert other_entry.data[CONF_USERNAME] == "other-username" + assert other_entry.data[CONF_PASSWORD] == "other-password" diff --git a/tests/components/enphase_envoy/test_diagnostics.py b/tests/components/enphase_envoy/test_diagnostics.py index a3b4f8e0f3c..186ee5c46f3 100644 --- a/tests/components/enphase_envoy/test_diagnostics.py +++ b/tests/components/enphase_envoy/test_diagnostics.py @@ -1,10 +1,20 @@ """Test Enphase Envoy diagnostics.""" -from syrupy import SnapshotAssertion +from unittest.mock import AsyncMock -from homeassistant.config_entries import ConfigEntry +from pyenphase.exceptions import EnvoyError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import ( + DOMAIN, + OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, +) from homeassistant.core import HomeAssistant +from . 
import setup_integration + +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -16,6 +26,8 @@ TO_EXCLUDE = { "last_updated", "last_changed", "last_reported", + "created_at", + "modified_at", } @@ -26,12 +38,55 @@ def limit_diagnostic_attrs(prop, path) -> bool: async def test_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, - setup_enphase_envoy, + mock_envoy: AsyncMock, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" + await setup_integration(hass, config_entry) assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) == snapshot(exclude=limit_diagnostic_attrs) + + +@pytest.fixture(name="config_entry_options") +def config_entry_options_fixture(hass: HomeAssistant, config: dict[str, str]): + """Define a config entry fixture.""" + return MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id="1234", + data=config, + options={OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True}, + ) + + +async def test_entry_diagnostics_with_fixtures( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + config_entry_options: MockConfigEntry, + mock_envoy: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, config_entry_options) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry_options + ) == snapshot(exclude=limit_diagnostic_attrs) + + +async def test_entry_diagnostics_with_fixtures_with_error( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + config_entry_options: MockConfigEntry, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, config_entry_options) + 
mock_envoy.request.side_effect = EnvoyError("Test") + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry_options + ) == snapshot(exclude=limit_diagnostic_attrs) diff --git a/tests/components/enphase_envoy/test_number.py b/tests/components/enphase_envoy/test_number.py new file mode 100644 index 00000000000..dac51ed5e26 --- /dev/null +++ b/tests/components/enphase_envoy/test_number.py @@ -0,0 +1,153 @@ +"""Test Enphase Envoy number sensors.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test number platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_no_number( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test number 
platform entities are not created.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, config_entry) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_number_operation_storage( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy number storage entities operation.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + test_entity = f"{Platform.NUMBER}.enpower_{sn}_reserve_battery_level" + + assert (entity_state := hass.states.get(test_entity)) + assert mock_envoy.data.tariff.storage_settings.reserved_soc == float( + entity_state.state + ) + test_value = 30.0 + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: test_entity, + ATTR_VALUE: test_value, + }, + blocking=True, + ) + + mock_envoy.set_reserve_soc.assert_awaited_once_with(test_value) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_number_operation_relays( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy number relay entities operation.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.NUMBER}." 
+ + for counter, (contact_id, dry_contact) in enumerate( + mock_envoy.data.dry_contact_settings.items() + ): + name = dry_contact.load_name.lower().replace(" ", "_") + test_entity = f"{entity_base}{name}_cutoff_battery_level" + assert (entity_state := hass.states.get(test_entity)) + assert mock_envoy.data.dry_contact_settings[contact_id].soc_low == float( + entity_state.state + ) + test_value = 10.0 + counter + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: test_entity, + ATTR_VALUE: test_value, + }, + blocking=True, + ) + + mock_envoy.update_dry_contact.assert_awaited_once_with( + {"id": contact_id, "soc_low": test_value} + ) + mock_envoy.update_dry_contact.reset_mock() + + test_entity = f"{entity_base}{name}_restore_battery_level" + assert (entity_state := hass.states.get(test_entity)) + assert mock_envoy.data.dry_contact_settings[contact_id].soc_high == float( + entity_state.state + ) + test_value = 80.0 - counter + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: test_entity, + ATTR_VALUE: test_value, + }, + blocking=True, + ) + + mock_envoy.update_dry_contact.assert_awaited_once_with( + {"id": contact_id, "soc_high": test_value} + ) + mock_envoy.update_dry_contact.reset_mock() diff --git a/tests/components/enphase_envoy/test_select.py b/tests/components/enphase_envoy/test_select.py new file mode 100644 index 00000000000..38640f53dea --- /dev/null +++ b/tests/components/enphase_envoy/test_select.py @@ -0,0 +1,221 @@ +"""Test Enphase Envoy select.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.enphase_envoy.select import ( + ACTION_OPTIONS, + MODE_OPTIONS, + RELAY_ACTION_MAP, + RELAY_MODE_MAP, + REVERSE_RELAY_ACTION_MAP, + REVERSE_RELAY_MODE_MAP, + REVERSE_STORAGE_MODE_MAP, + STORAGE_MODE_MAP, + 
STORAGE_MODE_OPTIONS, +) +from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test select platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_no_select( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test select platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_select_relay_actions( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test select platform entities dry contact relay actions.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", 
[Platform.SELECT]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.SELECT}." + + for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): + name = dry_contact.load_name.lower().replace(" ", "_") + for target in ( + ("generator_action", dry_contact.generator_action, "generator_action"), + ("microgrid_action", dry_contact.micro_grid_action, "micro_grid_action"), + ("grid_action", dry_contact.grid_action, "grid_action"), + ): + test_entity = f"{entity_base}{name}_{target[0]}" + assert (entity_state := hass.states.get(test_entity)) + assert RELAY_ACTION_MAP[target[1]] == (current_state := entity_state.state) + # set all relay modes except current mode + for action in [action for action in ACTION_OPTIONS if not current_state]: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: action, + }, + blocking=True, + ) + mock_envoy.update_dry_contact.assert_called_once_with( + {"id": contact_id, target[2]: REVERSE_RELAY_ACTION_MAP[action]} + ) + mock_envoy.update_dry_contact.reset_mock() + # and finally back to original + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: current_state, + }, + blocking=True, + ) + mock_envoy.update_dry_contact.assert_called_once_with( + {"id": contact_id, target[2]: REVERSE_RELAY_ACTION_MAP[current_state]} + ) + mock_envoy.update_dry_contact.reset_mock() + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_select_relay_modes( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test select platform dry contact relay mode changes.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.SELECT}." 
+ + for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): + name = dry_contact.load_name.lower().replace(" ", "_") + test_entity = f"{entity_base}{name}_mode" + assert (entity_state := hass.states.get(test_entity)) + assert RELAY_MODE_MAP[dry_contact.mode] == (current_state := entity_state.state) + for mode in [mode for mode in MODE_OPTIONS if not current_state]: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: mode, + }, + blocking=True, + ) + mock_envoy.update_dry_contact.assert_called_once_with( + {"id": contact_id, "mode": REVERSE_RELAY_MODE_MAP[mode]} + ) + mock_envoy.update_dry_contact.reset_mock() + + # and finally current mode again + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: current_state, + }, + blocking=True, + ) + mock_envoy.update_dry_contact.assert_called_once_with( + {"id": contact_id, "mode": REVERSE_RELAY_MODE_MAP[current_state]} + ) + mock_envoy.update_dry_contact.reset_mock() + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_select_storage_modes( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test select platform entities storage mode changes.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + test_entity = f"{Platform.SELECT}.enpower_{sn}_storage_mode" + + assert (entity_state := hass.states.get(test_entity)) + assert STORAGE_MODE_MAP[mock_envoy.data.tariff.storage_settings.mode] == ( + current_state := entity_state.state + ) + + for mode in [mode for mode in STORAGE_MODE_OPTIONS if not current_state]: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + 
ATTR_OPTION: mode, + }, + blocking=True, + ) + mock_envoy.set_storage_mode.assert_called_once_with( + REVERSE_STORAGE_MODE_MAP[mode] + ) + mock_envoy.set_storage_mode.reset_mock() + + # and finally with original mode + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: current_state, + }, + blocking=True, + ) + mock_envoy.set_storage_mode.assert_called_once_with( + REVERSE_STORAGE_MODE_MAP[current_state] + ) diff --git a/tests/components/enphase_envoy/test_sensor.py b/tests/components/enphase_envoy/test_sensor.py index 13727e29eac..273f81173ff 100644 --- a/tests/components/enphase_envoy/test_sensor.py +++ b/tests/components/enphase_envoy/test_sensor.py @@ -1,58 +1,914 @@ """Test Enphase Envoy sensors.""" -from unittest.mock import patch +from itertools import chain +from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory +from pyenphase.const import PHASENAMES import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.enphase_envoy import DOMAIN from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.enphase_envoy.coordinator import SCAN_INTERVAL +from homeassistant.const import STATE_UNKNOWN, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util +from homeassistant.util.unit_conversion import TemperatureConverter -from tests.common import MockConfigEntry - - -@pytest.fixture(name="setup_enphase_envoy_sensor") -async def setup_enphase_envoy_sensor_fixture(hass, config, mock_envoy): - """Define a fixture to set up Enphase Envoy with sensor platform only.""" - with ( - patch( - "homeassistant.components.enphase_envoy.config_flow.Envoy", - return_value=mock_envoy, - ), - patch( - 
"homeassistant.components.enphase_envoy.Envoy", - return_value=mock_envoy, - ), - patch( - "homeassistant.components.enphase_envoy.PLATFORMS", - [Platform.SENSOR], - ), - ): - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - yield +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry: MockConfigEntry, snapshot: SnapshotAssertion, - setup_enphase_envoy_sensor, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, ) -> None: - """Test enphase_envoy sensor entities.""" - # compare registered entities against snapshot of prior run - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - assert entity_entries - assert entity_entries == snapshot + """Test sensor platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - # Test if all entities still have same state - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" + +PRODUCTION_NAMES: tuple[str, ...] 
= ( + "current_power_production", + "energy_production_today", + "energy_production_last_seven_days", + "lifetime_energy_production", +) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_production_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test production entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.system_production + PRODUCTION_TARGETS: tuple[float, ...] = ( + data.watts_now / 1000.0, + data.watt_hours_today / 1000.0, + data.watt_hours_last_7_days / 1000.0, + data.watt_hours_lifetime / 1000000.0, + ) + + for name, target in list(zip(PRODUCTION_NAMES, PRODUCTION_TARGETS, strict=False)): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + +PRODUCTION_PHASE_NAMES: list[str] = [ + f"{name}_{phase.lower()}" for phase in PHASENAMES for name in PRODUCTION_NAMES +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_production_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test production phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + 
PRODUCTION_PHASE_TARGET = chain( + *[ + ( + phase_data.watts_now / 1000.0, + phase_data.watt_hours_today / 1000.0, + phase_data.watt_hours_last_7_days / 1000.0, + phase_data.watt_hours_lifetime / 1000000.0, + ) + for phase_data in mock_envoy.data.system_production_phases.values() + ] + ) + + for name, target in list( + zip(PRODUCTION_PHASE_NAMES, PRODUCTION_PHASE_TARGET, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + +CONSUMPTION_NAMES: tuple[str, ...] = ( + "current_power_consumption", + "energy_consumption_today", + "energy_consumption_last_seven_days", + "lifetime_energy_consumption", +) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_consumption_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.system_consumption + CONSUMPTION_TARGETS = ( + data.watts_now / 1000.0, + data.watt_hours_today / 1000.0, + data.watt_hours_last_7_days / 1000.0, + data.watt_hours_lifetime / 1000000.0, + ) + + for name, target in list(zip(CONSUMPTION_NAMES, CONSUMPTION_TARGETS, strict=False)): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + +CONSUMPTION_PHASE_NAMES: list[str] = [ + f"{name}_{phase.lower()}" for phase in PHASENAMES for name in CONSUMPTION_NAMES +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + 
indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_consumption_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + CONSUMPTION_PHASE_TARGET = chain( + *[ + ( + phase_data.watts_now / 1000.0, + phase_data.watt_hours_today / 1000.0, + phase_data.watt_hours_last_7_days / 1000.0, + phase_data.watt_hours_lifetime / 1000000.0, + ) + for phase_data in mock_envoy.data.system_consumption_phases.values() + ] + ) + + for name, target in list( + zip(CONSUMPTION_PHASE_NAMES, CONSUMPTION_PHASE_TARGET, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + +CT_PRODUCTION_NAMES_INT = ("meter_status_flags_active_production_ct",) +CT_PRODUCTION_NAMES_STR = ("metering_status_production_ct",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_production_ct_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test production CT phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.ctmeter_production + + CT_PRODUCTION_TARGETS_INT = (len(data.status_flags),) + for name, target in list( + zip(CT_PRODUCTION_NAMES_INT, CT_PRODUCTION_TARGETS_INT, strict=False) + ): + assert (entity_state := 
hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_PRODUCTION_TARGETS_STR = (data.metering_status,) + for name, target in list( + zip(CT_PRODUCTION_NAMES_STR, CT_PRODUCTION_TARGETS_STR, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +CT_PRODUCTION_NAMES_FLOAT_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in CT_PRODUCTION_NAMES_INT +] + +CT_PRODUCTION_NAMES_STR_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in CT_PRODUCTION_NAMES_STR +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_production_ct_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test production ct phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + CT_PRODUCTION_NAMES_FLOAT_TARGET = [ + len(phase_data.status_flags) + for phase_data in mock_envoy.data.ctmeter_production_phases.values() + ] + + for name, target in list( + zip( + CT_PRODUCTION_NAMES_FLOAT_PHASE, + CT_PRODUCTION_NAMES_FLOAT_TARGET, + strict=False, ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_PRODUCTION_NAMES_STR_TARGET = [ + phase_data.metering_status + for phase_data in mock_envoy.data.ctmeter_production_phases.values() + ] + + for name, target in list( + zip( + CT_PRODUCTION_NAMES_STR_PHASE, + CT_PRODUCTION_NAMES_STR_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert 
entity_state.state == target + + +CT_CONSUMPTION_NAMES_FLOAT: tuple[str, ...] = ( + "lifetime_net_energy_consumption", + "lifetime_net_energy_production", + "current_net_power_consumption", + "frequency_net_consumption_ct", + "voltage_net_consumption_ct", + "meter_status_flags_active_net_consumption_ct", +) + +CT_CONSUMPTION_NAMES_STR: tuple[str, ...] = ("metering_status_net_consumption_ct",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_consumption_ct_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption CT phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.ctmeter_consumption + + CT_CONSUMPTION_TARGETS_FLOAT = ( + data.energy_delivered / 1000000.0, + data.energy_received / 1000000.0, + data.active_power / 1000.0, + data.frequency, + data.voltage, + len(data.status_flags), + ) + for name, target in list( + zip(CT_CONSUMPTION_NAMES_FLOAT, CT_CONSUMPTION_TARGETS_FLOAT, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_CONSUMPTION_TARGETS_STR = (data.metering_status,) + for name, target in list( + zip(CT_CONSUMPTION_NAMES_STR, CT_CONSUMPTION_TARGETS_STR, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +CT_CONSUMPTION_NAMES_FLOAT_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in CT_CONSUMPTION_NAMES_FLOAT +] + +CT_CONSUMPTION_NAMES_STR_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for 
name in CT_CONSUMPTION_NAMES_STR +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_consumption_ct_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption ct phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + CT_CONSUMPTION_NAMES_FLOAT_PHASE_TARGET = chain( + *[ + ( + phase_data.energy_delivered / 1000000.0, + phase_data.energy_received / 1000000.0, + phase_data.active_power / 1000.0, + phase_data.frequency, + phase_data.voltage, + len(phase_data.status_flags), + ) + for phase_data in mock_envoy.data.ctmeter_consumption_phases.values() + ] + ) + + for name, target in list( + zip( + CT_CONSUMPTION_NAMES_FLOAT_PHASE, + CT_CONSUMPTION_NAMES_FLOAT_PHASE_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_CONSUMPTION_NAMES_STR_PHASE_TARGET = [ + phase_data.metering_status + for phase_data in mock_envoy.data.ctmeter_consumption_phases.values() + ] + + for name, target in list( + zip( + CT_CONSUMPTION_NAMES_STR_PHASE, + CT_CONSUMPTION_NAMES_STR_PHASE_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +CT_STORAGE_NAMES_FLOAT = ( + "lifetime_battery_energy_discharged", + "lifetime_battery_energy_charged", + "current_battery_discharge", + "voltage_storage_ct", + "meter_status_flags_active_storage_ct", +) +CT_STORAGE_NAMES_STR = ("metering_status_storage_ct",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", 
+ ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_storage_ct_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test storage phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.ctmeter_storage + + CT_STORAGE_TARGETS_FLOAT = ( + data.energy_delivered / 1000000.0, + data.energy_received / 1000000.0, + data.active_power / 1000.0, + data.voltage, + len(data.status_flags), + ) + for name, target in list( + zip(CT_STORAGE_NAMES_FLOAT, CT_STORAGE_TARGETS_FLOAT, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_STORAGE_TARGETS_STR = (data.metering_status,) + for name, target in list( + zip(CT_STORAGE_NAMES_STR, CT_STORAGE_TARGETS_STR, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +CT_STORAGE_NAMES_FLOAT_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in (CT_STORAGE_NAMES_FLOAT) +] + +CT_STORAGE_NAMES_STR_PHASE = [ + f"{name}_{phase.lower()}" for phase in PHASENAMES for name in (CT_STORAGE_NAMES_STR) +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_storage_ct_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test storage ct phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: 
str = f"{Platform.SENSOR}.envoy_{sn}" + + CT_STORAGE_NAMES_FLOAT_PHASE_TARGET = chain( + *[ + ( + phase_data.energy_delivered / 1000000.0, + phase_data.energy_received / 1000000.0, + phase_data.active_power / 1000.0, + phase_data.voltage, + len(phase_data.status_flags), + ) + for phase_data in mock_envoy.data.ctmeter_storage_phases.values() + ] + ) + + for name, target in list( + zip( + CT_STORAGE_NAMES_FLOAT_PHASE, + CT_STORAGE_NAMES_FLOAT_PHASE_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_STORAGE_NAMES_STR_PHASE_TARGET = [ + phase_data.metering_status + for phase_data in mock_envoy.data.ctmeter_storage_phases.values() + ] + + for name, target in list( + zip( + CT_STORAGE_NAMES_STR_PHASE, + CT_STORAGE_NAMES_STR_PHASE_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +async def test_sensor_all_phase_entities_disabled_by_integration( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all phase entities are disabled by integration.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + assert all( + f"{ENTITY_BASE}_{entity}" + in (integration_disabled_entities(entity_registry, config_entry)) + for entity in ( + PRODUCTION_PHASE_NAMES + + CONSUMPTION_PHASE_NAMES + + CT_PRODUCTION_NAMES_FLOAT_PHASE + + CT_PRODUCTION_NAMES_STR_PHASE + + CT_CONSUMPTION_NAMES_FLOAT_PHASE + + CT_CONSUMPTION_NAMES_STR_PHASE + ) + ) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + 
"envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +async def test_sensor_storage_phase_disabled_by_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_envoy: AsyncMock, +) -> None: + """Test all storage CT phase entities are disabled by integration.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + assert all( + f"{ENTITY_BASE}_{entity}" + in integration_disabled_entities(entity_registry, config_entry) + for entity in (CT_STORAGE_NAMES_FLOAT_PHASE + CT_STORAGE_NAMES_STR_PHASE) + ) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_inverter_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy inverter entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.SENSOR}.inverter" + + for sn, inverter in mock_envoy.data.inverters.items(): + assert (entity_state := hass.states.get(f"{entity_base}_{sn}")) + assert float(entity_state.state) == (inverter.last_report_watts) + assert (last_reported := hass.states.get(f"{entity_base}_{sn}_last_reported")) + assert dt_util.parse_datetime( + last_reported.state + ) == dt_util.utc_from_timestamp(inverter.last_report_date) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def 
test_sensor_inverter_disabled_by_integration( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test enphase_envoy inverter disabled by integration entities.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + INVERTER_BASE = f"{Platform.SENSOR}.inverter" + + assert all( + f"{INVERTER_BASE}_{sn}_last_reported" + in integration_disabled_entities(entity_registry, config_entry) + for sn in mock_envoy.data.inverters + ) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +async def test_sensor_encharge_aggregate_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy encharge aggregate entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.encharge_aggregate + + for target in ( + ("battery", data.state_of_charge), + ("reserve_battery_level", data.reserve_state_of_charge), + ("available_battery_energy", data.available_energy), + ("reserve_battery_energy", data.backup_reserve), + ("battery_capacity", data.max_available_capacity), + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{target[0]}")) + assert float(entity_state.state) == target[1] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +async def test_sensor_encharge_enpower_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy encharge enpower entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await 
setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + ENTITY_BASE = f"{Platform.SENSOR}.enpower" + + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_temperature")) + assert ( + round( + TemperatureConverter.convert( + float(entity_state.state), + hass.config.units.temperature_unit, + UnitOfTemperature.FAHRENHEIT + if mock_envoy.data.enpower.temperature_unit == "F" + else UnitOfTemperature.CELSIUS, + ) + ) + == mock_envoy.data.enpower.temperature + ) + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_last_reported")) + assert dt_util.parse_datetime(entity_state.state) == dt_util.utc_from_timestamp( + mock_envoy.data.enpower.last_report_date + ) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +async def test_sensor_encharge_power_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test enphase_envoy encharge_power entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + ENTITY_BASE = f"{Platform.SENSOR}.encharge" + + ENCHARGE_POWER_NAMES = ( + "battery", + "apparent_power", + "power", + ) + + ENCHARGE_POWER_TARGETS = [ + ( + sn, + ( + encharge_power.soc, + encharge_power.apparent_power_mva / 1000.0, + encharge_power.real_power_mw / 1000.0, + ), + ) + for sn, encharge_power in mock_envoy.data.encharge_power.items() + ] + + for sn, sn_target in ENCHARGE_POWER_TARGETS: + for name, target in list(zip(ENCHARGE_POWER_NAMES, sn_target, strict=False)): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_{name}")) + assert float(entity_state.state) == target + + for sn, encharge_inventory in mock_envoy.data.encharge_inventory.items(): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_temperature")) + assert ( + round( + TemperatureConverter.convert( + float(entity_state.state), + 
hass.config.units.temperature_unit, + UnitOfTemperature.FAHRENHEIT + if encharge_inventory.temperature_unit == "F" + else UnitOfTemperature.CELSIUS, + ) + ) + == encharge_inventory.temperature + ) + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_last_reported")) + assert dt_util.parse_datetime(entity_state.state) == dt_util.utc_from_timestamp( + encharge_inventory.last_report_date + ) + + +def integration_disabled_entities( + entity_registry: er.EntityRegistry, config_entry: MockConfigEntry +) -> list[str]: + """Return list of entity ids marked as disabled by integration.""" + return [ + entity_entry.entity_id + for entity_entry in er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + if entity_entry.disabled_by == er.RegistryEntryDisabler.INTEGRATION + ] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_missing_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test enphase_envoy sensor platform midding data handling.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + ENTITY_BASE = f"{Platform.SENSOR}.envoy_{mock_envoy.serial_number}" + + # force missing data to test 'if == none' code sections + mock_envoy.data.system_production_phases["L2"] = None + mock_envoy.data.system_consumption_phases["L2"] = None + mock_envoy.data.ctmeter_production = None + mock_envoy.data.ctmeter_consumption = None + mock_envoy.data.ctmeter_storage = None + mock_envoy.data.ctmeter_production_phases = None + mock_envoy.data.ctmeter_consumption_phases = None + mock_envoy.data.ctmeter_storage_phases = None + + # use different inverter serial to test 'expected inverter missing' code + 
mock_envoy.data.inverters["2"] = mock_envoy.data.inverters.pop("1") + + # force HA to detect changed data by changing raw + mock_envoy.data.raw = {"I": "am changed"} + + # MOve time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + # all these should now be in unknown state + for entity in ( + "lifetime_energy_production_l2", + "lifetime_energy_consumption_l2", + "metering_status_production_ct", + "metering_status_net_consumption_ct", + "metering_status_storage_ct", + "metering_status_production_ct_l2", + "metering_status_net_consumption_ct_l2", + "metering_status_storage_ct_l2", + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{entity}")) + assert entity_state.state == STATE_UNKNOWN + + # test the original inverter is now unknown + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == STATE_UNKNOWN diff --git a/tests/components/enphase_envoy/test_switch.py b/tests/components/enphase_envoy/test_switch.py new file mode 100644 index 00000000000..15f59cc3ea6 --- /dev/null +++ b/tests/components/enphase_envoy/test_switch.py @@ -0,0 +1,213 @@ +"""Test Enphase Envoy switch platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_switch( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_no_switch( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch platform entities are not created.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_switch_grid_operation( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test switch platform operation for grid switches.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + test_entity = f"{Platform.SWITCH}.enpower_{sn}_grid_enabled" + + # validate envoy value is reflected in entity + assert (entity_state := hass.states.get(test_entity)) + assert entity_state.state == 
STATE_ON + + # test grid status switch operation + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.go_off_grid.assert_awaited_once_with() + mock_envoy.go_off_grid.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.go_on_grid.assert_awaited_once_with() + mock_envoy.go_on_grid.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.go_off_grid.assert_awaited_once_with() + mock_envoy.go_off_grid.reset_mock() + + test_entity = f"{Platform.SWITCH}.enpower_{sn}_charge_from_grid" + + # validate envoy value is reflected in entity + assert (entity_state := hass.states.get(test_entity)) + assert entity_state.state == STATE_ON + + # test grid status switch operation + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.disable_charge_from_grid.assert_awaited_once_with() + mock_envoy.disable_charge_from_grid.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.enable_charge_from_grid.assert_awaited_once_with() + mock_envoy.enable_charge_from_grid.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.disable_charge_from_grid.assert_awaited_once_with() + mock_envoy.disable_charge_from_grid.reset_mock() + + +@pytest.mark.parametrize( + ("mock_envoy", "entity_states"), + [ + ( + "envoy_metered_batt_relay", + { + "NC1": (STATE_OFF, 0, 1), + "NC2": (STATE_ON, 1, 0), + "NC3": (STATE_OFF, 0, 1), + }, + ) + ], + indirect=["mock_envoy"], +) +async def test_switch_relay_operation( + hass: HomeAssistant, + mock_envoy: AsyncMock, + 
config_entry: MockConfigEntry, + entity_states: dict[str, tuple[str, int, int]], +) -> None: + """Test enphase_envoy switch relay entities operation.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.SWITCH}." + + for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): + name = dry_contact.load_name.lower().replace(" ", "_") + test_entity = f"{entity_base}{name}" + assert (entity_state := hass.states.get(test_entity)) + assert entity_state.state == entity_states[contact_id][0] + open_count = entity_states[contact_id][1] + close_count = entity_states[contact_id][2] + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + + mock_envoy.open_dry_contact.assert_awaited_once_with(contact_id) + mock_envoy.close_dry_contact.assert_not_awaited() + mock_envoy.open_dry_contact.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + + mock_envoy.close_dry_contact.assert_awaited_once_with(contact_id) + mock_envoy.open_dry_contact.assert_not_awaited() + mock_envoy.close_dry_contact.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + + assert mock_envoy.open_dry_contact.await_count == open_count + assert mock_envoy.close_dry_contact.await_count == close_count + mock_envoy.open_dry_contact.reset_mock() + mock_envoy.close_dry_contact.reset_mock() diff --git a/tests/components/environment_canada/__init__.py b/tests/components/environment_canada/__init__.py index 65b0ed16207..92c28e09b74 100644 --- a/tests/components/environment_canada/__init__.py +++ b/tests/components/environment_canada/__init__.py @@ -1 +1,67 @@ """Tests for the Environment Canada integration.""" + +from datetime import UTC, datetime +from 
unittest.mock import AsyncMock, MagicMock, patch + +from homeassistant.components.environment_canada.const import CONF_STATION, DOMAIN +from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +FIXTURE_USER_INPUT = { + CONF_LATITUDE: 55.55, + CONF_LONGITUDE: 42.42, + CONF_STATION: "XX/1234567", + CONF_LANGUAGE: "Gibberish", +} + + +async def init_integration(hass: HomeAssistant, ec_data) -> MockConfigEntry: + """Set up the Environment Canada integration in Home Assistant.""" + + def mock_ec(): + ec_mock = MagicMock() + ec_mock.station_id = FIXTURE_USER_INPUT[CONF_STATION] + ec_mock.lat = FIXTURE_USER_INPUT[CONF_LATITUDE] + ec_mock.lon = FIXTURE_USER_INPUT[CONF_LONGITUDE] + ec_mock.language = FIXTURE_USER_INPUT[CONF_LANGUAGE] + ec_mock.update = AsyncMock() + return ec_mock + + config_entry = MockConfigEntry(domain=DOMAIN, data=FIXTURE_USER_INPUT, title="Home") + config_entry.add_to_hass(hass) + + weather_mock = mock_ec() + ec_data["metadata"]["timestamp"] = datetime(2022, 10, 4, tzinfo=UTC) + weather_mock.conditions = ec_data["conditions"] + weather_mock.alerts = ec_data["alerts"] + weather_mock.daily_forecasts = ec_data["daily_forecasts"] + weather_mock.metadata = ec_data["metadata"] + + radar_mock = mock_ec() + radar_mock.image = b"GIF..." 
+ radar_mock.timestamp = datetime(2022, 10, 4, tzinfo=UTC) + + with ( + patch( + "homeassistant.components.environment_canada.ECWeather", + return_value=weather_mock, + ), + patch( + "homeassistant.components.environment_canada.ECAirQuality", + return_value=mock_ec(), + ), + patch( + "homeassistant.components.environment_canada.ECRadar", + return_value=radar_mock, + ), + patch( + "homeassistant.components.environment_canada.config_flow.ECWeather", + return_value=weather_mock, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/environment_canada/conftest.py b/tests/components/environment_canada/conftest.py new file mode 100644 index 00000000000..69cec187d11 --- /dev/null +++ b/tests/components/environment_canada/conftest.py @@ -0,0 +1,27 @@ +"""Common fixture for Environment Canada tests.""" + +import contextlib +from datetime import datetime +import json + +import pytest + +from tests.common import load_fixture + + +@pytest.fixture +def ec_data(): + """Load Environment Canada data.""" + + def date_hook(weather): + """Convert timestamp string to datetime.""" + + if t := weather.get("timestamp"): + with contextlib.suppress(ValueError): + weather["timestamp"] = datetime.fromisoformat(t) + return weather + + return json.loads( + load_fixture("environment_canada/current_conditions_data.json"), + object_hook=date_hook, + ) diff --git a/tests/components/environment_canada/fixtures/current_conditions_data.json b/tests/components/environment_canada/fixtures/current_conditions_data.json index f3a18869940..ceb00028f95 100644 --- a/tests/components/environment_canada/fixtures/current_conditions_data.json +++ b/tests/components/environment_canada/fixtures/current_conditions_data.json @@ -135,7 +135,8 @@ "icon_code": "30", "temperature": -1, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-03 15:00:00+00:00" }, { 
"period": "Tuesday", @@ -143,7 +144,8 @@ "icon_code": "00", "temperature": 18, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-04 15:00:00+00:00" }, { "period": "Tuesday night", @@ -151,7 +153,8 @@ "icon_code": "30", "temperature": 3, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-04 15:00:00+00:00" }, { "period": "Wednesday", @@ -159,7 +162,8 @@ "icon_code": "00", "temperature": 20, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-05 15:00:00+00:00" }, { "period": "Wednesday night", @@ -167,7 +171,8 @@ "icon_code": "30", "temperature": 9, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-05 15:00:00+00:00" }, { "period": "Thursday", @@ -175,7 +180,8 @@ "icon_code": "02", "temperature": 20, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-06 15:00:00+00:00" }, { "period": "Thursday night", @@ -183,7 +189,8 @@ "icon_code": "12", "temperature": 7, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-06 15:00:00+00:00" }, { "period": "Friday", @@ -191,7 +198,8 @@ "icon_code": "12", "temperature": 13, "temperature_class": "high", - "precip_probability": 40 + "precip_probability": 40, + "timestamp": "2022-10-07 15:00:00+00:00" }, { "period": "Friday night", @@ -199,7 +207,8 @@ "icon_code": "32", "temperature": 1, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-07 15:00:00+00:00" }, { "period": "Saturday", @@ -207,7 +216,8 @@ "icon_code": "02", "temperature": 10, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-08 15:00:00+00:00" }, { "period": "Saturday night", @@ -215,7 +225,8 @@ "icon_code": "32", "temperature": 3, 
"temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-08 15:00:00+00:00" }, { "period": "Sunday", @@ -223,7 +234,8 @@ "icon_code": "02", "temperature": 12, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-09 15:00:00+00:00" } ], "metadata": { diff --git a/tests/components/environment_canada/snapshots/test_weather.ambr b/tests/components/environment_canada/snapshots/test_weather.ambr new file mode 100644 index 00000000000..cfa0ad912a4 --- /dev/null +++ b/tests/components/environment_canada/snapshots/test_weather.ambr @@ -0,0 +1,94 @@ +# serializer version: 1 +# name: test_forecast_daily + dict({ + 'weather.home_forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2022-10-04T15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 18.0, + 'templow': 3.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2022-10-05T15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 20.0, + 'templow': 9.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2022-10-06T15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 20.0, + 'templow': 7.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2022-10-07T15:00:00+00:00', + 'precipitation_probability': 40, + 'temperature': 13.0, + 'templow': 1.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2022-10-08T15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 10.0, + 'templow': 3.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_daily_with_some_previous_days_data + dict({ + 'weather.home_forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'clear-night', + 'datetime': '2022-10-03T15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': None, + 'templow': -1.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2022-10-04T15:00:00+00:00', + 'precipitation_probability': 0, + 
'temperature': 18.0, + 'templow': 3.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2022-10-05T15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 20.0, + 'templow': 9.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2022-10-06T15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 20.0, + 'templow': 7.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2022-10-07T15:00:00+00:00', + 'precipitation_probability': 40, + 'temperature': 13.0, + 'templow': 1.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2022-10-08T15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 10.0, + 'templow': 3.0, + }), + ]), + }), + }) +# --- diff --git a/tests/components/environment_canada/test_config_flow.py b/tests/components/environment_canada/test_config_flow.py index f2c35ab4295..d61966e8da1 100644 --- a/tests/components/environment_canada/test_config_flow.py +++ b/tests/components/environment_canada/test_config_flow.py @@ -1,7 +1,7 @@ """Test the Environment Canada (EC) config flow.""" from unittest.mock import AsyncMock, MagicMock, Mock, patch -import xml.etree.ElementTree as et +import xml.etree.ElementTree as ET import aiohttp import pytest @@ -94,7 +94,7 @@ async def test_create_same_entry_twice(hass: HomeAssistant) -> None: (aiohttp.ClientResponseError(Mock(), (), status=404), "bad_station_id"), (aiohttp.ClientResponseError(Mock(), (), status=400), "error_response"), (aiohttp.ClientConnectionError, "cannot_connect"), - (et.ParseError, "bad_station_id"), + (ET.ParseError, "bad_station_id"), (ValueError, "unknown"), ], ) diff --git a/tests/components/environment_canada/test_diagnostics.py b/tests/components/environment_canada/test_diagnostics.py index 8f800111d39..79b72961124 100644 --- a/tests/components/environment_canada/test_diagnostics.py +++ b/tests/components/environment_canada/test_diagnostics.py @@ -1,16 +1,17 @@ """Test Environment Canada diagnostics.""" -from datetime import UTC, 
datetime import json -from unittest.mock import AsyncMock, MagicMock, patch +from typing import Any from syrupy import SnapshotAssertion -from homeassistant.components.environment_canada.const import CONF_STATION, DOMAIN +from homeassistant.components.environment_canada.const import CONF_STATION from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry, load_fixture +from . import init_integration + +from tests.common import load_fixture from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -22,68 +23,19 @@ FIXTURE_USER_INPUT = { } -async def init_integration(hass: HomeAssistant) -> MockConfigEntry: - """Set up the Environment Canada integration in Home Assistant.""" - - def mock_ec(): - ec_mock = MagicMock() - ec_mock.station_id = FIXTURE_USER_INPUT[CONF_STATION] - ec_mock.lat = FIXTURE_USER_INPUT[CONF_LATITUDE] - ec_mock.lon = FIXTURE_USER_INPUT[CONF_LONGITUDE] - ec_mock.language = FIXTURE_USER_INPUT[CONF_LANGUAGE] - ec_mock.update = AsyncMock() - return ec_mock - - config_entry = MockConfigEntry(domain=DOMAIN, data=FIXTURE_USER_INPUT) - config_entry.add_to_hass(hass) +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + ec_data: dict[str, Any], +) -> None: + """Test config entry diagnostics.""" ec_data = json.loads( load_fixture("environment_canada/current_conditions_data.json") ) - weather_mock = mock_ec() - ec_data["metadata"]["timestamp"] = datetime(2022, 10, 4, tzinfo=UTC) - weather_mock.conditions = ec_data["conditions"] - weather_mock.alerts = ec_data["alerts"] - weather_mock.daily_forecasts = ec_data["daily_forecasts"] - weather_mock.metadata = ec_data["metadata"] - - radar_mock = mock_ec() - radar_mock.image = b"GIF..." 
- radar_mock.timestamp = datetime(2022, 10, 4, tzinfo=UTC) - - with ( - patch( - "homeassistant.components.environment_canada.ECWeather", - return_value=weather_mock, - ), - patch( - "homeassistant.components.environment_canada.ECAirQuality", - return_value=mock_ec(), - ), - patch( - "homeassistant.components.environment_canada.ECRadar", - return_value=radar_mock, - ), - patch( - "homeassistant.components.environment_canada.config_flow.ECWeather", - return_value=weather_mock, - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry - - -async def test_entry_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test config entry diagnostics.""" - - config_entry = await init_integration(hass) + config_entry = await init_integration(hass, ec_data) diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) diff --git a/tests/components/environment_canada/test_weather.py b/tests/components/environment_canada/test_weather.py new file mode 100644 index 00000000000..8e22f68462f --- /dev/null +++ b/tests/components/environment_canada/test_weather.py @@ -0,0 +1,58 @@ +"""Test weather.""" + +import copy +from typing import Any + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.weather import ( + DOMAIN as WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, +) +from homeassistant.core import HomeAssistant + +from . 
import init_integration + + +async def test_forecast_daily( + hass: HomeAssistant, snapshot: SnapshotAssertion, ec_data: dict[str, Any] +) -> None: + """Test basic forecast.""" + + # First entry in test data is a half day; we don't want that for this test + local_ec_data = copy.deepcopy(ec_data) + del local_ec_data["daily_forecasts"][0] + + await init_integration(hass, local_ec_data) + + response = await hass.services.async_call( + WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, + { + "entity_id": "weather.home_forecast", + "type": "daily", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + + +async def test_forecast_daily_with_some_previous_days_data( + hass: HomeAssistant, snapshot: SnapshotAssertion, ec_data: dict[str, Any] +) -> None: + """Test forecast with half day at start.""" + + await init_integration(hass, ec_data) + + response = await hass.services.async_call( + WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, + { + "entity_id": "weather.home_forecast", + "type": "daily", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index 43edca54158..ea4099560cd 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from asyncio import Event -from collections.abc import Awaitable, Callable +from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine from pathlib import Path from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch @@ -19,6 +19,8 @@ from aioesphomeapi import ( HomeassistantServiceCall, ReconnectLogic, UserService, + VoiceAssistantAudioSettings, + VoiceAssistantEventType, VoiceAssistantFeature, ) import pytest @@ -32,6 +34,11 @@ from homeassistant.components.esphome.const import ( DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, DOMAIN, ) +from 
homeassistant.components.esphome.entry_data import RuntimeEntryData +from homeassistant.components.esphome.voice_assistant import ( + VoiceAssistantAPIPipeline, + VoiceAssistantUDPPipeline, +) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -40,6 +47,8 @@ from . import DASHBOARD_HOST, DASHBOARD_PORT, DASHBOARD_SLUG from tests.common import MockConfigEntry +_ONE_SECOND = 16000 * 2 # 16Khz 16-bit + @pytest.fixture(autouse=True) def mock_bluetooth(enable_bluetooth: None) -> None: @@ -166,7 +175,7 @@ def mock_client(mock_device_info) -> APIClient: @pytest.fixture -async def mock_dashboard(hass): +async def mock_dashboard(hass: HomeAssistant) -> AsyncGenerator[dict[str, Any]]: """Mock dashboard.""" data = {"configured": [], "importable": []} with patch( @@ -196,6 +205,20 @@ class MockESPHomeDevice: self.home_assistant_state_subscription_callback: Callable[ [str, str | None], None ] + self.voice_assistant_handle_start_callback: Callable[ + [str, int, VoiceAssistantAudioSettings, str | None], + Coroutine[Any, Any, int | None], + ] + self.voice_assistant_handle_stop_callback: Callable[ + [], Coroutine[Any, Any, None] + ] + self.voice_assistant_handle_audio_callback: ( + Callable[ + [bytes], + Coroutine[Any, Any, None], + ] + | None + ) self.device_info = device_info def set_state_callback(self, state_callback: Callable[[EntityState], None]) -> None: @@ -255,6 +278,47 @@ class MockESPHomeDevice: """Mock a state subscription.""" self.home_assistant_state_subscription_callback(entity_id, attribute) + def set_subscribe_voice_assistant_callbacks( + self, + handle_start: Callable[ + [str, int, VoiceAssistantAudioSettings, str | None], + Coroutine[Any, Any, int | None], + ], + handle_stop: Callable[[], Coroutine[Any, Any, None]], + handle_audio: ( + Callable[ + [bytes], + Coroutine[Any, Any, None], + ] + | None + ) = None, + ) -> None: + """Set the voice 
assistant subscription callbacks.""" + self.voice_assistant_handle_start_callback = handle_start + self.voice_assistant_handle_stop_callback = handle_stop + self.voice_assistant_handle_audio_callback = handle_audio + + async def mock_voice_assistant_handle_start( + self, + conversation_id: str, + flags: int, + settings: VoiceAssistantAudioSettings, + wake_word_phrase: str | None, + ) -> int | None: + """Mock voice assistant handle start.""" + return await self.voice_assistant_handle_start_callback( + conversation_id, flags, settings, wake_word_phrase + ) + + async def mock_voice_assistant_handle_stop(self) -> None: + """Mock voice assistant handle stop.""" + await self.voice_assistant_handle_stop_callback() + + async def mock_voice_assistant_handle_audio(self, audio: bytes) -> None: + """Mock voice assistant handle audio.""" + assert self.voice_assistant_handle_audio_callback is not None + await self.voice_assistant_handle_audio_callback(audio) + async def _mock_generic_device_entry( hass: HomeAssistant, @@ -263,6 +327,7 @@ async def _mock_generic_device_entry( mock_list_entities_services: tuple[list[EntityInfo], list[UserService]], states: list[EntityState], entry: MockConfigEntry | None = None, + hass_storage: dict[str, Any] | None = None, ) -> MockESPHomeDevice: if not entry: entry = MockConfigEntry( @@ -286,6 +351,17 @@ async def _mock_generic_device_entry( } device_info = DeviceInfo(**(default_device_info | mock_device_info)) + if hass_storage: + storage_key = f"{DOMAIN}.{entry.entry_id}" + hass_storage[storage_key] = { + "version": 1, + "minor_version": 1, + "key": storage_key, + "data": { + "device_info": device_info.to_dict(), + }, + } + mock_device = MockESPHomeDevice(entry, mock_client, device_info) def _subscribe_states(callback: Callable[[EntityState], None]) -> None: @@ -306,8 +382,33 @@ async def _mock_generic_device_entry( """Subscribe to home assistant states.""" mock_device.set_home_assistant_state_subscription_callback(on_state_sub) + def 
_subscribe_voice_assistant( + *, + handle_start: Callable[ + [str, int, VoiceAssistantAudioSettings, str | None], + Coroutine[Any, Any, int | None], + ], + handle_stop: Callable[[], Coroutine[Any, Any, None]], + handle_audio: ( + Callable[ + [bytes], + Coroutine[Any, Any, None], + ] + | None + ) = None, + ) -> Callable[[], None]: + """Subscribe to voice assistant.""" + mock_device.set_subscribe_voice_assistant_callbacks( + handle_start, handle_stop, handle_audio + ) + + def unsub(): + pass + + return unsub + mock_client.device_info = AsyncMock(return_value=mock_device.device_info) - mock_client.subscribe_voice_assistant = Mock() + mock_client.subscribe_voice_assistant = _subscribe_voice_assistant mock_client.list_entities_services = AsyncMock( return_value=mock_list_entities_services ) @@ -320,7 +421,7 @@ async def _mock_generic_device_entry( class MockReconnectLogic(BaseMockReconnectLogic): """Mock ReconnectLogic.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the mock.""" super().__init__(*args, **kwargs) mock_device.set_on_disconnect(kwargs["on_disconnect"]) @@ -453,6 +554,7 @@ async def mock_bluetooth_entry_with_legacy_adv( @pytest.fixture async def mock_generic_device_entry( hass: HomeAssistant, + hass_storage: dict[str, Any], ) -> Callable[ [APIClient, list[EntityInfo], list[UserService], list[EntityState]], Awaitable[MockConfigEntry], @@ -464,10 +566,17 @@ async def mock_generic_device_entry( entity_info: list[EntityInfo], user_service: list[UserService], states: list[EntityState], + mock_storage: bool = False, ) -> MockConfigEntry: return ( await _mock_generic_device_entry( - hass, mock_client, {}, (entity_info, user_service), states + hass, + mock_client, + {}, + (entity_info, user_service), + states, + None, + hass_storage if mock_storage else None, ) ).entry @@ -477,6 +586,7 @@ async def mock_generic_device_entry( @pytest.fixture async def mock_esphome_device( hass: HomeAssistant, + 
hass_storage: dict[str, Any], ) -> Callable[ [APIClient, list[EntityInfo], list[UserService], list[EntityState]], Awaitable[MockESPHomeDevice], @@ -485,19 +595,75 @@ async def mock_esphome_device( async def _mock_device( mock_client: APIClient, - entity_info: list[EntityInfo], - user_service: list[UserService], - states: list[EntityState], + entity_info: list[EntityInfo] | None = None, + user_service: list[UserService] | None = None, + states: list[EntityState] | None = None, entry: MockConfigEntry | None = None, device_info: dict[str, Any] | None = None, + mock_storage: bool = False, ) -> MockESPHomeDevice: return await _mock_generic_device_entry( hass, mock_client, device_info or {}, - (entity_info, user_service), - states, + (entity_info or [], user_service or []), + states or [], entry, + hass_storage if mock_storage else None, ) return _mock_device + + +@pytest.fixture +def mock_voice_assistant_api_pipeline() -> VoiceAssistantAPIPipeline: + """Return the API Pipeline factory.""" + mock_pipeline = Mock(spec=VoiceAssistantAPIPipeline) + + def mock_constructor( + hass: HomeAssistant, + entry_data: RuntimeEntryData, + handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], + handle_finished: Callable[[], None], + api_client: APIClient, + ): + """Fake the constructor.""" + mock_pipeline.hass = hass + mock_pipeline.entry_data = entry_data + mock_pipeline.handle_event = handle_event + mock_pipeline.handle_finished = handle_finished + mock_pipeline.api_client = api_client + return mock_pipeline + + mock_pipeline.side_effect = mock_constructor + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantAPIPipeline", + new=mock_pipeline, + ): + yield mock_pipeline + + +@pytest.fixture +def mock_voice_assistant_udp_pipeline() -> VoiceAssistantUDPPipeline: + """Return the API Pipeline factory.""" + mock_pipeline = Mock(spec=VoiceAssistantUDPPipeline) + + def mock_constructor( + hass: HomeAssistant, + entry_data: 
RuntimeEntryData, + handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], + handle_finished: Callable[[], None], + ): + """Fake the constructor.""" + mock_pipeline.hass = hass + mock_pipeline.entry_data = entry_data + mock_pipeline.handle_event = handle_event + mock_pipeline.handle_finished = handle_finished + return mock_pipeline + + mock_pipeline.side_effect = mock_constructor + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantUDPPipeline", + new=mock_pipeline, + ): + yield mock_pipeline diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 9c61a5d0615..68af6665380 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -2,7 +2,8 @@ from ipaddress import ip_address import json -from unittest.mock import AsyncMock, MagicMock, patch +from typing import Any +from unittest.mock import AsyncMock, patch from aioesphomeapi import ( APIClient, @@ -18,7 +19,7 @@ import pytest from homeassistant import config_entries from homeassistant.components import dhcp, zeroconf -from homeassistant.components.esphome import DomainData, dashboard +from homeassistant.components.esphome import dashboard from homeassistant.components.esphome.const import ( CONF_ALLOW_SERVICE_CALLS, CONF_DEVICE_NAME, @@ -329,7 +330,7 @@ async def test_user_invalid_password(hass: HomeAssistant, mock_client) -> None: async def test_user_dashboard_has_wrong_key( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test user step with key from dashboard that is incorrect.""" @@ -376,7 +377,7 @@ async def test_user_dashboard_has_wrong_key( async def test_user_discovers_name_and_gets_key_from_dashboard( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test user step can discover the name and get the key 
from the dashboard.""" @@ -429,7 +430,7 @@ async def test_user_discovers_name_and_gets_key_from_dashboard_fails( hass: HomeAssistant, dashboard_exception: Exception, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test user step can discover the name and get the key from the dashboard.""" @@ -484,7 +485,7 @@ async def test_user_discovers_name_and_gets_key_from_dashboard_fails( async def test_user_discovers_name_and_dashboard_is_unavailable( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test user step can discover the name but the dashboard is unavailable.""" @@ -843,7 +844,7 @@ async def test_reauth_confirm_valid( async def test_reauth_fixed_via_dashboard( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically via dashboard.""" @@ -894,7 +895,7 @@ async def test_reauth_fixed_via_dashboard( async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_config_entry, mock_setup_entry: None, ) -> None: @@ -938,7 +939,7 @@ async def test_reauth_fixed_via_remove_password( hass: HomeAssistant, mock_client, mock_config_entry, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically by seeing password removed.""" @@ -962,7 +963,7 @@ async def test_reauth_fixed_via_remove_password( async def test_reauth_fixed_via_dashboard_at_confirm( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically via dashboard at confirm step.""" @@ -1136,10 +1137,7 @@ async def test_discovery_dhcp_no_changes( ) entry.add_to_hass(hass) - mock_entry_data = MagicMock() - mock_entry_data.device_info.name = 
"test8266" - domain_data = DomainData.get(hass) - domain_data.set_entry_data(entry, mock_entry_data) + mock_client.device_info = AsyncMock(return_value=DeviceInfo(name="test8266")) service_info = dhcp.DhcpServiceInfo( ip="192.168.43.183", @@ -1156,7 +1154,9 @@ async def test_discovery_dhcp_no_changes( assert entry.data[CONF_HOST] == "192.168.43.183" -async def test_discovery_hassio(hass: HomeAssistant, mock_dashboard) -> None: +async def test_discovery_hassio( + hass: HomeAssistant, mock_dashboard: dict[str, Any] +) -> None: """Test dashboard discovery.""" result = await hass.config_entries.flow.async_init( "esphome", @@ -1184,7 +1184,7 @@ async def test_discovery_hassio(hass: HomeAssistant, mock_dashboard) -> None: async def test_zeroconf_encryption_key_via_dashboard( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test encryption key retrieved from dashboard.""" @@ -1250,7 +1250,7 @@ async def test_zeroconf_encryption_key_via_dashboard( async def test_zeroconf_encryption_key_via_dashboard_with_api_encryption_prop( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test encryption key retrieved from dashboard with api_encryption property set.""" @@ -1316,7 +1316,7 @@ async def test_zeroconf_encryption_key_via_dashboard_with_api_encryption_prop( async def test_zeroconf_no_encryption_key_via_dashboard( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test encryption key not retrieved from dashboard.""" diff --git a/tests/components/esphome/test_dashboard.py b/tests/components/esphome/test_dashboard.py index 1b0303a8a48..da805eb2eee 100644 --- a/tests/components/esphome/test_dashboard.py +++ b/tests/components/esphome/test_dashboard.py @@ -16,7 +16,10 @@ from tests.common import MockConfigEntry async def test_dashboard_storage( - hass: 
HomeAssistant, init_integration, mock_dashboard, hass_storage: dict[str, Any] + hass: HomeAssistant, + init_integration, + mock_dashboard: dict[str, Any], + hass_storage: dict[str, Any], ) -> None: """Test dashboard storage.""" assert hass_storage[dashboard.STORAGE_KEY]["data"] == { @@ -197,7 +200,9 @@ async def test_new_dashboard_fix_reauth( assert mock_config_entry.data[CONF_NOISE_PSK] == VALID_NOISE_PSK -async def test_dashboard_supports_update(hass: HomeAssistant, mock_dashboard) -> None: +async def test_dashboard_supports_update( + hass: HomeAssistant, mock_dashboard: dict[str, Any] +) -> None: """Test dashboard supports update.""" dash = dashboard.async_get_dashboard(hass) diff --git a/tests/components/esphome/test_diagnostics.py b/tests/components/esphome/test_diagnostics.py index 4fb8f993aca..b66b6d72fce 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -1,9 +1,11 @@ """Tests for the diagnostics data provided by the ESPHome integration.""" +from typing import Any from unittest.mock import ANY import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components import bluetooth from homeassistant.core import HomeAssistant @@ -20,13 +22,13 @@ async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, init_integration: MockConfigEntry, - mock_dashboard, + mock_dashboard: dict[str, Any], snapshot: SnapshotAssertion, ) -> None: """Test diagnostics for config entry.""" result = await get_diagnostics_for_config_entry(hass, hass_client, init_integration) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) async def test_diagnostics_with_bluetooth( @@ -60,6 +62,7 @@ async def test_diagnostics_with_bluetooth( }, }, "config": { + "created_at": ANY, "data": { "device_name": "test", "host": "test.local", @@ -70,6 +73,7 @@ async def test_diagnostics_with_bluetooth( "domain": "esphome", 
"entry_id": ANY, "minor_version": 1, + "modified_at": ANY, "options": {"allow_service_calls": False}, "pref_disable_new_entities": False, "pref_disable_polling": False, diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index c17ff9a7d8c..9d2a906466e 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -2,7 +2,7 @@ import asyncio from collections.abc import Awaitable, Callable -from unittest.mock import AsyncMock, call +from unittest.mock import AsyncMock, call, patch from aioesphomeapi import ( APIClient, @@ -17,6 +17,7 @@ from aioesphomeapi import ( UserService, UserServiceArg, UserServiceArgType, + VoiceAssistantFeature, ) import pytest @@ -28,6 +29,10 @@ from homeassistant.components.esphome.const import ( DOMAIN, STABLE_BLE_VERSION_STR, ) +from homeassistant.components.esphome.voice_assistant import ( + VoiceAssistantAPIPipeline, + VoiceAssistantUDPPipeline, +) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -39,7 +44,7 @@ from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, issue_registry as ir from homeassistant.setup import async_setup_component -from .conftest import MockESPHomeDevice +from .conftest import _ONE_SECOND, MockESPHomeDevice from tests.common import MockConfigEntry, async_capture_events, async_mock_service @@ -575,7 +580,7 @@ async def test_connection_aborted_wrong_device( entry.add_to_hass(hass) disconnect_done = hass.loop.create_future() - def async_disconnect(*args, **kwargs) -> None: + async def async_disconnect(*args, **kwargs) -> None: disconnect_done.set_result(None) mock_client.disconnect = async_disconnect @@ -1019,7 +1024,7 @@ async def test_esphome_device_with_project( ) assert dev.manufacturer == "mfr" assert dev.model == "model" - assert dev.hw_version == "2.2.2" + assert dev.sw_version == "2.2.2 (ESPHome 1.0.0)" async def test_esphome_device_with_manufacturer( 
@@ -1156,3 +1161,127 @@ async def test_start_reauth( assert len(flows) == 1 flow = flows[0] assert flow["context"]["source"] == "reauth" + + +async def test_entry_missing_unique_id( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test the unique id is added from storage if available.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=None, + data={ + CONF_HOST: "test.local", + CONF_PORT: 6053, + CONF_PASSWORD: "", + }, + options={CONF_ALLOW_SERVICE_CALLS: True}, + ) + entry.add_to_hass(hass) + await mock_esphome_device(mock_client=mock_client, mock_storage=True) + await hass.async_block_till_done() + assert entry.unique_id == "11:22:33:44:55:aa" + + +async def test_manager_voice_assistant_handlers_api( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + caplog: pytest.LogCaptureFixture, + mock_voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, +) -> None: + """Test the handlers are correctly executed in manager.py.""" + + device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.API_AUDIO + }, + ) + + await hass.async_block_till_done() + + with ( + patch( + "homeassistant.components.esphome.manager.VoiceAssistantAPIPipeline", + new=mock_voice_assistant_api_pipeline, + ), + ): + port: int | None = await device.mock_voice_assistant_handle_start( + "", 0, None, None + ) + + assert port == 0 + + port: int | None = await device.mock_voice_assistant_handle_start( + "", 0, None, None + ) + + assert "Previous Voice assistant pipeline was not stopped" in caplog.text + + await 
device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) + + mock_voice_assistant_api_pipeline.receive_audio_bytes.assert_called_with( + bytes(_ONE_SECOND) + ) + + mock_voice_assistant_api_pipeline.receive_audio_bytes.reset_mock() + + await device.mock_voice_assistant_handle_stop() + mock_voice_assistant_api_pipeline.handle_finished() + + await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) + + mock_voice_assistant_api_pipeline.receive_audio_bytes.assert_not_called() + + +async def test_manager_voice_assistant_handlers_udp( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_voice_assistant_udp_pipeline: VoiceAssistantUDPPipeline, +) -> None: + """Test the handlers are correctly executed in manager.py.""" + + device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + + await hass.async_block_till_done() + + with ( + patch( + "homeassistant.components.esphome.manager.VoiceAssistantUDPPipeline", + new=mock_voice_assistant_udp_pipeline, + ), + ): + await device.mock_voice_assistant_handle_start("", 0, None, None) + + mock_voice_assistant_udp_pipeline.run_pipeline.assert_called() + + await device.mock_voice_assistant_handle_stop() + mock_voice_assistant_udp_pipeline.handle_finished() + + mock_voice_assistant_udp_pipeline.stop.assert_called() + mock_voice_assistant_udp_pipeline.close.assert_called() diff --git a/tests/components/esphome/test_sensor.py b/tests/components/esphome/test_sensor.py index bebfaaa69d4..76f71b53167 100644 --- a/tests/components/esphome/test_sensor.py +++ b/tests/components/esphome/test_sensor.py @@ -28,10 +28,10 @@ from homeassistant.const import ( ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, + 
EntityCategory, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity import EntityCategory from .conftest import MockESPHomeDevice diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index fc845299142..83e89b1de00 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -1,12 +1,14 @@ """Test ESPHome update entities.""" from collections.abc import Awaitable, Callable +from typing import Any from unittest.mock import Mock, patch from aioesphomeapi import ( APIClient, EntityInfo, EntityState, + UpdateCommand, UpdateInfo, UpdateState, UserService, @@ -14,6 +16,10 @@ from aioesphomeapi import ( import pytest from homeassistant.components.esphome.dashboard import async_get_dashboard +from homeassistant.components.homeassistant import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, +) from homeassistant.components.update import ( DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, @@ -33,6 +39,11 @@ from homeassistant.exceptions import HomeAssistantError from .conftest import MockESPHomeDevice +@pytest.fixture(autouse=True) +def enable_entity(entity_registry_enabled_by_default: None) -> None: + """Enable update entity.""" + + @pytest.fixture def stub_reconnect(): """Stub reconnect.""" @@ -84,7 +95,7 @@ async def test_update_entity( stub_reconnect, mock_config_entry, mock_device_info, - mock_dashboard, + mock_dashboard: dict[str, Any], devices_payload, expected_state, expected_attributes, @@ -190,7 +201,7 @@ async def test_update_static_info( [APIClient, list[EntityInfo], list[UserService], list[EntityState]], Awaitable[MockESPHomeDevice], ], - mock_dashboard, + mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity.""" mock_dashboard["configured"] = [ @@ -236,7 +247,7 @@ async def test_update_device_state_for_availability( expected_disconnect: bool, expected_state: str, 
has_deep_sleep: bool, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_client: APIClient, mock_esphome_device: Callable[ [APIClient, list[EntityInfo], list[UserService], list[EntityState]], @@ -272,7 +283,7 @@ async def test_update_entity_dashboard_not_available_startup( stub_reconnect, mock_config_entry, mock_device_info, - mock_dashboard, + mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity when dashboard is not available at startup.""" with ( @@ -321,7 +332,7 @@ async def test_update_entity_dashboard_discovered_after_startup_but_update_faile [APIClient, list[EntityInfo], list[UserService], list[EntityState]], Awaitable[MockESPHomeDevice], ], - mock_dashboard, + mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity when dashboard is discovered after startup and the first update fails.""" with patch( @@ -386,7 +397,7 @@ async def test_update_becomes_available_at_runtime( [APIClient, list[EntityInfo], list[UserService], list[EntityState]], Awaitable[MockESPHomeDevice], ], - mock_dashboard, + mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity when the dashboard has no device at startup but gets them later.""" await mock_esphome_device( @@ -521,3 +532,12 @@ async def test_generic_device_update_entity_has_update( assert state is not None assert state.state == STATE_ON assert state.attributes["in_progress"] == 50 + + await hass.services.async_call( + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: "update.test_myupdate"}, + blocking=True, + ) + + mock_client.update_command.assert_called_with(key=1, command=UpdateCommand.CHECK) diff --git a/tests/components/esphome/test_voice_assistant.py b/tests/components/esphome/test_voice_assistant.py index bcd49f91c03..eafc0243dc6 100644 --- a/tests/components/esphome/test_voice_assistant.py +++ b/tests/components/esphome/test_voice_assistant.py @@ -37,15 +37,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import 
intent as intent_helper import homeassistant.helpers.device_registry as dr -from .conftest import MockESPHomeDevice +from .conftest import _ONE_SECOND, MockESPHomeDevice _TEST_INPUT_TEXT = "This is an input test" _TEST_OUTPUT_TEXT = "This is an output test" _TEST_OUTPUT_URL = "output.mp3" _TEST_MEDIA_ID = "12345" -_ONE_SECOND = 16000 * 2 # 16Khz 16-bit - @pytest.fixture def voice_assistant_udp_pipeline( @@ -813,6 +811,7 @@ async def test_wake_word_abort_exception( async def test_timer_events( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, mock_client: APIClient, mock_esphome_device: Callable[ [APIClient, list[EntityInfo], list[UserService], list[EntityState]], @@ -831,11 +830,12 @@ async def test_timer_events( | VoiceAssistantFeature.TIMERS }, ) - dev_reg = dr.async_get(hass) - dev = dev_reg.async_get_device( + await hass.async_block_till_done() + dev = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} ) + total_seconds = (1 * 60 * 60) + (2 * 60) + 3 await intent_helper.async_handle( hass, "test", @@ -853,14 +853,39 @@ async def test_timer_events( VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED, ANY, "test timer", - 3723, - 3723, + total_seconds, + total_seconds, + True, + ) + + # Increase timer beyond original time and check total_seconds has increased + mock_client.send_voice_assistant_timer_event.reset_mock() + + total_seconds += 5 * 60 + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_INCREASE_TIMER, + { + "name": {"value": "test timer"}, + "minutes": {"value": 5}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_called_with( + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED, + ANY, + "test timer", + total_seconds, + ANY, True, ) async def test_unknown_timer_event( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, mock_client: APIClient, mock_esphome_device: Callable[ [APIClient, list[EntityInfo], 
list[UserService], list[EntityState]], @@ -879,8 +904,8 @@ async def test_unknown_timer_event( | VoiceAssistantFeature.TIMERS }, ) - dev_reg = dr.async_get(hass) - dev = dev_reg.async_get_device( + await hass.async_block_till_done() + dev = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} ) diff --git a/tests/components/event/test_init.py b/tests/components/event/test_init.py index 981a7744beb..c6828c2c290 100644 --- a/tests/components/event/test_init.py +++ b/tests/components/event/test_init.py @@ -1,10 +1,10 @@ """The tests for the event integration.""" +from collections.abc import Generator from typing import Any from freezegun import freeze_time import pytest -from typing_extensions import Generator from homeassistant.components.event import ( ATTR_EVENT_TYPE, diff --git a/tests/components/evil_genius_labs/conftest.py b/tests/components/evil_genius_labs/conftest.py index 3941917e130..fc0725607e2 100644 --- a/tests/components/evil_genius_labs/conftest.py +++ b/tests/components/evil_genius_labs/conftest.py @@ -1,36 +1,44 @@ """Test helpers for Evil Genius Labs.""" -import json +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import patch import pytest +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import ( + MockConfigEntry, + load_json_array_fixture, + load_json_object_fixture, +) @pytest.fixture(scope="package") -def all_fixture(): +def all_fixture() -> dict[str, Any]: """Fixture data.""" - data = json.loads(load_fixture("data.json", "evil_genius_labs")) + data = load_json_array_fixture("data.json", "evil_genius_labs") return {item["name"]: item for item in data} @pytest.fixture(scope="package") -def info_fixture(): +def info_fixture() -> 
JsonObjectType: """Fixture info.""" - return json.loads(load_fixture("info.json", "evil_genius_labs")) + return load_json_object_fixture("info.json", "evil_genius_labs") @pytest.fixture(scope="package") -def product_fixture(): +def product_fixture() -> dict[str, str]: """Fixture info.""" return {"productName": "Fibonacci256"} @pytest.fixture -def config_entry(hass): +def config_entry(hass: HomeAssistant) -> MockConfigEntry: """Evil genius labs config entry.""" entry = MockConfigEntry(domain="evil_genius_labs", data={"host": "192.168.1.113"}) entry.add_to_hass(hass) @@ -39,8 +47,13 @@ def config_entry(hass): @pytest.fixture async def setup_evil_genius_labs( - hass, config_entry, all_fixture, info_fixture, product_fixture, platforms -): + hass: HomeAssistant, + config_entry: MockConfigEntry, + all_fixture: dict[str, Any], + info_fixture: JsonObjectType, + product_fixture: dict[str, str], + platforms: list[Platform], +) -> AsyncGenerator[None]: """Test up Evil Genius Labs instance.""" with ( patch( diff --git a/tests/components/evohome/__init__.py b/tests/components/evohome/__init__.py new file mode 100644 index 00000000000..588e0f61746 --- /dev/null +++ b/tests/components/evohome/__init__.py @@ -0,0 +1 @@ +"""The tests for the evohome integration.""" diff --git a/tests/components/evohome/conftest.py b/tests/components/evohome/conftest.py new file mode 100644 index 00000000000..260330896b7 --- /dev/null +++ b/tests/components/evohome/conftest.py @@ -0,0 +1,111 @@ +"""Fixtures and helpers for the evohome tests.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any, Final +from unittest.mock import MagicMock, patch + +from aiohttp import ClientSession +from evohomeasync2 import EvohomeClient +from evohomeasync2.broker import Broker +import pytest + +from homeassistant.components.evohome import CONF_PASSWORD, CONF_USERNAME, DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import 
async_setup_component +from homeassistant.util.json import JsonArrayType, JsonObjectType + +from .const import ACCESS_TOKEN, REFRESH_TOKEN + +from tests.common import load_json_array_fixture, load_json_object_fixture + +TEST_CONFIG: Final = { + CONF_USERNAME: "username", + CONF_PASSWORD: "password", +} + + +def user_account_config_fixture() -> JsonObjectType: + """Load JSON for the config of a user's account.""" + return load_json_object_fixture("user_account.json", DOMAIN) + + +def user_locations_config_fixture() -> JsonArrayType: + """Load JSON for the config of a user's installation (a list of locations).""" + return load_json_array_fixture("user_locations.json", DOMAIN) + + +def location_status_fixture(loc_id: str) -> JsonObjectType: + """Load JSON for the status of a specific location.""" + return load_json_object_fixture(f"status_{loc_id}.json", DOMAIN) + + +def dhw_schedule_fixture() -> JsonObjectType: + """Load JSON for the schedule of a domesticHotWater zone.""" + return load_json_object_fixture("schedule_dhw.json", DOMAIN) + + +def zone_schedule_fixture() -> JsonObjectType: + """Load JSON for the schedule of a temperatureZone zone.""" + return load_json_object_fixture("schedule_zone.json", DOMAIN) + + +async def mock_get( + self: Broker, url: str, **kwargs: Any +) -> JsonArrayType | JsonObjectType: + """Return the JSON for a HTTP get of a given URL.""" + + # a proxy for the behaviour of the real web API + if self.refresh_token is None: + self.refresh_token = f"new_{REFRESH_TOKEN}" + + if self.access_token_expires is None or self.access_token_expires < datetime.now(): + self.access_token = f"new_{ACCESS_TOKEN}" + self.access_token_expires = datetime.now() + timedelta(minutes=30) + + # assume a valid GET, and return the JSON for that web API + if url == "userAccount": # userAccount + return user_account_config_fixture() + + if url.startswith("location"): + if "installationInfo" in url: # location/installationInfo?userId={id} + return 
user_locations_config_fixture() + if "location" in url: # location/{id}/status + return location_status_fixture("2738909") + + elif "schedule" in url: + if url.startswith("domesticHotWater"): # domesticHotWater/{id}/schedule + return dhw_schedule_fixture() + if url.startswith("temperatureZone"): # temperatureZone/{id}/schedule + return zone_schedule_fixture() + + pytest.xfail(f"Unexpected URL: {url}") + + +@patch("evohomeasync2.broker.Broker.get", mock_get) +async def setup_evohome(hass: HomeAssistant, test_config: dict[str, str]) -> MagicMock: + """Set up the evohome integration and return its client. + + The class is mocked here to check the client was instantiated with the correct args. + """ + + with ( + patch("homeassistant.components.evohome.evo.EvohomeClient") as mock_client, + patch("homeassistant.components.evohome.ev1.EvohomeClient", return_value=None), + ): + mock_client.side_effect = EvohomeClient + + assert await async_setup_component(hass, DOMAIN, {DOMAIN: test_config}) + await hass.async_block_till_done() + + mock_client.assert_called_once() + + assert mock_client.call_args.args[0] == test_config[CONF_USERNAME] + assert mock_client.call_args.args[1] == test_config[CONF_PASSWORD] + + assert isinstance(mock_client.call_args.kwargs["session"], ClientSession) + + assert mock_client.account_info is not None + + return mock_client diff --git a/tests/components/evohome/const.py b/tests/components/evohome/const.py new file mode 100644 index 00000000000..0b298db533a --- /dev/null +++ b/tests/components/evohome/const.py @@ -0,0 +1,10 @@ +"""Constants for the evohome tests.""" + +from __future__ import annotations + +from typing import Final + +ACCESS_TOKEN: Final = "at_1dc7z657UKzbhKA..." +REFRESH_TOKEN: Final = "rf_jg68ZCKYdxEI3fF..." +SESSION_ID: Final = "F7181186..." 
+USERNAME: Final = "test_user@gmail.com" diff --git a/tests/components/evohome/fixtures/schedule_dhw.json b/tests/components/evohome/fixtures/schedule_dhw.json new file mode 100644 index 00000000000..da9a225fb82 --- /dev/null +++ b/tests/components/evohome/fixtures/schedule_dhw.json @@ -0,0 +1,81 @@ +{ + "dailySchedules": [ + { + "dayOfWeek": "Monday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Tuesday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Wednesday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Thursday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Friday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { 
"dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Saturday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "09:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Sunday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "09:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "23:00:00" } + ] + } + ] +} diff --git a/tests/components/evohome/fixtures/schedule_zone.json b/tests/components/evohome/fixtures/schedule_zone.json new file mode 100644 index 00000000000..5030d92ff3d --- /dev/null +++ b/tests/components/evohome/fixtures/schedule_zone.json @@ -0,0 +1,67 @@ +{ + "dailySchedules": [ + { + "dayOfWeek": "Monday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Tuesday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Wednesday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Thursday", + "switchpoints": [ + { "heatSetpoint": 18.1, 
"timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Friday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Saturday", + "switchpoints": [ + { "heatSetpoint": 18.5, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:30:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Sunday", + "switchpoints": [ + { "heatSetpoint": 18.5, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:30:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + } + ] +} diff --git a/tests/components/evohome/fixtures/status_2738909.json b/tests/components/evohome/fixtures/status_2738909.json new file mode 100644 index 00000000000..6d555ba4e3e --- /dev/null +++ b/tests/components/evohome/fixtures/status_2738909.json @@ -0,0 +1,125 @@ +{ + "locationId": "2738909", + "gateways": [ + { + "gatewayId": "2499896", + "temperatureControlSystems": [ + { + "systemId": "3432522", + "zones": [ + { + "zoneId": "3432521", + "name": "Dead Zone", + "temperatureStatus": { "isAvailable": false }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "activeFaults": [] + }, + { + "zoneId": "3432576", + "name": "Main Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "PermanentOverride" + }, + "activeFaults": [ + { + "faultType": "TempZoneActuatorCommunicationLost", + "since": "2022-03-02T15:56:01" + } + ] + }, + { + 
"zoneId": "3432577", + "name": "Front Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 21.0, + "setpointMode": "TemporaryOverride", + "until": "2022-03-07T19:00:00Z" + }, + "activeFaults": [ + { + "faultType": "TempZoneActuatorLowBattery", + "since": "2022-03-02T04:50:20" + } + ] + }, + { + "zoneId": "3432578", + "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "name": "Kitchen" + }, + { + "zoneId": "3432579", + "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.0, + "setpointMode": "FollowSchedule" + }, + "name": "Bathroom Dn" + }, + { + "zoneId": "3432580", + "temperatureStatus": { "temperature": 21.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.0, + "setpointMode": "FollowSchedule" + }, + "name": "Main Bedroom" + }, + { + "zoneId": "3449703", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "name": "Kids Room" + }, + { + "zoneId": "3449740", + "temperatureStatus": { "temperature": 21.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.5, + "setpointMode": "FollowSchedule" + }, + "name": "" + }, + { + "zoneId": "3450733", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 14.0, + "setpointMode": "PermanentOverride" + }, + "name": "Spare Room" + } + ], + "dhw": { + "dhwId": "3933910", + "temperatureStatus": { "temperature": 23.0, "isAvailable": true }, + "stateStatus": { "state": "Off", "mode": "PermanentOverride" }, + "activeFaults": [] + }, + 
"activeFaults": [], + "systemModeStatus": { "mode": "AutoWithEco", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/user_account.json b/tests/components/evohome/fixtures/user_account.json new file mode 100644 index 00000000000..99a96a7961e --- /dev/null +++ b/tests/components/evohome/fixtures/user_account.json @@ -0,0 +1,11 @@ +{ + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith", + "streetAddress": "1 Main Street", + "city": "London", + "postcode": "E1 1AA", + "country": "UnitedKingdom", + "language": "enGB" +} diff --git a/tests/components/evohome/fixtures/user_locations.json b/tests/components/evohome/fixtures/user_locations.json new file mode 100644 index 00000000000..cf59aa9ae8a --- /dev/null +++ b/tests/components/evohome/fixtures/user_locations.json @@ -0,0 +1,346 @@ +[ + { + "locationInfo": { + "locationId": "2738909", + "name": "My Home", + "streetAddress": "1 Main Street", + "city": "London", + "country": "UnitedKingdom", + "postcode": "E1 1AA", + "locationType": "Residential", + "useDaylightSaveSwitching": true, + "timeZone": { + "timeZoneId": "GMTStandardTime", + "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", + "offsetMinutes": 0, + "currentOffsetMinutes": 60, + "supportsDaylightSaving": true + }, + "locationOwner": { + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith" + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "2499896", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "3432522", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "3432521", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + 
"PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Dead Zone", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432576", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432577", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Front Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432578", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, 
+ "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Kitchen", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432579", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Bathroom Dn", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432580", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Bedroom", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3449703", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Kids Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3449740", + 
"modelType": "Unknown", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "", + "zoneType": "Unknown" + }, + { + "zoneId": "3450733", + "modelType": "xx", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Spare Room", + "zoneType": "xx" + } + ], + "dhw": { + "dhwId": "3933910", + "dhwStateCapabilitiesResponse": { + "allowedStates": ["On", "Off"], + "allowedModes": [ + "FollowSchedule", + "PermanentOverride", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilitiesResponse": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00" + } + }, + "allowedSystemModes": [ + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithReset", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + 
"timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "DayOff", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "Custom", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/test_storage.py b/tests/components/evohome/test_storage.py new file mode 100644 index 00000000000..e87b847a9ff --- /dev/null +++ b/tests/components/evohome/test_storage.py @@ -0,0 +1,208 @@ +"""The tests for evohome storage load & save.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any, Final, NotRequired, TypedDict + +import pytest + +from homeassistant.components.evohome import ( + CONF_PASSWORD, + CONF_USERNAME, + DOMAIN, + STORAGE_KEY, + STORAGE_VER, + dt_aware_to_naive, +) +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from .conftest import setup_evohome +from .const import ACCESS_TOKEN, REFRESH_TOKEN, SESSION_ID, USERNAME + + +class _SessionDataT(TypedDict): + sessionId: str + + +class _TokenStoreT(TypedDict): + username: str + refresh_token: str + access_token: str + access_token_expires: str # 2024-07-27T23:57:30+01:00 + user_data: NotRequired[_SessionDataT] + + +class _EmptyStoreT(TypedDict): + pass + + +SZ_USERNAME: Final = "username" +SZ_REFRESH_TOKEN: Final = "refresh_token" +SZ_ACCESS_TOKEN: Final = "access_token" +SZ_ACCESS_TOKEN_EXPIRES: Final = "access_token_expires" +SZ_USER_DATA: Final = "user_data" + + +def dt_pair(dt_dtm: datetime) -> tuple[datetime, str]: + """Return a datetime without milliseconds and its string 
representation.""" + dt_str = dt_dtm.isoformat(timespec="seconds") # e.g. 2024-07-28T00:57:29+01:00 + return dt_util.parse_datetime(dt_str, raise_on_error=True), dt_str + + +ACCESS_TOKEN_EXP_DTM, ACCESS_TOKEN_EXP_STR = dt_pair(dt_util.now() + timedelta(hours=1)) + +USERNAME_DIFF: Final = f"not_{USERNAME}" +USERNAME_SAME: Final = USERNAME + +TEST_CONFIG: Final = { + CONF_USERNAME: USERNAME_SAME, + CONF_PASSWORD: "password", +} + +TEST_DATA: Final[dict[str, _TokenStoreT]] = { + "sans_session_id": { + SZ_USERNAME: USERNAME_SAME, + SZ_REFRESH_TOKEN: REFRESH_TOKEN, + SZ_ACCESS_TOKEN: ACCESS_TOKEN, + SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, + }, + "with_session_id": { + SZ_USERNAME: USERNAME_SAME, + SZ_REFRESH_TOKEN: REFRESH_TOKEN, + SZ_ACCESS_TOKEN: ACCESS_TOKEN, + SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, + SZ_USER_DATA: {"sessionId": SESSION_ID}, + }, +} + +TEST_DATA_NULL: Final[dict[str, _EmptyStoreT | None]] = { + "store_is_absent": None, + "store_was_reset": {}, +} + +DOMAIN_STORAGE_BASE: Final = { + "version": STORAGE_VER, + "minor_version": 1, + "key": STORAGE_KEY, +} + + +@pytest.mark.parametrize("idx", TEST_DATA_NULL) +async def test_auth_tokens_null( + hass: HomeAssistant, + hass_storage: dict[str, Any], + idx: str, +) -> None: + """Test loading/saving authentication tokens when no cached tokens in the store.""" + + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA_NULL[idx]} + + mock_client = await setup_evohome(hass, TEST_CONFIG) + + # Confirm client was instantiated without tokens, as cache was empty... + assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwarg + + # Confirm the expected tokens were cached to storage... 
+ data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_SAME + assert data[SZ_REFRESH_TOKEN] == f"new_{REFRESH_TOKEN}" + assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" + assert ( + dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) + > dt_util.now() + ) + + +@pytest.mark.parametrize("idx", TEST_DATA) +async def test_auth_tokens_same( + hass: HomeAssistant, hass_storage: dict[str, Any], idx: str +) -> None: + """Test loading/saving authentication tokens when matching username.""" + + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]} + + mock_client = await setup_evohome(hass, TEST_CONFIG) + + # Confirm client was instantiated with the cached tokens... + assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN_EXPIRES] == dt_aware_to_naive( + ACCESS_TOKEN_EXP_DTM + ) + + # Confirm the expected tokens were cached to storage... + data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_SAME + assert data[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert data[SZ_ACCESS_TOKEN] == ACCESS_TOKEN + assert dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES]) == ACCESS_TOKEN_EXP_DTM + + +@pytest.mark.parametrize("idx", TEST_DATA) +async def test_auth_tokens_past( + hass: HomeAssistant, hass_storage: dict[str, Any], idx: str +) -> None: + """Test loading/saving authentication tokens with matching username, but expired.""" + + dt_dtm, dt_str = dt_pair(dt_util.now() - timedelta(hours=1)) + + # make this access token have expired in the past... 
+ test_data = TEST_DATA[idx].copy() # shallow copy is OK here + test_data[SZ_ACCESS_TOKEN_EXPIRES] = dt_str + + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": test_data} + + mock_client = await setup_evohome(hass, TEST_CONFIG) + + # Confirm client was instantiated with the cached tokens... + assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN_EXPIRES] == dt_aware_to_naive( + dt_dtm + ) + + # Confirm the expected tokens were cached to storage... + data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_SAME + assert data[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" + assert ( + dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) + > dt_util.now() + ) + + +@pytest.mark.parametrize("idx", TEST_DATA) +async def test_auth_tokens_diff( + hass: HomeAssistant, hass_storage: dict[str, Any], idx: str +) -> None: + """Test loading/saving authentication tokens when unmatched username.""" + + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]} + + mock_client = await setup_evohome( + hass, TEST_CONFIG | {CONF_USERNAME: USERNAME_DIFF} + ) + + # Confirm client was instantiated without tokens, as username was different... + assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwargs + + # Confirm the expected tokens were cached to storage... 
+ data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_DIFF + assert data[SZ_REFRESH_TOKEN] == f"new_{REFRESH_TOKEN}" + assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" + assert ( + dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) + > dt_util.now() + ) diff --git a/tests/components/ezviz/__init__.py b/tests/components/ezviz/__init__.py index 9fc297be099..78bbee0b0ad 100644 --- a/tests/components/ezviz/__init__.py +++ b/tests/components/ezviz/__init__.py @@ -1,6 +1,6 @@ """Tests for the EZVIZ integration.""" -from unittest.mock import patch +from unittest.mock import _patch, patch from homeassistant.components.ezviz.const import ( ATTR_SERIAL, @@ -83,10 +83,11 @@ API_LOGIN_RETURN_VALIDATE = { } -def _patch_async_setup_entry(return_value=True): +def patch_async_setup_entry() -> _patch: + """Patch async_setup_entry.""" return patch( "homeassistant.components.ezviz.async_setup_entry", - return_value=return_value, + return_value=True, ) diff --git a/tests/components/ezviz/conftest.py b/tests/components/ezviz/conftest.py index 10fd0406a1c..171cfffc2fc 100644 --- a/tests/components/ezviz/conftest.py +++ b/tests/components/ezviz/conftest.py @@ -1,11 +1,14 @@ """Define pytest.fixtures available for all tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from pyezviz import EzvizClient from pyezviz.test_cam_rtsp import TestRTSPAuth import pytest +from homeassistant.core import HomeAssistant + ezviz_login_token_return = { "session_id": "fake_token", "rf_session_id": "fake_rf_token", @@ -14,13 +17,13 @@ ezviz_login_token_return = { @pytest.fixture(autouse=True) -def mock_ffmpeg(hass): +def mock_ffmpeg(hass: HomeAssistant) -> None: """Mock ffmpeg is loaded.""" hass.config.components.add("ffmpeg") @pytest.fixture -def ezviz_test_rtsp_config_flow(hass): +def ezviz_test_rtsp_config_flow() -> Generator[MagicMock]: """Mock the EzvizApi for easier testing.""" with ( 
patch.object(TestRTSPAuth, "main", return_value=True), @@ -40,7 +43,7 @@ def ezviz_test_rtsp_config_flow(hass): @pytest.fixture -def ezviz_config_flow(hass): +def ezviz_config_flow() -> Generator[MagicMock]: """Mock the EzvizAPI for easier config flow testing.""" with ( patch.object(EzvizClient, "login", return_value=True), diff --git a/tests/components/ezviz/test_config_flow.py b/tests/components/ezviz/test_config_flow.py index 57c3ae0600e..f9459635f2c 100644 --- a/tests/components/ezviz/test_config_flow.py +++ b/tests/components/ezviz/test_config_flow.py @@ -1,6 +1,6 @@ """Test the EZVIZ config flow.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch from pyezviz.exceptions import ( AuthTestResultFailed, @@ -10,6 +10,7 @@ from pyezviz.exceptions import ( InvalidURL, PyEzvizError, ) +import pytest from homeassistant.components.ezviz.const import ( ATTR_SERIAL, @@ -40,12 +41,13 @@ from . import ( API_LOGIN_RETURN_VALIDATE, DISCOVERY_INFO, USER_INPUT_VALIDATE, - _patch_async_setup_entry, init_integration, + patch_async_setup_entry, ) -async def test_user_form(hass: HomeAssistant, ezviz_config_flow) -> None: +@pytest.mark.usefixtures("ezviz_config_flow") +async def test_user_form(hass: HomeAssistant) -> None: """Test the user initiated form.""" result = await hass.config_entries.flow.async_init( @@ -55,7 +57,7 @@ async def test_user_form(hass: HomeAssistant, ezviz_config_flow) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, @@ -75,7 +77,8 @@ async def test_user_form(hass: HomeAssistant, ezviz_config_flow) -> None: assert result["reason"] == "already_configured_account" -async def test_user_custom_url(hass: HomeAssistant, ezviz_config_flow) -> None: +@pytest.mark.usefixtures("ezviz_config_flow") +async def 
test_user_custom_url(hass: HomeAssistant) -> None: """Test custom url step.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -94,7 +97,7 @@ async def test_user_custom_url(hass: HomeAssistant, ezviz_config_flow) -> None: assert result["step_id"] == "user_custom_url" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_URL: "test-user"}, @@ -107,7 +110,8 @@ async def test_user_custom_url(hass: HomeAssistant, ezviz_config_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_async_step_reauth(hass: HomeAssistant, ezviz_config_flow) -> None: +@pytest.mark.usefixtures("ezviz_config_flow") +async def test_async_step_reauth(hass: HomeAssistant) -> None: """Test the reauth step.""" result = await hass.config_entries.flow.async_init( @@ -117,7 +121,7 @@ async def test_async_step_reauth(hass: HomeAssistant, ezviz_config_flow) -> None assert result["step_id"] == "user" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, @@ -185,9 +189,8 @@ async def test_step_reauth_abort_if_cloud_account_missing(hass: HomeAssistant) - assert result["reason"] == "ezviz_cloud_account_missing" -async def test_async_step_integration_discovery( - hass: HomeAssistant, ezviz_config_flow, ezviz_test_rtsp_config_flow -) -> None: +@pytest.mark.usefixtures("ezviz_config_flow", "ezviz_test_rtsp_config_flow") +async def test_async_step_integration_discovery(hass: HomeAssistant) -> None: """Test discovery and confirm step.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): await init_integration(hass) @@ -199,7 +202,7 @@ async def test_async_step_integration_discovery( 
assert result["step_id"] == "confirm" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -221,7 +224,7 @@ async def test_async_step_integration_discovery( async def test_options_flow(hass: HomeAssistant) -> None: """Test updating options.""" - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: entry = await init_integration(hass) assert entry.options[CONF_FFMPEG_ARGUMENTS] == DEFAULT_FFMPEG_ARGUMENTS @@ -245,7 +248,9 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_form_exception(hass: HomeAssistant, ezviz_config_flow) -> None: +async def test_user_form_exception( + hass: HomeAssistant, ezviz_config_flow: MagicMock +) -> None: """Test we handle exception on user form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -311,7 +316,7 @@ async def test_user_form_exception(hass: HomeAssistant, ezviz_config_flow) -> No async def test_discover_exception_step1( hass: HomeAssistant, - ezviz_config_flow, + ezviz_config_flow: MagicMock, ) -> None: """Test we handle unexpected exception on discovery.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): @@ -397,10 +402,9 @@ async def test_discover_exception_step1( assert result["reason"] == "unknown" +@pytest.mark.usefixtures("ezviz_config_flow") async def test_discover_exception_step3( - hass: HomeAssistant, - ezviz_config_flow, - ezviz_test_rtsp_config_flow, + hass: HomeAssistant, ezviz_test_rtsp_config_flow: MagicMock ) -> None: """Test we handle unexpected exception on discovery.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): @@ -459,7 +463,7 @@ async def test_discover_exception_step3( async def test_user_custom_url_exception( - hass: 
HomeAssistant, ezviz_config_flow + hass: HomeAssistant, ezviz_config_flow: MagicMock ) -> None: """Test we handle unexpected exception.""" ezviz_config_flow.side_effect = PyEzvizError() @@ -534,7 +538,7 @@ async def test_user_custom_url_exception( async def test_async_step_reauth_exception( - hass: HomeAssistant, ezviz_config_flow + hass: HomeAssistant, ezviz_config_flow: MagicMock ) -> None: """Test the reauth step exceptions.""" @@ -545,7 +549,7 @@ async def test_async_step_reauth_exception( assert result["step_id"] == "user" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, diff --git a/tests/components/fan/conftest.py b/tests/components/fan/conftest.py new file mode 100644 index 00000000000..2e3644793df --- /dev/null +++ b/tests/components/fan/conftest.py @@ -0,0 +1,23 @@ +"""Fixtures for Fan platform tests.""" + +from collections.abc import Generator + +import pytest + +from homeassistant.config_entries import ConfigFlow +from homeassistant.core import HomeAssistant + +from tests.common import mock_config_flow, mock_platform + + +class MockFlow(ConfigFlow): + """Test flow.""" + + +@pytest.fixture +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: + """Mock config flow.""" + mock_platform(hass, "test.config_flow") + + with mock_config_flow("test", MockFlow): + yield diff --git a/tests/components/fan/test_device_condition.py b/tests/components/fan/test_device_condition.py index 9f9bde1a680..da48f3223af 100644 --- a/tests/components/fan/test_device_condition.py +++ b/tests/components/fan/test_device_condition.py @@ -12,11 +12,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - 
MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -24,12 +20,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -114,7 +104,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -184,22 +174,22 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on - event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off - event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -246,5 +236,5 @@ async def test_if_state_legacy( ) 
hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on - event - test_event1" diff --git a/tests/components/fan/test_device_trigger.py b/tests/components/fan/test_device_trigger.py index 38f39376592..f4673636637 100644 --- a/tests/components/fan/test_device_trigger.py +++ b/tests/components/fan/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -180,7 +173,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -273,8 +266,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning on. 
hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_on - device - {entry.entity_id} - off - on - None", f"turn_on_or_off - device - {entry.entity_id} - off - on - None", } @@ -282,8 +275,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning off. hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_off - device - {entry.entity_id} - on - off - None", f"turn_on_or_off - device - {entry.entity_id} - on - off - None", } @@ -293,7 +286,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -342,9 +335,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is turning on. 
hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_on - device - {entry.entity_id} - off - on - None" ) @@ -353,7 +346,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -399,16 +392,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index 04f594b959c..a7dc544a97a 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -1,5 +1,7 @@ """Tests for fan platforms.""" +from unittest.mock import patch + import pytest from homeassistant.components import fan @@ -12,15 +14,23 @@ from homeassistant.components.fan import ( FanEntityFeature, NotValidPresetModeError, ) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.helpers.entity_registry as er from 
homeassistant.setup import async_setup_component from .common import MockFan from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, help_test_all, import_and_test_deprecated_constant_enum, + mock_integration, + mock_platform, setup_test_component_platform, ) @@ -28,7 +38,7 @@ from tests.common import ( class BaseFan(FanEntity): """Implementation of the abstract FanEntity.""" - def __init__(self): + def __init__(self) -> None: """Initialize the fan.""" @@ -167,7 +177,10 @@ def test_deprecated_constants( enum: fan.FanEntityFeature, ) -> None: """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, fan, enum, "SUPPORT_", "2025.1") + if not FanEntityFeature.TURN_OFF and not FanEntityFeature.TURN_ON: + import_and_test_deprecated_constant_enum( + caplog, fan, enum, "SUPPORT_", "2025.1" + ) def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: @@ -180,11 +193,288 @@ def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> return 1 entity = MockFan() - assert entity.supported_features_compat is FanEntityFeature(1) + assert entity.supported_features is FanEntityFeature(1) assert "MockFan" in caplog.text assert "is using deprecated supported features values" in caplog.text assert "Instead it should use" in caplog.text assert "FanEntityFeature.SET_SPEED" in caplog.text caplog.clear() - assert entity.supported_features_compat is FanEntityFeature(1) + assert entity.supported_features is FanEntityFeature(1) assert "is using deprecated supported features values" not in caplog.text + + +async def test_warning_not_implemented_turn_on_off_feature( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test adding feature flag and warn if missing when methods are set.""" + + called = [] + + class MockFanEntityTest(MockFan): + """Mock Fan device.""" + + def turn_on( + self, + percentage: int | None = None, + preset_mode: str | None = 
None, + ) -> None: + """Turn on.""" + called.append("turn_on") + + def turn_off(self) -> None: + """Turn off.""" + called.append("turn_off") + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_fan_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test fan platform via config entry.""" + async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.fan", + MockPlatform(async_setup_entry=async_setup_entry_fan_platform), + ) + + with patch.object( + MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("fan.test") + assert state is not None + + assert ( + "Entity fan.test (<class 'tests.custom_components.fan.test_init.MockFanEntityTest'>) " + "does not set FanEntityFeature.TURN_OFF but implements the turn_off method. Please report it to the author of the 'test' custom integration" + in caplog.text + ) + assert ( + "Entity fan.test (<class 'tests.custom_components.fan.test_init.MockFanEntityTest'>) " + "does not set FanEntityFeature.TURN_ON but implements the turn_on method. 
Please report it to the author of the 'test' custom integration" + in caplog.text + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_ON, + { + "entity_id": "fan.test", + }, + blocking=True, + ) + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_OFF, + { + "entity_id": "fan.test", + }, + blocking=True, + ) + + assert len(called) == 2 + assert "turn_on" in called + assert "turn_off" in called + + +async def test_no_warning_implemented_turn_on_off_feature( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when feature flags are set.""" + + class MockFanEntityTest(MockFan): + """Mock Fan device.""" + + _attr_supported_features = ( + FanEntityFeature.DIRECTION + | FanEntityFeature.OSCILLATE + | FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_fan_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test fan platform via config entry.""" + async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.fan", + MockPlatform(async_setup_entry=async_setup_entry_fan_platform), + ) + + with patch.object( + MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("fan.test") + assert state 
is not None + + assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text + assert "does not set FanEntityFeature.TURN_ON" not in caplog.text + + +async def test_no_warning_integration_has_migrated( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" + + class MockFanEntityTest(MockFan): + """Mock Fan device.""" + + _enable_turn_on_off_backwards_compatibility = False + _attr_supported_features = ( + FanEntityFeature.DIRECTION + | FanEntityFeature.OSCILLATE + | FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_fan_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test fan platform via config entry.""" + async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.fan", + MockPlatform(async_setup_entry=async_setup_entry_fan_platform), + ) + + with patch.object( + MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("fan.test") + assert state is not None + + assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text + assert "does not set FanEntityFeature.TURN_ON" not in caplog.text + + +async def test_no_warning_integration_implement_feature_flags( + hass: 
HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when integration uses the correct feature flags.""" + + class MockFanEntityTest(MockFan): + """Mock Fan device.""" + + _attr_supported_features = ( + FanEntityFeature.DIRECTION + | FanEntityFeature.OSCILLATE + | FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_fan_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test fan platform via config entry.""" + async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.fan", + MockPlatform(async_setup_entry=async_setup_entry_fan_platform), + ) + + with patch.object( + MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("fan.test") + assert state is not None + + assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text + assert "does not set FanEntityFeature.TURN_ON" not in caplog.text diff --git a/tests/components/feedreader/conftest.py b/tests/components/feedreader/conftest.py index 0a5342615a9..8eeb89e00cd 100644 --- a/tests/components/feedreader/conftest.py +++ b/tests/components/feedreader/conftest.py @@ -52,6 +52,18 @@ def fixture_feed_identically_timed_events(hass: HomeAssistant) -> bytes: 
return load_fixture_bytes("feedreader6.xml") +@pytest.fixture(name="feed_without_items") +def fixture_feed_without_items(hass: HomeAssistant) -> bytes: + """Load test feed without any items.""" + return load_fixture_bytes("feedreader7.xml") + + +@pytest.fixture(name="feed_only_summary") +def fixture_feed_only_summary(hass: HomeAssistant) -> bytes: + """Load test feed data with one event containing only a summary, no content.""" + return load_fixture_bytes("feedreader8.xml") + + @pytest.fixture(name="events") async def fixture_events(hass: HomeAssistant) -> list[Event]: """Fixture that catches alexa events.""" diff --git a/tests/components/feedreader/fixtures/feedreader.xml b/tests/components/feedreader/fixtures/feedreader.xml index 8c85a4975ee..17402cad081 100644 --- a/tests/components/feedreader/fixtures/feedreader.xml +++ b/tests/components/feedreader/fixtures/feedreader.xml @@ -14,6 +14,7 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +1000 + Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader1.xml b/tests/components/feedreader/fixtures/feedreader1.xml index ff856125779..c71507c15b7 100644 --- a/tests/components/feedreader/fixtures/feedreader1.xml +++ b/tests/components/feedreader/fixtures/feedreader1.xml @@ -8,19 +8,21 @@ Mon, 30 Apr 2018 15:00:00 +1000 1800 - - Title 1 - Description 1 - http://www.example.com/link/1 - GUID 1 - Mon, 30 Apr 2018 15:10:00 +1000 - Title 2 Description 2 http://www.example.com/link/2 GUID 2 Mon, 30 Apr 2018 15:11:00 +1000 + Content 2 + + + Title 1 + Description 1 + http://www.example.com/link/1 + GUID 1 + Mon, 30 Apr 2018 15:10:00 +1000 + Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader2.xml b/tests/components/feedreader/fixtures/feedreader2.xml index 653a16e4561..2471d70edcb 100644 --- a/tests/components/feedreader/fixtures/feedreader2.xml +++ b/tests/components/feedreader/fixtures/feedreader2.xml @@ -9,88 +9,109 @@ 1800 - Title 1 - Mon, 30 Apr 2018 15:00:00 +1000 - - - 
Title 2 - Mon, 30 Apr 2018 15:01:00 +1000 - - - Title 3 - Mon, 30 Apr 2018 15:02:00 +1000 - - - Title 4 - Mon, 30 Apr 2018 15:03:00 +1000 - - - Title 5 - Mon, 30 Apr 2018 15:04:00 +1000 - - - Title 6 - Mon, 30 Apr 2018 15:05:00 +1000 - - - Title 7 - Mon, 30 Apr 2018 15:06:00 +1000 - - - Title 8 - Mon, 30 Apr 2018 15:07:00 +1000 - - - Title 9 - Mon, 30 Apr 2018 15:08:00 +1000 - - - Title 10 - Mon, 30 Apr 2018 15:09:00 +1000 - - - Title 11 - Mon, 30 Apr 2018 15:10:00 +1000 - - - Title 12 - Mon, 30 Apr 2018 15:11:00 +1000 - - - Title 13 - Mon, 30 Apr 2018 15:12:00 +1000 - - - Title 14 - Mon, 30 Apr 2018 15:13:00 +1000 - - - Title 15 - Mon, 30 Apr 2018 15:14:00 +1000 - - - Title 16 - Mon, 30 Apr 2018 15:15:00 +1000 - - - Title 17 - Mon, 30 Apr 2018 15:16:00 +1000 - - - Title 18 - Mon, 30 Apr 2018 15:17:00 +1000 - - - Title 19 - Mon, 30 Apr 2018 15:18:00 +1000 + Title 21 + Mon, 30 Apr 2018 15:20:00 +1000 + Content 21 Title 20 Mon, 30 Apr 2018 15:19:00 +1000 + Content 20 - Title 21 - Mon, 30 Apr 2018 15:20:00 +1000 + Title 19 + Mon, 30 Apr 2018 15:18:00 +1000 + Content 19 + + + Title 18 + Mon, 30 Apr 2018 15:17:00 +1000 + Content 18 + + + Title 17 + Mon, 30 Apr 2018 15:16:00 +1000 + Content 17 + + + Title 16 + Mon, 30 Apr 2018 15:15:00 +1000 + Content 16 + + + Title 15 + Mon, 30 Apr 2018 15:14:00 +1000 + Content 15 + + + Title 14 + Mon, 30 Apr 2018 15:13:00 +1000 + Content 14 + + + Title 13 + Mon, 30 Apr 2018 15:12:00 +1000 + Content 13 + + + Title 12 + Mon, 30 Apr 2018 15:11:00 +1000 + Content 12 + + + Title 11 + Mon, 30 Apr 2018 15:10:00 +1000 + Content 11 + + + Title 10 + Mon, 30 Apr 2018 15:09:00 +1000 + Content 10 + + + Title 9 + Mon, 30 Apr 2018 15:08:00 +1000 + Content 9 + + + Title 8 + Mon, 30 Apr 2018 15:07:00 +1000 + Content 8 + + + Title 7 + Mon, 30 Apr 2018 15:06:00 +1000 + Content 7 + + + Title 6 + Mon, 30 Apr 2018 15:05:00 +1000 + Content 6 + + + Title 5 + Mon, 30 Apr 2018 15:04:00 +1000 + Content 5 + + + Title 4 + Mon, 30 Apr 2018 15:03:00 +1000 + Content 
4 + + + Title 3 + Mon, 30 Apr 2018 15:02:00 +1000 + Content 3 + + + Title 1 + Mon, 30 Apr 2018 15:00:00 +1000 + Content 1 + + + Title 2 + Mon, 30 Apr 2018 15:01:00 +1000 + Content 2 diff --git a/tests/components/feedreader/fixtures/feedreader3.xml b/tests/components/feedreader/fixtures/feedreader3.xml index d8ccd119306..67daef20fe8 100644 --- a/tests/components/feedreader/fixtures/feedreader3.xml +++ b/tests/components/feedreader/fixtures/feedreader3.xml @@ -14,17 +14,20 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +1000 + Content 1 Title 2 Description 2 http://www.example.com/link/2 GUID 2 + Content 2 Description 3 http://www.example.com/link/3 GUID 3 + Content 3 diff --git a/tests/components/feedreader/fixtures/feedreader4.xml b/tests/components/feedreader/fixtures/feedreader4.xml index 81828ccb6e2..11c8d501395 100644 --- a/tests/components/feedreader/fixtures/feedreader4.xml +++ b/tests/components/feedreader/fixtures/feedreader4.xml @@ -14,6 +14,7 @@ http://www.example.com/link/1 GUID 1 26.10.2019 - 12:06:24 + Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader5.xml b/tests/components/feedreader/fixtures/feedreader5.xml index d9b1dda1ad2..562fd45ea93 100644 --- a/tests/components/feedreader/fixtures/feedreader5.xml +++ b/tests/components/feedreader/fixtures/feedreader5.xml @@ -14,5 +14,6 @@ urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a 2003-12-13T18:30:02Z Some text. 
+ Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader6.xml b/tests/components/feedreader/fixtures/feedreader6.xml index 621c89787e8..48abd06b95b 100644 --- a/tests/components/feedreader/fixtures/feedreader6.xml +++ b/tests/components/feedreader/fixtures/feedreader6.xml @@ -14,6 +14,7 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +0000 + Content 1 Title 2 @@ -21,6 +22,7 @@ http://www.example.com/link/2 GUID 2 Mon, 30 Apr 2018 15:10:00 +0000 + Content 2 diff --git a/tests/components/feedreader/fixtures/feedreader7.xml b/tests/components/feedreader/fixtures/feedreader7.xml new file mode 100644 index 00000000000..0ffac8dd2ee --- /dev/null +++ b/tests/components/feedreader/fixtures/feedreader7.xml @@ -0,0 +1,11 @@ + + + + RSS Sample + This is an example of an RSS feed + http://www.example.com/main.html + Mon, 30 Apr 2018 12:00:00 +1000 + Mon, 30 Apr 2018 15:00:00 +1000 + 1800 + + diff --git a/tests/components/feedreader/fixtures/feedreader8.xml b/tests/components/feedreader/fixtures/feedreader8.xml new file mode 100644 index 00000000000..d1c167352f8 --- /dev/null +++ b/tests/components/feedreader/fixtures/feedreader8.xml @@ -0,0 +1,21 @@ + + + + RSS Sample + This is an example of an RSS feed + http://www.example.com/main.html + Mon, 30 Apr 2018 12:00:00 +1000 + Mon, 30 Apr 2018 15:00:00 +1000 + 1800 + + + Title 1 + Description 1 + http://www.example.com/link/1 + GUID 1 + Mon, 30 Apr 2018 15:10:00 +1000 + This is a summary + + + + diff --git a/tests/components/feedreader/test_config_flow.py b/tests/components/feedreader/test_config_flow.py index 48c341492e0..47bccce902f 100644 --- a/tests/components/feedreader/test_config_flow.py +++ b/tests/components/feedreader/test_config_flow.py @@ -13,7 +13,7 @@ from homeassistant.components.feedreader.const import ( ) from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_URL -from homeassistant.core import DOMAIN as HA_DOMAIN, 
HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -83,16 +83,6 @@ async def test_user_errors( assert result["step_id"] == "user" assert result["errors"] == {"base": "url_error"} - # no feed entries returned - feedparser.side_effect = None - feedparser.return_value = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_URL: URL} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "no_feed_entries"} - # success feedparser.side_effect = None feedparser.return_value = feed_one_event @@ -138,43 +128,30 @@ async def test_import( assert config_entries[0].data == expected_data assert config_entries[0].options == expected_options - assert issue_registry.async_get_issue(HA_DOMAIN, "deprecated_yaml_feedreader") + assert issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, "deprecated_yaml_feedreader" + ) -@pytest.mark.parametrize( - ("side_effect", "return_value", "expected_issue_id"), - [ - ( - urllib.error.URLError("Test"), - None, - "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", - ), - ( - None, - None, - "import_yaml_error_feedreader_no_feed_entries_http_some_rss_local_rss_feed_xml", - ), - ], -) async def test_import_errors( hass: HomeAssistant, issue_registry: ir.IssueRegistry, feedparser, setup_entry, feed_one_event, - side_effect, - return_value, - expected_issue_id, ) -> None: """Test starting an import flow which results in an URL error.""" config_entries = hass.config_entries.async_entries(DOMAIN) assert not config_entries # raise URLError - feedparser.side_effect = side_effect - feedparser.return_value = return_value + feedparser.side_effect = urllib.error.URLError("Test") + feedparser.return_value = None 
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_URLS: [URL]}}) - assert issue_registry.async_get_issue(DOMAIN, expected_issue_id) + assert issue_registry.async_get_issue( + DOMAIN, + "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", + ) async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: @@ -248,19 +225,6 @@ async def test_reconfigure_errors( assert result["step_id"] == "reconfigure_confirm" assert result["errors"] == {"base": "url_error"} - # no feed entries returned - feedparser.side_effect = None - feedparser.return_value = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_URL: "http://other.rss.local/rss_feed.xml", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" - assert result["errors"] == {"base": "no_feed_entries"} - # success feedparser.side_effect = None feedparser.return_value = feed_one_event diff --git a/tests/components/feedreader/test_event.py b/tests/components/feedreader/test_event.py new file mode 100644 index 00000000000..5d903383c05 --- /dev/null +++ b/tests/components/feedreader/test_event.py @@ -0,0 +1,57 @@ +"""The tests for the feedreader event entity.""" + +from datetime import timedelta +from unittest.mock import patch + +from homeassistant.components.feedreader.event import ( + ATTR_CONTENT, + ATTR_LINK, + ATTR_TITLE, +) +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from . 
import create_mock_entry +from .const import VALID_CONFIG_DEFAULT + +from tests.common import async_fire_time_changed + + +async def test_event_entity( + hass: HomeAssistant, feed_one_event, feed_two_event, feed_only_summary +) -> None: + """Test feed event entity.""" + entry = create_mock_entry(VALID_CONFIG_DEFAULT) + entry.add_to_hass(hass) + with patch( + "homeassistant.components.feedreader.coordinator.feedparser.http.get", + side_effect=[feed_one_event, feed_two_event, feed_only_summary], + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("event.mock_title") + assert state + assert state.attributes[ATTR_TITLE] == "Title 1" + assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1" + assert state.attributes[ATTR_CONTENT] == "Content 1" + + future = dt_util.utcnow() + timedelta(hours=1, seconds=1) + async_fire_time_changed(hass, future) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("event.mock_title") + assert state + assert state.attributes[ATTR_TITLE] == "Title 2" + assert state.attributes[ATTR_LINK] == "http://www.example.com/link/2" + assert state.attributes[ATTR_CONTENT] == "Content 2" + + future = dt_util.utcnow() + timedelta(hours=2, seconds=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("event.mock_title") + assert state + assert state.attributes[ATTR_TITLE] == "Title 1" + assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1" + assert state.attributes[ATTR_CONTENT] == "This is a summary" diff --git a/tests/components/feedreader/test_init.py b/tests/components/feedreader/test_init.py index 1dcbf5ba45d..d7700d79e3b 100644 --- a/tests/components/feedreader/test_init.py +++ b/tests/components/feedreader/test_init.py @@ -165,6 +165,21 @@ async def test_feed_identical_timestamps( ) +async def test_feed_with_only_summary( 
+ hass: HomeAssistant, events, feed_only_summary +) -> None: + """Test simple feed with only summary, no content.""" + assert await async_setup_config_entry( + hass, VALID_CONFIG_DEFAULT, return_value=feed_only_summary + ) + await hass.async_block_till_done() + + assert len(events) == 1 + assert events[0].data.title == "Title 1" + assert events[0].data.description == "Description 1" + assert events[0].data.content[0].value == "This is a summary" + + async def test_feed_updates( hass: HomeAssistant, events, feed_one_event, feed_two_event ) -> None: @@ -247,6 +262,20 @@ async def test_feed_with_unrecognized_publication_date( assert len(events) == 1 +async def test_feed_without_items( + hass: HomeAssistant, events, feed_without_items, caplog: pytest.LogCaptureFixture +) -> None: + """Test simple feed without any items.""" + assert "No new entries to be published in feed" not in caplog.text + assert await async_setup_config_entry( + hass, VALID_CONFIG_DEFAULT, return_value=feed_without_items + ) + await hass.async_block_till_done() + + assert "No new entries to be published in feed" in caplog.text + assert len(events) == 0 + + async def test_feed_invalid_data(hass: HomeAssistant, events) -> None: """Test feed with invalid data.""" assert await async_setup_config_entry( @@ -296,7 +325,7 @@ async def test_feed_errors( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert ( - "Error fetching feed data from http://some.rss.local/rss_feed.xml: " + "Error fetching feed data from http://some.rss.local/rss_feed.xml : " in caplog.text ) diff --git a/tests/components/ffmpeg/test_init.py b/tests/components/ffmpeg/test_init.py index 353b8fdfcc0..aa407d5b695 100644 --- a/tests/components/ffmpeg/test_init.py +++ b/tests/components/ffmpeg/test_init.py @@ -16,13 +16,13 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.setup import async_setup_component, 
setup_component +from homeassistant.setup import async_setup_component -from tests.common import assert_setup_component, get_test_home_assistant +from tests.common import assert_setup_component @callback -def async_start(hass, entity_id=None): +def async_start(hass: HomeAssistant, entity_id: str | None = None) -> None: """Start a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -32,7 +32,7 @@ def async_start(hass, entity_id=None): @callback -def async_stop(hass, entity_id=None): +def async_stop(hass: HomeAssistant, entity_id: str | None = None) -> None: """Stop a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -42,7 +42,7 @@ def async_stop(hass, entity_id=None): @callback -def async_restart(hass, entity_id=None): +def async_restart(hass: HomeAssistant, entity_id: str | None = None) -> None: """Restart a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -54,7 +54,12 @@ def async_restart(hass, entity_id=None): class MockFFmpegDev(ffmpeg.FFmpegBase): """FFmpeg device mock.""" - def __init__(self, hass, initial_state=True, entity_id="test.ffmpeg_device"): + def __init__( + self, + hass: HomeAssistant, + initial_state: bool = True, + entity_id: str = "test.ffmpeg_device", + ) -> None: """Initialize mock.""" super().__init__(None, initial_state) @@ -77,26 +82,22 @@ class MockFFmpegDev(ffmpeg.FFmpegBase): self.called_entities = entity_ids -def test_setup_component() -> None: +async def test_setup_component(hass: HomeAssistant) -> None: """Set up ffmpeg component.""" - with get_test_home_assistant() as hass: - with assert_setup_component(1): - setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + with assert_setup_component(1): + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" - hass.stop() + assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" -def 
test_setup_component_test_service() -> None: +async def test_setup_component_test_service(hass: HomeAssistant) -> None: """Set up ffmpeg component test services.""" - with get_test_home_assistant() as hass: - with assert_setup_component(1): - setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + with assert_setup_component(1): + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - assert hass.services.has_service(ffmpeg.DOMAIN, "start") - assert hass.services.has_service(ffmpeg.DOMAIN, "stop") - assert hass.services.has_service(ffmpeg.DOMAIN, "restart") - hass.stop() + assert hass.services.has_service(ffmpeg.DOMAIN, "start") + assert hass.services.has_service(ffmpeg.DOMAIN, "stop") + assert hass.services.has_service(ffmpeg.DOMAIN, "restart") async def test_setup_component_test_register(hass: HomeAssistant) -> None: diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index d2f004a160c..4d99dea6682 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -1,9 +1,9 @@ """Test helpers.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.fibaro import CONF_IMPORT_PLUGINS, DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME diff --git a/tests/components/fido/test_sensor.py b/tests/components/fido/test_sensor.py index a067f060af8..654221cfacd 100644 --- a/tests/components/fido/test_sensor.py +++ b/tests/components/fido/test_sensor.py @@ -6,9 +6,9 @@ from unittest.mock import MagicMock, patch from pyfido.client import PyFidoError import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components.fido import sensor as fido from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import assert_setup_component @@ -18,7 +18,7 @@ CONTRACT = 
"123456789" class FidoClientMock: """Fake Fido client.""" - def __init__(self, username, password, timeout=None, httpsession=None): + def __init__(self, username, password, timeout=None, httpsession=None) -> None: """Fake Fido client init.""" def get_phone_numbers(self): diff --git a/tests/components/file/conftest.py b/tests/components/file/conftest.py index 265acde36ca..5345a0d38d0 100644 --- a/tests/components/file/conftest.py +++ b/tests/components/file/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for file platform.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/file/test_config_flow.py b/tests/components/file/test_config_flow.py index 86ada1fec61..30d00411c44 100644 --- a/tests/components/file/test_config_flow.py +++ b/tests/components/file/test_config_flow.py @@ -7,6 +7,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.file import DOMAIN +from homeassistant.const import CONF_UNIT_OF_MEASUREMENT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -15,20 +16,22 @@ from tests.common import MockConfigEntry MOCK_CONFIG_NOTIFY = { "platform": "notify", "file_path": "some_file", - "timestamp": True, } +MOCK_OPTIONS_NOTIFY = {"timestamp": True} MOCK_CONFIG_SENSOR = { "platform": "sensor", "file_path": "some/path", - "value_template": "{{ value | round(1) }}", } - -pytestmark = pytest.mark.usefixtures("mock_setup_entry") +MOCK_OPTIONS_SENSOR = {"value_template": "{{ value | round(1) }}"} +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) 
async def test_form( hass: HomeAssistant, @@ -36,6 +39,7 @@ async def test_form( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -50,7 +54,7 @@ async def test_form( ) await hass.async_block_till_done() - user_input = dict(data) + user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input @@ -59,12 +63,17 @@ async def test_form( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"] == data + assert result2["options"] == options assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) async def test_already_configured( hass: HomeAssistant, @@ -72,9 +81,10 @@ async def test_already_configured( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test aborting if the entry is already configured.""" - entry = MockConfigEntry(domain=DOMAIN, data=data) + entry = MockConfigEntry(domain=DOMAIN, data=data, options=options) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( @@ -91,7 +101,7 @@ async def test_already_configured( assert result["type"] is FlowResultType.FORM assert result["step_id"] == platform - user_input = dict(data) + user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -103,10 +113,14 @@ async def test_already_configured( assert result2["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_setup_entry") 
@pytest.mark.parametrize("is_allowed", [False], ids=["not_allowed"]) @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) async def test_not_allowed( hass: HomeAssistant, @@ -114,6 +128,7 @@ async def test_not_allowed( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test aborting if the file path is not allowed.""" result = await hass.config_entries.flow.async_init( @@ -130,7 +145,7 @@ async def test_not_allowed( assert result["type"] is FlowResultType.FORM assert result["step_id"] == platform - user_input = dict(data) + user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -140,3 +155,49 @@ async def test_not_allowed( assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"file_path": "not_allowed"} + + +@pytest.mark.parametrize( + ("platform", "data", "options", "new_options"), + [ + ( + "sensor", + MOCK_CONFIG_SENSOR, + MOCK_OPTIONS_SENSOR, + {CONF_UNIT_OF_MEASUREMENT: "mm"}, + ), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY, {"timestamp": False}), + ], +) +async def test_options_flow( + hass: HomeAssistant, + mock_is_allowed_path: bool, + platform: str, + data: dict[str, Any], + options: dict[str, Any], + new_options: dict[str, Any], +) -> None: + """Test options config flow.""" + entry = MockConfigEntry(domain=DOMAIN, data=data, options=options, version=2) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await 
hass.config_entries.options.async_configure( + result["flow_id"], + user_input=new_options, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == new_options + + entry = hass.config_entries.async_get_entry(entry.entry_id) + assert entry.state is config_entries.ConfigEntryState.LOADED + assert entry.options == new_options diff --git a/tests/components/file/test_init.py b/tests/components/file/test_init.py new file mode 100644 index 00000000000..faf1488ed07 --- /dev/null +++ b/tests/components/file/test_init.py @@ -0,0 +1,65 @@ +"""The tests for local file init.""" + +from unittest.mock import MagicMock, Mock, patch + +from homeassistant.components.file import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, get_fixture_path + + +@patch("os.path.isfile", Mock(return_value=True)) +@patch("os.access", Mock(return_value=True)) +async def test_migration_to_version_2( + hass: HomeAssistant, mock_is_allowed_path: MagicMock +) -> None: + """Test the File sensor with JSON entries.""" + data = { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", "file"), + "value_template": "{{ value_json.temperature }}", + } + + entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data=data, + title=f"test [{data['file_path']}]", + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.LOADED + assert entry.version == 2 + assert entry.data == { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", "file"), + } + assert entry.options == { + "value_template": "{{ value_json.temperature }}", + } + + +@patch("os.path.isfile", Mock(return_value=True)) +@patch("os.access", Mock(return_value=True)) +async def test_migration_from_future_version( + 
hass: HomeAssistant, mock_is_allowed_path: MagicMock +) -> None: + """Test the File sensor with JSON entries.""" + data = { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", "file"), + "value_template": "{{ value_json.temperature }}", + } + + entry = MockConfigEntry( + domain=DOMAIN, version=3, data=data, title=f"test [{data['file_path']}]" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index faa9027aa21..33e4739a488 100644 --- a/tests/components/file/test_notify.py +++ b/tests/components/file/test_notify.py @@ -174,7 +174,7 @@ async def test_legacy_notify_file_exception( @pytest.mark.parametrize( - ("timestamp", "data"), + ("timestamp", "data", "options"), [ ( False, @@ -182,6 +182,8 @@ async def test_legacy_notify_file_exception( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, ), @@ -191,6 +193,8 @@ async def test_legacy_notify_file_exception( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": True, }, ), @@ -203,6 +207,7 @@ async def test_legacy_notify_file_entry_only_setup( timestamp: bool, mock_is_allowed_path: MagicMock, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test the legacy notify file output in entry only setup.""" filename = "mock_file" @@ -213,7 +218,11 @@ async def test_legacy_notify_file_entry_only_setup( message = params["message"] entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -252,7 +261,7 @@ async def test_legacy_notify_file_entry_only_setup( @pytest.mark.parametrize( - ("is_allowed", 
"config"), + ("is_allowed", "config", "options"), [ ( False, @@ -260,6 +269,8 @@ async def test_legacy_notify_file_entry_only_setup( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, ), @@ -271,10 +282,15 @@ async def test_legacy_notify_file_not_allowed( caplog: pytest.LogCaptureFixture, mock_is_allowed_path: MagicMock, config: dict[str, Any], + options: dict[str, Any], ) -> None: """Test legacy notify file output not allowed.""" entry = MockConfigEntry( - domain=DOMAIN, data=config, title=f"test [{config['file_path']}]" + domain=DOMAIN, + data=config, + version=2, + options=options, + title=f"test [{config['file_path']}]", ) entry.add_to_hass(hass) assert not await hass.config_entries.async_setup(entry.entry_id) @@ -293,13 +309,15 @@ async def test_legacy_notify_file_not_allowed( ], ) @pytest.mark.parametrize( - ("data", "is_allowed"), + ("data", "options", "is_allowed"), [ ( { "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, True, @@ -314,12 +332,17 @@ async def test_notify_file_write_access_failed( service: str, params: dict[str, Any], data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test the notify file fails.""" domain = notify.DOMAIN entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/file/test_sensor.py b/tests/components/file/test_sensor.py index 60a81df2b1e..634ae9d626c 100644 --- a/tests/components/file/test_sensor.py +++ b/tests/components/file/test_sensor.py @@ -47,7 +47,11 @@ async def test_file_value_entry_setup( } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) 
entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -66,11 +70,17 @@ async def test_file_value_template( "platform": "sensor", "name": "file2", "file_path": get_fixture_path("file_value_template.txt", "file"), + } + options = { "value_template": "{{ value_json.temperature }}", } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -90,7 +100,11 @@ async def test_file_empty(hass: HomeAssistant, mock_is_allowed_path: MagicMock) } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -113,7 +127,11 @@ async def test_file_path_invalid( } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/file_upload/test_init.py b/tests/components/file_upload/test_init.py index 149bbb7ee2f..22ad9323f05 100644 --- a/tests/components/file_upload/test_init.py +++ b/tests/components/file_upload/test_init.py @@ -3,6 +3,7 @@ from contextlib import contextmanager from pathlib import Path from random import getrandbits +from typing import Any from unittest.mock import patch import pytest @@ -141,7 +142,7 @@ async def test_upload_large_file_fails( yield MockPathOpen() class MockPathOpen: - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: pass def write(self, data: bytes) -> None: diff --git a/tests/components/filesize/conftest.py 
b/tests/components/filesize/conftest.py index 859886a3058..ac66af0d22f 100644 --- a/tests/components/filesize/conftest.py +++ b/tests/components/filesize/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from pathlib import Path from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.filesize.const import DOMAIN from homeassistant.const import CONF_FILE_PATH diff --git a/tests/components/filter/test_sensor.py b/tests/components/filter/test_sensor.py index 0ece61708f2..a9581b78f4e 100644 --- a/tests/components/filter/test_sensor.py +++ b/tests/components/filter/test_sensor.py @@ -467,7 +467,7 @@ def test_throttle(values: list[State]) -> None: new_state = filt.filter_state(state) if not filt.skip_processing: filtered.append(new_state) - assert [20, 21] == [f.state for f in filtered] + assert [f.state for f in filtered] == [20, 21] def test_time_throttle(values: list[State]) -> None: @@ -480,7 +480,7 @@ def test_time_throttle(values: list[State]) -> None: new_state = filt.filter_state(state) if not filt.skip_processing: filtered.append(new_state) - assert [20, 18, 22] == [f.state for f in filtered] + assert [f.state for f in filtered] == [20, 18, 22] def test_time_sma(values: list[State]) -> None: diff --git a/tests/components/fitbit/conftest.py b/tests/components/fitbit/conftest.py index b1ff8a94e12..57511739993 100644 --- a/tests/components/fitbit/conftest.py +++ b/tests/components/fitbit/conftest.py @@ -1,6 +1,6 @@ """Test fixtures for fitbit.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator import datetime from http import HTTPStatus import time @@ -9,7 +9,6 @@ from unittest.mock import patch import pytest from requests_mock.mocker import Mocker -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git 
a/tests/components/fjaraskupan/test_config_flow.py b/tests/components/fjaraskupan/test_config_flow.py index fa0df9241dd..6d3df614443 100644 --- a/tests/components/fjaraskupan/test_config_flow.py +++ b/tests/components/fjaraskupan/test_config_flow.py @@ -2,7 +2,8 @@ from __future__ import annotations -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import AsyncMock, patch import pytest @@ -15,7 +16,7 @@ from . import COOKER_SERVICE_INFO @pytest.fixture(name="mock_setup_entry", autouse=True) -async def fixture_mock_setup_entry(hass): +def fixture_mock_setup_entry() -> Generator[AsyncMock]: """Fixture for config entry.""" with patch( @@ -24,7 +25,7 @@ async def fixture_mock_setup_entry(hass): yield mock_setup_entry -async def test_configure(hass: HomeAssistant, mock_setup_entry) -> None: +async def test_configure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" with patch( "homeassistant.components.fjaraskupan.config_flow.async_discovered_service_info", diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index e1b98070d25..cc7c9fa0570 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -1,10 +1,10 @@ """Configuration for Flexit Nordic (BACnet) tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from flexit_bacnet import FlexitBACnet import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.flexit_bacnet.const import DOMAIN diff --git a/tests/components/flic/test_binary_sensor.py b/tests/components/flic/test_binary_sensor.py index 44db1d6ea1b..cdc1d64db41 100644 --- a/tests/components/flic/test_binary_sensor.py +++ b/tests/components/flic/test_binary_sensor.py @@ -8,7 +8,7 @@ from homeassistant.setup import async_setup_component class _MockFlicClient: - def __init__(self, 
button_addresses): + def __init__(self, button_addresses) -> None: self.addresses = button_addresses self.get_info_callback = None self.scan_wizard = None diff --git a/tests/components/flick_electric/test_config_flow.py b/tests/components/flick_electric/test_config_flow.py index 1b3ed1de34d..85a6495d3c5 100644 --- a/tests/components/flick_electric/test_config_flow.py +++ b/tests/components/flick_electric/test_config_flow.py @@ -6,6 +6,7 @@ from pyflick.authentication import AuthException from homeassistant import config_entries from homeassistant.components.flick_electric.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -15,7 +16,7 @@ from tests.common import MockConfigEntry CONF = {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"} -async def _flow_submit(hass): +async def _flow_submit(hass: HomeAssistant) -> ConfigFlowResult: return await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, diff --git a/tests/components/flo/conftest.py b/tests/components/flo/conftest.py index 33d467a2abf..66b56d1f10b 100644 --- a/tests/components/flo/conftest.py +++ b/tests/components/flo/conftest.py @@ -16,7 +16,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture -def config_entry(hass): +def config_entry() -> MockConfigEntry: """Config entry version 1 fixture.""" return MockConfigEntry( domain=FLO_DOMAIN, diff --git a/tests/components/flo/test_binary_sensor.py b/tests/components/flo/test_binary_sensor.py index d3032cde1b5..23a84734b0d 100644 --- a/tests/components/flo/test_binary_sensor.py +++ b/tests/components/flo/test_binary_sensor.py @@ -1,5 +1,7 @@ """Test Flo by Moen binary sensor entities.""" +import pytest + from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.const 
import ( ATTR_FRIENDLY_NAME, @@ -13,9 +15,12 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry + +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_binary_sensors( - hass: HomeAssistant, config_entry, aioclient_mock_fixture + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test Flo by Moen sensors.""" config_entry.add_to_hass(hass) diff --git a/tests/components/flo/test_config_flow.py b/tests/components/flo/test_config_flow.py index 99f8f315fb2..f9237e979a6 100644 --- a/tests/components/flo/test_config_flow.py +++ b/tests/components/flo/test_config_flow.py @@ -5,6 +5,8 @@ import json import time from unittest.mock import patch +import pytest + from homeassistant import config_entries from homeassistant.components.flo.const import DOMAIN from homeassistant.const import CONTENT_TYPE_JSON @@ -16,7 +18,8 @@ from .common import TEST_EMAIL_ADDRESS, TEST_PASSWORD, TEST_TOKEN, TEST_USER_ID from tests.test_util.aiohttp import AiohttpClientMocker -async def test_form(hass: HomeAssistant, aioclient_mock_fixture) -> None: +@pytest.mark.usefixtures("aioclient_mock_fixture") +async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/flo/test_device.py b/tests/components/flo/test_device.py index 6248bdcd8f9..c3e26e77370 100644 --- a/tests/components/flo/test_device.py +++ b/tests/components/flo/test_device.py @@ -5,6 +5,7 @@ from unittest.mock import patch from aioflo.errors import RequestError from freezegun.api import FrozenDateTimeFactory +import pytest from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.components.flo.coordinator import FloDeviceDataUpdateCoordinator @@ -14,14 +15,14 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID -from tests.common import 
async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_device( hass: HomeAssistant, - config_entry, - aioclient_mock_fixture, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, ) -> None: @@ -90,10 +91,10 @@ async def test_device( assert aioclient_mock.call_count == call_count + 6 +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_device_failures( hass: HomeAssistant, - config_entry, - aioclient_mock_fixture, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, ) -> None: diff --git a/tests/components/flo/test_init.py b/tests/components/flo/test_init.py index 599a91b80fb..805a6278395 100644 --- a/tests/components/flo/test_init.py +++ b/tests/components/flo/test_init.py @@ -1,5 +1,7 @@ """Test init.""" +import pytest + from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -7,10 +9,11 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry -async def test_setup_entry( - hass: HomeAssistant, config_entry, aioclient_mock_fixture -) -> None: + +@pytest.mark.usefixtures("aioclient_mock_fixture") +async def test_setup_entry(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test migration of config entry from v1.""" config_entry.add_to_hass(hass) assert await async_setup_component( diff --git a/tests/components/flo/test_sensor.py b/tests/components/flo/test_sensor.py index 5fe388c62e1..0c763927296 100644 --- a/tests/components/flo/test_sensor.py +++ b/tests/components/flo/test_sensor.py @@ -1,5 +1,7 @@ """Test Flo by Moen sensor entities.""" +import pytest + from 
homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass from homeassistant.const import ATTR_ENTITY_ID, CONF_PASSWORD, CONF_USERNAME @@ -9,12 +11,12 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker -async def test_sensors( - hass: HomeAssistant, config_entry, aioclient_mock_fixture -) -> None: +@pytest.mark.usefixtures("aioclient_mock_fixture") +async def test_sensors(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test Flo by Moen sensors.""" hass.config.units = US_CUSTOMARY_SYSTEM config_entry.add_to_hass(hass) @@ -85,10 +87,10 @@ async def test_sensors( ) +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_manual_update_entity( hass: HomeAssistant, - config_entry, - aioclient_mock_fixture, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, ) -> None: """Test manual update entity via service homeasasistant/update_entity.""" diff --git a/tests/components/flo/test_services.py b/tests/components/flo/test_services.py index d8837d9c6b6..565f39f69fe 100644 --- a/tests/components/flo/test_services.py +++ b/tests/components/flo/test_services.py @@ -19,15 +19,16 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker SWITCH_ENTITY_ID = "switch.smart_water_shutoff_shutoff_valve" +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_services( hass: HomeAssistant, - config_entry, - aioclient_mock_fixture, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, ) -> None: """Test Flo services.""" diff --git a/tests/components/flo/test_switch.py b/tests/components/flo/test_switch.py index 85f7ea0f317..02ab93f9e67 
100644 --- a/tests/components/flo/test_switch.py +++ b/tests/components/flo/test_switch.py @@ -1,5 +1,7 @@ """Tests for the switch domain for Flo by Moen.""" +import pytest + from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.components.switch import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, STATE_OFF, STATE_ON @@ -8,9 +10,12 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry + +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_valve_switches( - hass: HomeAssistant, config_entry, aioclient_mock_fixture + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test Flo by Moen valve switches.""" config_entry.add_to_hass(hass) diff --git a/tests/components/flume/conftest.py b/tests/components/flume/conftest.py new file mode 100644 index 00000000000..fb0d0157bbc --- /dev/null +++ b/tests/components/flume/conftest.py @@ -0,0 +1,167 @@ +"""Flume test fixtures.""" + +from collections.abc import Generator +import datetime +from http import HTTPStatus +import json +from unittest.mock import mock_open, patch + +import jwt +import pytest +import requests +from requests_mock.mocker import Mocker + +from homeassistant.components.flume.const import DOMAIN +from homeassistant.const import ( + CONF_CLIENT_ID, + CONF_CLIENT_SECRET, + CONF_PASSWORD, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +USER_ID = "test-user-id" +REFRESH_TOKEN = "refresh-token" +TOKEN_URL = "https://api.flumetech.com/oauth/token" +DEVICE_LIST_URL = ( + "https://api.flumetech.com/users/test-user-id/devices?user=true&location=true" +) +BRIDGE_DEVICE = { + "id": "1234", + "type": 1, # Bridge + "location": { + "name": "Bridge Location", + }, + "name": "Flume Bridge", + "connected": True, +} +SENSOR_DEVICE = { + "id": "1234", + "type": 2, # Sensor + "location": { + 
"name": "Sensor Location", + }, + "name": "Flume Sensor", + "connected": True, +} +DEVICE_LIST = [BRIDGE_DEVICE, SENSOR_DEVICE] +NOTIFICATIONS_URL = "https://api.flumetech.com/users/test-user-id/notifications?limit=50&offset=0&sort_direction=ASC" +NOTIFICATION = { + "id": 111111, + "device_id": "6248148189204194987", + "user_id": USER_ID, + "type": 1, + "message": "Low Flow Leak triggered at Home. Water has been running for 2 hours averaging 0.43 gallons every minute.", + "created_datetime": "2020-01-15T16:33:39.000Z", + "title": "Potential Leak Detected!", + "read": True, + "extra": { + "query": { + "request_id": "SYSTEM_TRIGGERED_USAGE_ALERT", + "since_datetime": "2020-01-15 06:33:59", + "until_datetime": "2020-01-15 08:33:59", + "tz": "America/Los_Angeles", + "bucket": "MIN", + "raw": False, + "group_multiplier": 2, + "device_id": ["6248148189204194987"], + } + }, + "event_rule": "Low Flow Leak", +} + +NOTIFICATIONS_LIST = [NOTIFICATION] + + +@pytest.fixture(name="config_entry") +def config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: + """Fixture to create a config entry.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + title="test-username", + unique_id="test-username", + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) + config_entry.add_to_hass(hass) + return config_entry + + +def encode_access_token() -> str: + """Encode the payload of the access token.""" + expiration_time = datetime.datetime.now() + datetime.timedelta(hours=12) + payload = { + "user_id": USER_ID, + "exp": int(expiration_time.timestamp()), + } + return jwt.encode(payload, key="secret") + + +@pytest.fixture(name="access_token") +def access_token_fixture(requests_mock: Mocker) -> Generator[None]: + """Fixture to setup the access token.""" + token_response = { + "refresh_token": REFRESH_TOKEN, + "access_token": encode_access_token(), + } + requests_mock.register_uri( + 
"POST", + TOKEN_URL, + status_code=HTTPStatus.OK, + json={"data": [token_response]}, + ) + with patch("builtins.open", mock_open(read_data=json.dumps(token_response))): + yield + + +@pytest.fixture(name="device_list") +def device_list_fixture(requests_mock: Mocker) -> None: + """Fixture to setup the device list API response access token.""" + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.OK, + json={ + "data": DEVICE_LIST, + }, + ) + + +@pytest.fixture(name="device_list_timeout") +def device_list_timeout_fixture(requests_mock: Mocker) -> None: + """Fixture to test a timeout when connecting to the device list url.""" + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + exc=requests.exceptions.ConnectTimeout, + ) + + +@pytest.fixture(name="device_list_unauthorized") +def device_list_unauthorized_fixture(requests_mock: Mocker) -> None: + """Fixture to test an authorized error from the device list url.""" + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.UNAUTHORIZED, + json={}, + ) + + +@pytest.fixture(name="notifications_list") +def notifications_list_fixture(requests_mock: Mocker) -> None: + """Fixture to setup the device list API response access token.""" + requests_mock.register_uri( + "GET", + NOTIFICATIONS_URL, + status_code=HTTPStatus.OK, + json={ + "data": NOTIFICATIONS_LIST, + }, + ) diff --git a/tests/components/flume/test_config_flow.py b/tests/components/flume/test_config_flow.py index 706cee44739..915299223e9 100644 --- a/tests/components/flume/test_config_flow.py +++ b/tests/components/flume/test_config_flow.py @@ -1,8 +1,11 @@ """Test the flume config flow.""" -from unittest.mock import MagicMock, patch +from http import HTTPStatus +from unittest.mock import patch +import pytest import requests.exceptions +from requests_mock.mocker import Mocker from homeassistant import config_entries from homeassistant.components.flume.const import DOMAIN @@ -15,15 +18,12 @@ from 
homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import DEVICE_LIST, DEVICE_LIST_URL + from tests.common import MockConfigEntry -def _get_mocked_flume_device_list(): - flume_device_list_mock = MagicMock() - type(flume_device_list_mock).device_list = ["mock"] - return flume_device_list_mock - - +@pytest.mark.usefixtures("access_token", "device_list") async def test_form(hass: HomeAssistant) -> None: """Test we get the form and can setup from user input.""" @@ -33,17 +33,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - mock_flume_device_list = _get_mocked_flume_device_list() - with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - return_value=mock_flume_device_list, - ), patch( "homeassistant.components.flume.async_setup_entry", return_value=True, @@ -71,66 +61,57 @@ async def test_form(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_invalid_auth(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("access_token") +async def test_form_invalid_auth(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - side_effect=Exception, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - }, - ) + requests_mock.register_uri( + "GET", + 
DEVICE_LIST_URL, + status_code=HTTPStatus.UNAUTHORIZED, + json={"message": "Failure"}, + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"password": "invalid_auth"} +@pytest.mark.usefixtures("access_token", "device_list_timeout") async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - side_effect=requests.exceptions.ConnectionError(), - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - }, - ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} -async def test_reauth(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("access_token") +async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test we can reauth.""" entry = MockConfigEntry( domain=DOMAIN, @@ -151,35 +132,28 @@ async def test_reauth(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - 
return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - side_effect=Exception, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: "test-password", - }, - ) + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PASSWORD: "test-password", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"password": "invalid_auth"} + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + exc=requests.exceptions.ConnectTimeout, + ) + with ( patch( - "homeassistant.components.flume.config_flow.FlumeAuth", + "homeassistant.components.flume.config_flow.os.path.exists", return_value=True, ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - side_effect=requests.exceptions.ConnectionError(), - ), + patch("homeassistant.components.flume.config_flow.os.unlink") as mock_unlink, ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], @@ -187,21 +161,22 @@ async def test_reauth(hass: HomeAssistant) -> None: CONF_PASSWORD: "test-password", }, ) + # The existing token file was removed + assert len(mock_unlink.mock_calls) == 1 assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": "cannot_connect"} - mock_flume_device_list = _get_mocked_flume_device_list() + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.OK, + json={ + "data": DEVICE_LIST, + }, + ) with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - return_value=mock_flume_device_list, - ), patch( "homeassistant.components.flume.async_setup_entry", return_value=True, @@ -217,3 +192,31 @@ async def test_reauth(hass: HomeAssistant) -> None: assert mock_setup_entry.called assert result4["type"] is FlowResultType.ABORT assert result4["reason"] 
== "reauth_successful" + + +@pytest.mark.usefixtures("access_token") +async def test_form_no_devices(hass: HomeAssistant, requests_mock: Mocker) -> None: + """Test a device list response that contains no values will raise an error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.OK, + json={"data": []}, + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} diff --git a/tests/components/flume/test_init.py b/tests/components/flume/test_init.py new file mode 100644 index 00000000000..85c01c1051e --- /dev/null +++ b/tests/components/flume/test_init.py @@ -0,0 +1,135 @@ +"""Test the flume init.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from requests_mock.mocker import Mocker + +from homeassistant import config_entries +from homeassistant.components.flume.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .conftest import USER_ID + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +def platforms_fixture() -> Generator[None]: + """Return the platforms to be loaded for this test.""" + # Arbitrary platform to ensure notifications are loaded + with patch("homeassistant.components.flume.PLATFORMS", [Platform.BINARY_SENSOR]): + yield + + +@pytest.mark.usefixtures("access_token", "device_list") +async def test_setup_config_entry( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test load and unload of a ConfigEntry.""" + assert await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is config_entries.ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state is config_entries.ConfigEntryState.NOT_LOADED + + +@pytest.mark.usefixtures("access_token", "device_list_timeout") +async def test_device_list_timeout( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test error handling for a timeout when listing devices.""" + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is config_entries.ConfigEntryState.SETUP_RETRY + + +@pytest.mark.usefixtures("access_token", "device_list_unauthorized") +async def test_reauth_when_unauthorized( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test error handling for an authentication error when listing devices.""" + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is config_entries.ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" + + +@pytest.mark.usefixtures("access_token", "device_list", "notifications_list") +async def test_list_notifications_service( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test the list notifications service.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is config_entries.ConfigEntryState.LOADED + + response = await hass.services.async_call( + DOMAIN, + "list_notifications", + {}, + target={ + "config_entry": config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + 
notifications = response.get("notifications") + assert notifications + assert len(notifications) == 1 + assert notifications[0].get("user_id") == USER_ID + + +@pytest.mark.usefixtures("access_token", "device_list", "notifications_list") +async def test_list_notifications_service_config_entry_errors( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test error handling for notification service with invalid config entries.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is config_entries.ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state is config_entries.ConfigEntryState.NOT_LOADED + + with pytest.raises(ValueError, match="Config entry not loaded"): + await hass.services.async_call( + DOMAIN, + "list_notifications", + {}, + target={ + "config_entry": config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + + with pytest.raises(ValueError, match="Invalid config entry: does-not-exist"): + await hass.services.async_call( + DOMAIN, + "list_notifications", + {}, + target={ + "config_entry": "does-not-exist", + }, + blocking=True, + return_response=True, + ) diff --git a/tests/components/flux/test_switch.py b/tests/components/flux/test_switch.py index ab85303584f..f957083dd11 100644 --- a/tests/components/flux/test_switch.py +++ b/tests/components/flux/test_switch.py @@ -29,7 +29,7 @@ from tests.components.light.common import MockLight @pytest.fixture(autouse=True) -async def set_utc(hass): +async def set_utc(hass: HomeAssistant) -> None: """Set timezone to UTC.""" await hass.config.async_set_time_zone("UTC") @@ -723,10 +723,8 @@ async def test_flux_after_sunrise_before_sunset_stop_next_day( assert call.data[light.ATTR_XY_COLOR] == [0.439, 0.37] -@pytest.mark.parametrize("x", [0, 1]) async def test_flux_after_sunset_before_midnight_stop_next_day( 
hass: HomeAssistant, - x, mock_light_entities: list[MockLight], ) -> None: """Test the flux switch after sunset and before stop. diff --git a/tests/components/flux_led/conftest.py b/tests/components/flux_led/conftest.py index 2a67c7b46f7..d323b321e08 100644 --- a/tests/components/flux_led/conftest.py +++ b/tests/components/flux_led/conftest.py @@ -1,20 +1,13 @@ """Tests for the flux_led integration.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from tests.common import mock_device_registry - - -@pytest.fixture(name="device_reg") -def device_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - @pytest.fixture -def mock_single_broadcast_address(): +def mock_single_broadcast_address() -> Generator[None]: """Mock network's async_async_get_ipv4_broadcast_addresses.""" with patch( "homeassistant.components.network.async_get_ipv4_broadcast_addresses", @@ -24,7 +17,7 @@ def mock_single_broadcast_address(): @pytest.fixture -def mock_multiple_broadcast_addresses(): +def mock_multiple_broadcast_addresses() -> Generator[None]: """Mock network's async_async_get_ipv4_broadcast_addresses to return multiple addresses.""" with patch( "homeassistant.components.network.async_get_ipv4_broadcast_addresses", diff --git a/tests/components/folder_watcher/conftest.py b/tests/components/folder_watcher/conftest.py index 6de9c69d574..ed0adea7a7d 100644 --- a/tests/components/folder_watcher/conftest.py +++ b/tests/components/folder_watcher/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from pathlib import Path from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator from homeassistant.components.folder_watcher.const import DOMAIN from homeassistant.config_entries import SOURCE_USER diff --git a/tests/components/folder_watcher/test_init.py 
b/tests/components/folder_watcher/test_init.py index 8309988931a..965ae33c4f8 100644 --- a/tests/components/folder_watcher/test_init.py +++ b/tests/components/folder_watcher/test_init.py @@ -36,7 +36,7 @@ def test_event() -> None: class MockPatternMatchingEventHandler: """Mock base class for the pattern matcher event handler.""" - def __init__(self, patterns): + def __init__(self, patterns) -> None: pass with patch( @@ -66,7 +66,7 @@ def test_move_event() -> None: class MockPatternMatchingEventHandler: """Mock base class for the pattern matcher event handler.""" - def __init__(self, patterns): + def __init__(self, patterns) -> None: pass with patch( diff --git a/tests/components/forecast_solar/conftest.py b/tests/components/forecast_solar/conftest.py index d1eacad8dbe..01c1f6d8d32 100644 --- a/tests/components/forecast_solar/conftest.py +++ b/tests/components/forecast_solar/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Forecast.Solar integration tests.""" +from collections.abc import Generator from datetime import datetime, timedelta from unittest.mock import AsyncMock, MagicMock, patch from forecast_solar import models import pytest -from typing_extensions import Generator from homeassistant.components.forecast_solar.const import ( CONF_AZIMUTH, diff --git a/tests/components/forked_daapd/conftest.py b/tests/components/forked_daapd/conftest.py index b9dd7087aef..e9f315c030c 100644 --- a/tests/components/forked_daapd/conftest.py +++ b/tests/components/forked_daapd/conftest.py @@ -10,7 +10,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="config_entry") -def config_entry_fixture(): +def config_entry_fixture() -> MockConfigEntry: """Create hass config_entry fixture.""" data = { CONF_HOST: "192.168.1.1", diff --git a/tests/components/forked_daapd/test_browse_media.py b/tests/components/forked_daapd/test_browse_media.py index 805bcac3976..cbd278128ae 100644 --- a/tests/components/forked_daapd/test_browse_media.py +++ 
b/tests/components/forked_daapd/test_browse_media.py @@ -3,8 +3,6 @@ from http import HTTPStatus from unittest.mock import patch -import pytest - from homeassistant.components import media_source, spotify from homeassistant.components.forked_daapd.browse_media import ( MediaContent, @@ -19,13 +17,16 @@ from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry from tests.typing import ClientSessionGenerator, WebSocketGenerator TEST_MASTER_ENTITY_NAME = "media_player.owntone_server" async def test_async_browse_media( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browse media.""" @@ -203,7 +204,9 @@ async def test_async_browse_media( async def test_async_browse_media_not_found( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browse media not found.""" @@ -261,7 +264,9 @@ async def test_async_browse_media_not_found( async def test_async_browse_spotify( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browsing spotify.""" @@ -313,7 +318,9 @@ async def test_async_browse_spotify( async def test_async_browse_media_source( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browsing media_source.""" @@ -361,7 +368,9 @@ async def test_async_browse_media_source( async def test_async_browse_image( - hass: HomeAssistant, hass_client: ClientSessionGenerator, config_entry + hass: HomeAssistant, + 
hass_client: ClientSessionGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browse media images.""" @@ -416,8 +425,7 @@ async def test_async_browse_image( async def test_async_browse_image_missing( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry, - caplog: pytest.LogCaptureFixture, + config_entry: MockConfigEntry, ) -> None: """Test browse media images with no image available.""" diff --git a/tests/components/forked_daapd/test_config_flow.py b/tests/components/forked_daapd/test_config_flow.py index 593b527009b..076fffef59b 100644 --- a/tests/components/forked_daapd/test_config_flow.py +++ b/tests/components/forked_daapd/test_config_flow.py @@ -67,7 +67,7 @@ async def test_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "user" -async def test_config_flow(hass: HomeAssistant, config_entry) -> None: +async def test_config_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test that the user step works.""" with ( patch( @@ -102,7 +102,9 @@ async def test_config_flow(hass: HomeAssistant, config_entry) -> None: assert result["type"] is FlowResultType.ABORT -async def test_zeroconf_updates_title(hass: HomeAssistant, config_entry) -> None: +async def test_zeroconf_updates_title( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test that zeroconf updates title and aborts with same host.""" MockConfigEntry(domain=DOMAIN, data={CONF_HOST: "different host"}).add_to_hass(hass) config_entry.add_to_hass(hass) @@ -125,7 +127,9 @@ async def test_zeroconf_updates_title(hass: HomeAssistant, config_entry) -> None assert len(hass.config_entries.async_entries(DOMAIN)) == 2 -async def test_config_flow_no_websocket(hass: HomeAssistant, config_entry) -> None: +async def test_config_flow_no_websocket( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test config flow setup without websocket enabled on server.""" with patch( 
"homeassistant.components.forked_daapd.config_flow.ForkedDaapdAPI.test_connection", @@ -224,7 +228,7 @@ async def test_config_flow_zeroconf_valid(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM -async def test_options_flow(hass: HomeAssistant, config_entry) -> None: +async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test config flow options.""" with patch( @@ -251,7 +255,9 @@ async def test_options_flow(hass: HomeAssistant, config_entry) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY -async def test_async_setup_entry_not_ready(hass: HomeAssistant, config_entry) -> None: +async def test_async_setup_entry_not_ready( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test that a PlatformNotReady exception is thrown during platform setup.""" with patch( diff --git a/tests/components/forked_daapd/test_media_player.py b/tests/components/forked_daapd/test_media_player.py index dd2e03f435f..6d7d267eb63 100644 --- a/tests/components/forked_daapd/test_media_player.py +++ b/tests/components/forked_daapd/test_media_player.py @@ -1,6 +1,7 @@ """The media player tests for the forked_daapd media player platform.""" -from unittest.mock import patch +from typing import Any +from unittest.mock import Mock, patch import pytest @@ -63,9 +64,9 @@ from homeassistant.const import ( STATE_PAUSED, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse -from tests.common import async_mock_signal +from tests.common import MockConfigEntry, async_mock_signal TEST_MASTER_ENTITY_NAME = "media_player.owntone_server" TEST_ZONE_ENTITY_NAMES = [ @@ -288,7 +289,7 @@ SAMPLE_PLAYLISTS = [{"id": 7, "name": "test_playlist", "uri": "library:playlist: @pytest.fixture(name="get_request_return_values") -async def get_request_return_values_fixture(): +async def get_request_return_values_fixture() -> dict[str, Any]: """Get request return 
values we can change later.""" return { "config": SAMPLE_CONFIG, @@ -299,7 +300,11 @@ async def get_request_return_values_fixture(): @pytest.fixture(name="mock_api_object") -async def mock_api_object_fixture(hass, config_entry, get_request_return_values): +async def mock_api_object_fixture( + hass: HomeAssistant, + config_entry: MockConfigEntry, + get_request_return_values: dict[str, Any], +) -> Mock: """Create mock api fixture.""" async def get_request_side_effect(update_type): @@ -341,8 +346,9 @@ async def mock_api_object_fixture(hass, config_entry, get_request_return_values) return mock_api.return_value +@pytest.mark.usefixtures("mock_api_object") async def test_unload_config_entry( - hass: HomeAssistant, config_entry, mock_api_object + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test the player is set unavailable when the config entry is unloaded.""" assert hass.states.get(TEST_MASTER_ENTITY_NAME) @@ -352,7 +358,8 @@ async def test_unload_config_entry( assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state == STATE_UNAVAILABLE -def test_master_state(hass: HomeAssistant, mock_api_object) -> None: +@pytest.mark.usefixtures("mock_api_object") +def test_master_state(hass: HomeAssistant) -> None: """Test master state attributes.""" state = hass.states.get(TEST_MASTER_ENTITY_NAME) assert state.state == STATE_PAUSED @@ -373,7 +380,7 @@ def test_master_state(hass: HomeAssistant, mock_api_object) -> None: async def test_no_update_when_get_request_returns_none( - hass: HomeAssistant, config_entry, mock_api_object + hass: HomeAssistant, config_entry: MockConfigEntry, mock_api_object: Mock ) -> None: """Test when get request returns None.""" @@ -399,8 +406,12 @@ async def test_no_update_when_get_request_returns_none( async def _service_call( - hass, entity_name, service, additional_service_data=None, blocking=True -): + hass: HomeAssistant, + entity_name: str, + service: str, + additional_service_data: dict[str, Any] | None = None, + blocking: bool = 
True, +) -> ServiceResponse: if additional_service_data is None: additional_service_data = {} return await hass.services.async_call( @@ -411,7 +422,7 @@ async def _service_call( ) -async def test_zone(hass: HomeAssistant, mock_api_object) -> None: +async def test_zone(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test zone attributes and methods.""" zone_entity_name = TEST_ZONE_ENTITY_NAMES[0] state = hass.states.get(zone_entity_name) @@ -450,7 +461,7 @@ async def test_zone(hass: HomeAssistant, mock_api_object) -> None: mock_api_object.change_output.assert_any_call(output_id, selected=True) -async def test_last_outputs_master(hass: HomeAssistant, mock_api_object) -> None: +async def test_last_outputs_master(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test restoration of _last_outputs.""" # Test turning on sends API call await _service_call(hass, TEST_MASTER_ENTITY_NAME, SERVICE_TURN_ON) @@ -467,7 +478,9 @@ async def test_last_outputs_master(hass: HomeAssistant, mock_api_object) -> None async def test_bunch_of_stuff_master( - hass: HomeAssistant, get_request_return_values, mock_api_object + hass: HomeAssistant, + get_request_return_values: dict[str, Any], + mock_api_object: Mock, ) -> None: """Run bunch of stuff.""" await _service_call(hass, TEST_MASTER_ENTITY_NAME, SERVICE_TURN_ON) @@ -551,9 +564,8 @@ async def test_bunch_of_stuff_master( mock_api_object.clear_queue.assert_called_once() -async def test_async_play_media_from_paused( - hass: HomeAssistant, mock_api_object -) -> None: +@pytest.mark.usefixtures("mock_api_object") +async def test_async_play_media_from_paused(hass: HomeAssistant) -> None: """Test async play media from paused.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -571,7 +583,9 @@ async def test_async_play_media_from_paused( async def test_async_play_media_announcement_from_stopped( - hass: HomeAssistant, get_request_return_values, mock_api_object + hass: HomeAssistant, + 
get_request_return_values: dict[str, Any], + mock_api_object: Mock, ) -> None: """Test async play media announcement (from stopped).""" updater_update = mock_api_object.start_websocket_handler.call_args[0][2] @@ -597,9 +611,8 @@ async def test_async_play_media_announcement_from_stopped( assert state.last_updated > initial_state.last_updated -async def test_async_play_media_unsupported( - hass: HomeAssistant, mock_api_object -) -> None: +@pytest.mark.usefixtures("mock_api_object") +async def test_async_play_media_unsupported(hass: HomeAssistant) -> None: """Test async play media on unsupported media type.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -616,7 +629,7 @@ async def test_async_play_media_unsupported( async def test_async_play_media_announcement_tts_timeout( - hass: HomeAssistant, mock_api_object + hass: HomeAssistant, mock_api_object: Mock ) -> None: """Test async play media announcement with TTS timeout.""" mock_api_object.add_to_queue.side_effect = None @@ -638,7 +651,7 @@ async def test_async_play_media_announcement_tts_timeout( async def test_use_pipe_control_with_no_api( - hass: HomeAssistant, mock_api_object + hass: HomeAssistant, mock_api_object: Mock ) -> None: """Test using pipe control with no api set.""" await _service_call( @@ -651,7 +664,8 @@ async def test_use_pipe_control_with_no_api( assert mock_api_object.start_playback.call_count == 0 -async def test_clear_source(hass: HomeAssistant, mock_api_object) -> None: +@pytest.mark.usefixtures("mock_api_object") +async def test_clear_source(hass: HomeAssistant) -> None: """Test changing source to clear.""" await _service_call( hass, @@ -665,8 +679,11 @@ async def test_clear_source(hass: HomeAssistant, mock_api_object) -> None: @pytest.fixture(name="pipe_control_api_object") async def pipe_control_api_object_fixture( - hass, config_entry, get_request_return_values, mock_api_object -): + hass: HomeAssistant, + config_entry: MockConfigEntry, + 
get_request_return_values: dict[str, Any], + mock_api_object: Mock, +) -> Mock: """Fixture for mock librespot_java api.""" with patch( "homeassistant.components.forked_daapd.media_player.LibrespotJavaAPI", @@ -697,9 +714,9 @@ async def pipe_control_api_object_fixture( async def test_librespot_java_stuff( hass: HomeAssistant, - get_request_return_values, - mock_api_object, - pipe_control_api_object, + get_request_return_values: dict[str, Any], + mock_api_object: Mock, + pipe_control_api_object: Mock, ) -> None: """Test options update and librespot-java stuff.""" state = hass.states.get(TEST_MASTER_ENTITY_NAME) @@ -734,9 +751,8 @@ async def test_librespot_java_stuff( assert state.attributes[ATTR_MEDIA_ALBUM_NAME] == "some album" -async def test_librespot_java_play_announcement( - hass: HomeAssistant, pipe_control_api_object -) -> None: +@pytest.mark.usefixtures("pipe_control_api_object") +async def test_librespot_java_play_announcement(hass: HomeAssistant) -> None: """Test play announcement with librespot-java pipe.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -755,7 +771,7 @@ async def test_librespot_java_play_announcement( async def test_librespot_java_play_media_pause_timeout( - hass: HomeAssistant, pipe_control_api_object + hass: HomeAssistant, pipe_control_api_object: Mock ) -> None: """Test play media with librespot-java pipe.""" # test media play with pause timeout @@ -778,7 +794,7 @@ async def test_librespot_java_play_media_pause_timeout( assert state.last_updated > initial_state.last_updated -async def test_unsupported_update(hass: HomeAssistant, mock_api_object) -> None: +async def test_unsupported_update(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test unsupported update type.""" last_updated = hass.states.get(TEST_MASTER_ENTITY_NAME).last_updated updater_update = mock_api_object.start_websocket_handler.call_args[0][2] @@ -787,7 +803,9 @@ async def test_unsupported_update(hass: HomeAssistant, 
mock_api_object) -> None: assert hass.states.get(TEST_MASTER_ENTITY_NAME).last_updated == last_updated -async def test_invalid_websocket_port(hass: HomeAssistant, config_entry) -> None: +async def test_invalid_websocket_port( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test invalid websocket port on async_init.""" with patch( "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI", @@ -800,7 +818,7 @@ async def test_invalid_websocket_port(hass: HomeAssistant, config_entry) -> None assert hass.states.get(TEST_MASTER_ENTITY_NAME).state == STATE_UNAVAILABLE -async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object) -> None: +async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test websocket disconnection.""" assert hass.states.get(TEST_MASTER_ENTITY_NAME).state != STATE_UNAVAILABLE assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state != STATE_UNAVAILABLE @@ -811,7 +829,9 @@ async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object) -> Non assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state == STATE_UNAVAILABLE -async def test_async_play_media_enqueue(hass: HomeAssistant, mock_api_object) -> None: +async def test_async_play_media_enqueue( + hass: HomeAssistant, mock_api_object: Mock +) -> None: """Test async play media with different enqueue options.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -887,7 +907,7 @@ async def test_async_play_media_enqueue(hass: HomeAssistant, mock_api_object) -> ) -async def test_play_owntone_media(hass: HomeAssistant, mock_api_object) -> None: +async def test_play_owntone_media(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test async play media with an owntone source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -913,7 +933,7 @@ async def test_play_owntone_media(hass: HomeAssistant, mock_api_object) -> None: ) -async def 
test_play_spotify_media(hass: HomeAssistant, mock_api_object) -> None: +async def test_play_spotify_media(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test async play media with a spotify source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -937,7 +957,7 @@ async def test_play_spotify_media(hass: HomeAssistant, mock_api_object) -> None: ) -async def test_play_media_source(hass: HomeAssistant, mock_api_object) -> None: +async def test_play_media_source(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test async play media with a spotify source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) with patch( diff --git a/tests/components/freedompro/conftest.py b/tests/components/freedompro/conftest.py index 91eecc24f27..8e581673b92 100644 --- a/tests/components/freedompro/conftest.py +++ b/tests/components/freedompro/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from copy import deepcopy from typing import Any from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.freedompro.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/fritz/conftest.py b/tests/components/fritz/conftest.py index bb049f067b4..fa92fa37c04 100644 --- a/tests/components/fritz/conftest.py +++ b/tests/components/fritz/conftest.py @@ -30,7 +30,7 @@ class FritzServiceMock(Service): class FritzConnectionMock: """FritzConnection mocking.""" - def __init__(self, services): + def __init__(self, services) -> None: """Init Mocking class.""" self.modelname = MOCK_MODELNAME self.call_action = self._call_action diff --git a/tests/components/fritz/test_button.py b/tests/components/fritz/test_button.py index 8666491eb7a..79639835003 100644 --- a/tests/components/fritz/test_button.py +++ b/tests/components/fritz/test_button.py @@ -1,6 +1,6 @@ """Tests for Fritz!Tools button 
platform.""" -import copy +from copy import deepcopy from datetime import timedelta from unittest.mock import patch @@ -11,9 +11,15 @@ from homeassistant.components.fritz.const import DOMAIN, MeshRoles from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util.dt import utcnow -from .const import MOCK_MESH_DATA, MOCK_NEW_DEVICE_NODE, MOCK_USER_DATA +from .const import ( + MOCK_HOST_ATTRIBUTES_DATA, + MOCK_MESH_DATA, + MOCK_NEW_DEVICE_NODE, + MOCK_USER_DATA, +) from tests.common import MockConfigEntry, async_fire_time_changed @@ -120,7 +126,7 @@ async def test_wol_button_new_device( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - mesh_data = copy.deepcopy(MOCK_MESH_DATA) + mesh_data = deepcopy(MOCK_MESH_DATA) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.LOADED @@ -148,7 +154,7 @@ async def test_wol_button_absent_for_mesh_slave( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - slave_mesh_data = copy.deepcopy(MOCK_MESH_DATA) + slave_mesh_data = deepcopy(MOCK_MESH_DATA) slave_mesh_data["nodes"][0]["mesh_role"] = MeshRoles.SLAVE fh_class_mock.get_mesh_topology.return_value = slave_mesh_data @@ -170,7 +176,7 @@ async def test_wol_button_absent_for_non_lan_device( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - printer_wifi_data = copy.deepcopy(MOCK_MESH_DATA) + printer_wifi_data = deepcopy(MOCK_MESH_DATA) # initialization logic uses the connection type of the `node_interface_1_uid` pair of the printer # ni-230 is wifi interface of fritzbox printer_node_interface = printer_wifi_data["nodes"][1]["node_interfaces"][0] @@ -184,3 +190,61 @@ async def 
test_wol_button_absent_for_non_lan_device( button = hass.states.get("button.printer_wake_on_lan") assert button is None + + +async def test_cleanup_button( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + fc_class_mock, + fh_class_mock, +) -> None: + """Test cleanup of orphan devices.""" + + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED + + # check if tracked device is registered properly + device = device_registry.async_get_device( + connections={("mac", "aa:bb:cc:00:11:22")} + ) + assert device + + entities = [ + entity + for entity in er.async_entries_for_config_entry(entity_registry, entry.entry_id) + if entity.unique_id.startswith("AA:BB:CC:00:11:22") + ] + assert entities + assert len(entities) == 3 + + # removed tracked device and trigger cleanup + host_attributes = deepcopy(MOCK_HOST_ATTRIBUTES_DATA) + host_attributes.pop(0) + fh_class_mock.get_hosts_attributes.return_value = host_attributes + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.mock_title_cleanup"}, + blocking=True, + ) + + await hass.async_block_till_done(wait_background_tasks=True) + + # check if orphan tracked device is removed + device = device_registry.async_get_device( + connections={("mac", "aa:bb:cc:00:11:22")} + ) + assert not device + + entities = [ + entity + for entity in er.async_entries_for_config_entry(entity_registry, entry.entry_id) + if entity.unique_id.startswith("AA:BB:CC:00:11:22") + ] + assert not entities diff --git a/tests/components/fritzbox/__init__.py b/tests/components/fritzbox/__init__.py index 2bd8f26d73b..bd68615212d 100644 --- a/tests/components/fritzbox/__init__.py +++ b/tests/components/fritzbox/__init__.py @@ -115,6 +115,13 @@ class FritzDeviceClimateMock(FritzEntityBaseMock): 
scheduled_preset = PRESET_ECO +class FritzDeviceClimateWithoutTempSensorMock(FritzDeviceClimateMock): + """Mock of a AVM Fritz!Box climate device without exposing temperature sensor.""" + + temperature = None + has_temperature_sensor = False + + class FritzDeviceSensorMock(FritzEntityBaseMock): """Mock of a AVM Fritz!Box sensor device.""" @@ -151,7 +158,7 @@ class FritzDeviceSwitchMock(FritzEntityBaseMock): has_thermostat = False has_blind = False switch_state = "fake_state" - lock = "fake_locked" + lock = False power = 5678 present = True temperature = 1.23 @@ -173,6 +180,7 @@ class FritzDeviceLightMock(FritzEntityBaseMock): level = 100 present = True state = True + color_temp = None class FritzDeviceCoverMock(FritzEntityBaseMock): @@ -187,3 +195,9 @@ class FritzDeviceCoverMock(FritzEntityBaseMock): has_thermostat = False has_blind = True levelpercentage = 0 + + +class FritzDeviceCoverUnknownPositionMock(FritzDeviceCoverMock): + """Mock of a AVM Fritz!Box cover device with unknown position.""" + + levelpercentage = None diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 8d1da9d09d5..358eeaa714e 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -46,7 +46,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError import homeassistant.util.dt as dt_util -from . import FritzDeviceClimateMock, set_devices, setup_config_entry +from . 
import ( + FritzDeviceClimateMock, + FritzDeviceClimateWithoutTempSensorMock, + set_devices, + setup_config_entry, +) from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -162,6 +167,18 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state.state == PRESET_COMFORT +async def test_hkr_wo_temperature_sensor(hass: HomeAssistant, fritz: Mock) -> None: + """Test hkr without exposing dedicated temperature sensor data block.""" + device = FritzDeviceClimateWithoutTempSensorMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 18.0 + + async def test_target_temperature_on(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device on.""" device = FritzDeviceClimateMock() @@ -263,10 +280,10 @@ async def test_set_temperature_temperature(hass: HomeAssistant, fritz: Mock) -> await hass.services.async_call( DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 123}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 23}, True, ) - assert device.set_target_temperature.call_args_list == [call(123)] + assert device.set_target_temperature.call_args_list == [call(23)] async def test_set_temperature_mode_off(hass: HomeAssistant, fritz: Mock) -> None: @@ -282,7 +299,7 @@ async def test_set_temperature_mode_off(hass: HomeAssistant, fritz: Mock) -> Non { ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF, - ATTR_TEMPERATURE: 123, + ATTR_TEMPERATURE: 23, }, True, ) @@ -303,7 +320,7 @@ async def test_set_temperature_mode_heat(hass: HomeAssistant, fritz: Mock) -> No { ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT, - ATTR_TEMPERATURE: 123, + ATTR_TEMPERATURE: 23, }, True, ) diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index 6c301fc8f46..6626db2bccf 100644 
--- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -3,7 +3,12 @@ from datetime import timedelta from unittest.mock import Mock, call -from homeassistant.components.cover import ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN, + STATE_OPEN, +) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -12,11 +17,17 @@ from homeassistant.const import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, + STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from . import FritzDeviceCoverMock, set_devices, setup_config_entry +from . import ( + FritzDeviceCoverMock, + FritzDeviceCoverUnknownPositionMock, + set_devices, + setup_config_entry, +) from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -33,9 +44,22 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: state = hass.states.get(ENTITY_ID) assert state + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 +async def test_unknown_position(hass: HomeAssistant, fritz: Mock) -> None: + """Test cover with unknown position.""" + device = FritzDeviceCoverUnknownPositionMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + assert state.state == STATE_UNKNOWN + + async def test_open_cover(hass: HomeAssistant, fritz: Mock) -> None: """Test opening the cover.""" device = FritzDeviceCoverMock() diff --git a/tests/components/fritzbox/test_init.py b/tests/components/fritzbox/test_init.py index c84498b1560..56e3e7a5738 100644 --- a/tests/components/fritzbox/test_init.py +++ b/tests/components/fritzbox/test_init.py @@ -18,6 +18,7 @@ from 
homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_USERNAME, + EVENT_HOMEASSISTANT_STOP, STATE_UNAVAILABLE, UnitOfTemperature, ) @@ -199,6 +200,35 @@ async def test_unload_remove(hass: HomeAssistant, fritz: Mock) -> None: assert state is None +async def test_logout_on_stop(hass: HomeAssistant, fritz: Mock) -> None: + """Test we log out from fritzbox when Home Assistants stops.""" + fritz().get_devices.return_value = [FritzDeviceSwitchMock()] + entity_id = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}" + + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id=entity_id, + ) + entry.add_to_hass(hass) + + config_entries = hass.config_entries.async_entries(FB_DOMAIN) + assert len(config_entries) == 1 + assert entry is config_entries[0] + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + state = hass.states.get(entity_id) + assert state + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + + assert fritz().logout.call_count == 1 + + async def test_remove_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/fritzbox/test_light.py b/tests/components/fritzbox/test_light.py index 45920c7c3ee..3cafa933fa3 100644 --- a/tests/components/fritzbox/test_light.py +++ b/tests/components/fritzbox/test_light.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import Mock, call +import pytest from requests.exceptions import HTTPError from homeassistant.components.fritzbox.const import ( @@ -12,12 +13,14 @@ from homeassistant.components.fritzbox.const import ( ) from homeassistant.components.light import ( ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, DOMAIN, + ColorMode, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -56,9 
+59,11 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.state == STATE_ON assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 2700 assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 + assert state.attributes[ATTR_HS_COLOR] == (28.395, 65.723) assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -99,6 +104,9 @@ async def test_setup_non_color_non_level(hass: HomeAssistant, fritz: Mock) -> No assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" assert ATTR_BRIGHTNESS not in state.attributes assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["onoff"] + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.ONOFF + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None + assert state.attributes.get(ATTR_HS_COLOR) is None async def test_setup_color(hass: HomeAssistant, fritz: Mock) -> None: @@ -120,6 +128,8 @@ async def test_setup_color(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.state == STATE_ON assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.HS + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] is None assert state.attributes[ATTR_BRIGHTNESS] == 100 assert state.attributes[ATTR_HS_COLOR] == (100, 70) assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -183,16 +193,16 @@ async def test_turn_on_color_unsupported_api_method( device.get_colors.return_value = { "Red": [("100", "70", "10"), ("100", "50", "10"), ("100", "30", "10")] } - mockresponse = Mock() - mockresponse.status_code = 400 - - error = HTTPError("Bad Request") - error.response = mockresponse - device.set_unmapped_color.side_effect = error - assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], 
ENTITY_ID, device, fritz ) + + # test fallback to `setcolor` + error = HTTPError("Bad Request") + error.response = Mock() + error.response.status_code = 400 + device.set_unmapped_color.side_effect = error + await hass.services.async_call( DOMAIN, SERVICE_TURN_ON, @@ -205,6 +215,16 @@ async def test_turn_on_color_unsupported_api_method( assert device.set_level.call_args_list == [call(100)] assert device.set_color.call_args_list == [call((100, 70))] + # test for unknown error + error.response.status_code = 500 + with pytest.raises(HTTPError, match="Bad Request"): + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, + True, + ) + async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device off.""" diff --git a/tests/components/fritzbox/test_switch.py b/tests/components/fritzbox/test_switch.py index 417b355b396..ba3b1de9b2f 100644 --- a/tests/components/fritzbox/test_switch.py +++ b/tests/components/fritzbox/test_switch.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import Mock +import pytest from requests.exceptions import HTTPError from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN @@ -29,6 +30,7 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util @@ -130,6 +132,7 @@ async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None: async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device off.""" device = FritzDeviceSwitchMock() + assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) @@ -137,9 +140,36 @@ async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: await hass.services.async_call( DOMAIN, SERVICE_TURN_OFF, 
{ATTR_ENTITY_ID: ENTITY_ID}, True ) + assert device.set_switch_state_off.call_count == 1 +async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None: + """Test toggling while device is locked.""" + device = FritzDeviceSwitchMock() + device.lock = True + + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + with pytest.raises( + HomeAssistantError, + match="Can't toggle switch while manual switching is disabled for the device", + ): + await hass.services.async_call( + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + ) + + with pytest.raises( + HomeAssistantError, + match="Can't toggle switch while manual switching is disabled for the device", + ): + await hass.services.async_call( + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True + ) + + async def test_update(hass: HomeAssistant, fritz: Mock) -> None: """Test update without error.""" device = FritzDeviceSwitchMock() diff --git a/tests/components/fronius/__init__.py b/tests/components/fronius/__init__.py index 2109d4a6692..57b22490ed0 100644 --- a/tests/components/fronius/__init__.py +++ b/tests/components/fronius/__init__.py @@ -3,9 +3,12 @@ from __future__ import annotations from collections.abc import Callable +from datetime import timedelta import json from typing import Any +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components.fronius.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -114,7 +117,12 @@ def mock_responses( ) -async def enable_all_entities(hass, freezer, config_entry_id, time_till_next_update): +async def enable_all_entities( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry_id: str, + time_till_next_update: timedelta, +) -> None: """Enable all entities for a config entry and fast forward time to receive data.""" registry = er.async_get(hass) entities = 
er.async_entries_for_config_entry(registry, config_entry_id) diff --git a/tests/components/fronius/test_diagnostics.py b/tests/components/fronius/test_diagnostics.py index 7b1f384e405..ddef5b4a18c 100644 --- a/tests/components/fronius/test_diagnostics.py +++ b/tests/components/fronius/test_diagnostics.py @@ -1,6 +1,7 @@ """Tests for the diagnostics data provided by the Fronius integration.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -21,11 +22,8 @@ async def test_diagnostics( mock_responses(aioclient_mock) entry = await setup_fronius_integration(hass) - assert ( - await get_diagnostics_for_config_entry( - hass, - hass_client, - entry, - ) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, + hass_client, + entry, + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/frontend/test_init.py b/tests/components/frontend/test_init.py index 83c82abea35..5006adedd77 100644 --- a/tests/components/frontend/test_init.py +++ b/tests/components/frontend/test_init.py @@ -1,6 +1,7 @@ """The tests for Home Assistant frontend.""" from asyncio import AbstractEventLoop +from collections.abc import Generator from http import HTTPStatus from pathlib import Path import re @@ -64,7 +65,7 @@ CONFIG_THEMES = {DOMAIN: {CONF_THEMES: MOCK_THEMES}} @pytest.fixture -async def ignore_frontend_deps(hass): +async def ignore_frontend_deps(hass: HomeAssistant) -> None: """Frontend dependencies.""" frontend = await async_get_integration(hass, "frontend") for dep in frontend.dependencies: @@ -73,7 +74,7 @@ async def ignore_frontend_deps(hass): @pytest.fixture -async def frontend(hass, ignore_frontend_deps): +async def frontend(hass: HomeAssistant, ignore_frontend_deps: None) -> None: """Frontend setup with themes.""" assert await async_setup_component( hass, @@ -83,7 +84,7 @@ async def frontend(hass, ignore_frontend_deps): @pytest.fixture -async def 
frontend_themes(hass): +async def frontend_themes(hass: HomeAssistant) -> None: """Frontend setup with themes.""" assert await async_setup_component( hass, @@ -104,7 +105,7 @@ def aiohttp_client( @pytest.fixture async def mock_http_client( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, frontend + hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, frontend: None ) -> TestClient: """Start the Home Assistant HTTP component.""" return await aiohttp_client(hass.http.app) @@ -112,7 +113,7 @@ async def mock_http_client( @pytest.fixture async def themes_ws_client( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend_themes + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend_themes: None ) -> MockHAClientWebSocket: """Start the Home Assistant HTTP component.""" return await hass_ws_client(hass) @@ -120,7 +121,7 @@ async def themes_ws_client( @pytest.fixture async def ws_client( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend: None ) -> MockHAClientWebSocket: """Start the Home Assistant HTTP component.""" return await hass_ws_client(hass) @@ -128,7 +129,9 @@ async def ws_client( @pytest.fixture async def mock_http_client_with_extra_js( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, ignore_frontend_deps + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + ignore_frontend_deps: None, ) -> TestClient: """Start the Home Assistant HTTP component.""" assert await async_setup_component( @@ -145,7 +148,7 @@ async def mock_http_client_with_extra_js( @pytest.fixture -def mock_onboarded(): +def mock_onboarded() -> Generator[None]: """Mock that we're onboarded.""" with patch( "homeassistant.components.onboarding.async_is_onboarded", return_value=True @@ -153,7 +156,8 @@ def mock_onboarded(): yield -async def test_frontend_and_static(mock_http_client, mock_onboarded) -> None: 
+@pytest.mark.usefixtures("mock_onboarded") +async def test_frontend_and_static(mock_http_client: TestClient) -> None: """Test if we can get the frontend.""" resp = await mock_http_client.get("") assert resp.status == 200 @@ -170,26 +174,31 @@ async def test_frontend_and_static(mock_http_client, mock_onboarded) -> None: assert "public" in resp.headers.get("cache-control") -async def test_dont_cache_service_worker(mock_http_client) -> None: +@pytest.mark.parametrize("sw_url", ["/sw-modern.js", "/sw-legacy.js"]) +async def test_dont_cache_service_worker( + mock_http_client: TestClient, sw_url: str +) -> None: """Test that we don't cache the service worker.""" - resp = await mock_http_client.get("/service_worker.js") + resp = await mock_http_client.get(sw_url) assert resp.status == 200 assert "cache-control" not in resp.headers -async def test_404(mock_http_client) -> None: +async def test_404(mock_http_client: TestClient) -> None: """Test for HTTP 404 error.""" resp = await mock_http_client.get("/not-existing") assert resp.status == HTTPStatus.NOT_FOUND -async def test_we_cannot_POST_to_root(mock_http_client) -> None: +async def test_we_cannot_POST_to_root(mock_http_client: TestClient) -> None: """Test that POST is not allow to root.""" resp = await mock_http_client.post("/") assert resp.status == 405 -async def test_themes_api(hass: HomeAssistant, themes_ws_client) -> None: +async def test_themes_api( + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket +) -> None: """Test that /api/themes returns correct data.""" await themes_ws_client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await themes_ws_client.receive_json() @@ -216,11 +225,11 @@ async def test_themes_api(hass: HomeAssistant, themes_ws_client) -> None: assert msg["result"]["themes"] == {} +@pytest.mark.usefixtures("ignore_frontend_deps") async def test_themes_persist( hass: HomeAssistant, hass_storage: dict[str, Any], hass_ws_client: WebSocketGenerator, - ignore_frontend_deps, ) -> 
None: """Test that theme settings are restores after restart.""" hass_storage[THEMES_STORAGE_KEY] = { @@ -242,11 +251,11 @@ async def test_themes_persist( assert msg["result"]["default_dark_theme"] == "dark" +@pytest.mark.usefixtures("frontend_themes") async def test_themes_save_storage( hass: HomeAssistant, hass_storage: dict[str, Any], freezer: FrozenDateTimeFactory, - frontend_themes, ) -> None: """Test that theme settings are restores after restart.""" @@ -270,7 +279,9 @@ async def test_themes_save_storage( } -async def test_themes_set_theme(hass: HomeAssistant, themes_ws_client) -> None: +async def test_themes_set_theme( + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket +) -> None: """Test frontend.set_theme service.""" await hass.services.async_call( DOMAIN, "set_theme", {"name": "happy"}, blocking=True @@ -303,7 +314,7 @@ async def test_themes_set_theme(hass: HomeAssistant, themes_ws_client) -> None: async def test_themes_set_theme_wrong_name( - hass: HomeAssistant, themes_ws_client + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket ) -> None: """Test frontend.set_theme service called with wrong name.""" @@ -318,7 +329,9 @@ async def test_themes_set_theme_wrong_name( assert msg["result"]["default_theme"] == "default" -async def test_themes_set_dark_theme(hass: HomeAssistant, themes_ws_client) -> None: +async def test_themes_set_dark_theme( + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket +) -> None: """Test frontend.set_theme service called with dark mode.""" await hass.services.async_call( @@ -358,8 +371,9 @@ async def test_themes_set_dark_theme(hass: HomeAssistant, themes_ws_client) -> N assert msg["result"]["default_dark_theme"] == "light_and_dark" +@pytest.mark.usefixtures("frontend") async def test_themes_set_dark_theme_wrong_name( - hass: HomeAssistant, frontend, themes_ws_client + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket ) -> None: """Test frontend.set_theme service called with mode dark and 
wrong name.""" await hass.services.async_call( @@ -373,8 +387,9 @@ async def test_themes_set_dark_theme_wrong_name( assert msg["result"]["default_dark_theme"] is None +@pytest.mark.usefixtures("frontend") async def test_themes_reload_themes( - hass: HomeAssistant, frontend, themes_ws_client + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket ) -> None: """Test frontend.reload_themes service.""" @@ -395,7 +410,7 @@ async def test_themes_reload_themes( assert msg["result"]["default_theme"] == "default" -async def test_missing_themes(hass: HomeAssistant, ws_client) -> None: +async def test_missing_themes(ws_client: MockHAClientWebSocket) -> None: """Test that themes API works when themes are not defined.""" await ws_client.send_json({"id": 5, "type": "frontend/get_themes"}) @@ -412,7 +427,7 @@ async def test_missing_themes(hass: HomeAssistant, ws_client) -> None: async def test_extra_js( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_http_client_with_extra_js, + mock_http_client_with_extra_js: TestClient, ) -> None: """Test that extra javascript is loaded.""" @@ -497,7 +512,7 @@ async def test_extra_js( async def test_get_panels( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_http_client, + mock_http_client: TestClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test get_panels command.""" @@ -547,7 +562,7 @@ async def test_get_panels( async def test_get_panels_non_admin( - hass: HomeAssistant, ws_client, hass_admin_user: MockUser + hass: HomeAssistant, ws_client: MockHAClientWebSocket, hass_admin_user: MockUser ) -> None: """Test get_panels command.""" hass_admin_user.groups = [] @@ -568,7 +583,7 @@ async def test_get_panels_non_admin( assert "map" not in msg["result"] -async def test_get_translations(hass: HomeAssistant, ws_client) -> None: +async def test_get_translations(ws_client: MockHAClientWebSocket) -> None: """Test get_translations command.""" with patch( 
"homeassistant.components.frontend.async_get_translations", @@ -593,7 +608,7 @@ async def test_get_translations(hass: HomeAssistant, ws_client) -> None: async def test_get_translations_for_integrations( - hass: HomeAssistant, ws_client + ws_client: MockHAClientWebSocket, ) -> None: """Test get_translations for integrations command.""" with patch( @@ -621,7 +636,7 @@ async def test_get_translations_for_integrations( async def test_get_translations_for_single_integration( - hass: HomeAssistant, ws_client + ws_client: MockHAClientWebSocket, ) -> None: """Test get_translations for integration command.""" with patch( @@ -660,7 +675,7 @@ async def test_onboarding_load(hass: HomeAssistant) -> None: assert "onboarding" in frontend.dependencies -async def test_auth_authorize(mock_http_client) -> None: +async def test_auth_authorize(mock_http_client: TestClient) -> None: """Test the authorize endpoint works.""" resp = await mock_http_client.get( "/auth/authorize?response_type=code&client_id=https://localhost/&" @@ -683,7 +698,9 @@ async def test_auth_authorize(mock_http_client) -> None: assert "public" in resp.headers.get("cache-control") -async def test_get_version(hass: HomeAssistant, ws_client) -> None: +async def test_get_version( + hass: HomeAssistant, ws_client: MockHAClientWebSocket +) -> None: """Test get_version command.""" frontend = await async_get_integration(hass, "frontend") cur_version = next( @@ -701,7 +718,7 @@ async def test_get_version(hass: HomeAssistant, ws_client) -> None: assert msg["result"] == {"version": cur_version} -async def test_static_paths(hass: HomeAssistant, mock_http_client) -> None: +async def test_static_paths(mock_http_client: TestClient) -> None: """Test static paths.""" resp = await mock_http_client.get( "/.well-known/change-password", allow_redirects=False @@ -710,9 +727,8 @@ async def test_static_paths(hass: HomeAssistant, mock_http_client) -> None: assert resp.headers["location"] == "/profile" -async def test_manifest_json( - hass: 
HomeAssistant, frontend_themes, mock_http_client -) -> None: +@pytest.mark.usefixtures("frontend_themes") +async def test_manifest_json(hass: HomeAssistant, mock_http_client: TestClient) -> None: """Test for fetching manifest.json.""" resp = await mock_http_client.get("/manifest.json") assert resp.status == HTTPStatus.OK @@ -734,7 +750,7 @@ async def test_manifest_json( assert json["theme_color"] != DEFAULT_THEME_COLOR -async def test_static_path_cache(hass: HomeAssistant, mock_http_client) -> None: +async def test_static_path_cache(mock_http_client: TestClient) -> None: """Test static paths cache.""" resp = await mock_http_client.get("/lovelace/default_view", allow_redirects=False) assert resp.status == 404 @@ -766,7 +782,7 @@ async def test_static_path_cache(hass: HomeAssistant, mock_http_client) -> None: assert resp.status == 404 -async def test_get_icons(hass: HomeAssistant, ws_client: MockHAClientWebSocket) -> None: +async def test_get_icons(ws_client: MockHAClientWebSocket) -> None: """Test get_icons command.""" with patch( "homeassistant.components.frontend.async_get_icons", @@ -787,9 +803,7 @@ async def test_get_icons(hass: HomeAssistant, ws_client: MockHAClientWebSocket) assert msg["result"] == {"resources": {}} -async def test_get_icons_for_integrations( - hass: HomeAssistant, ws_client: MockHAClientWebSocket -) -> None: +async def test_get_icons_for_integrations(ws_client: MockHAClientWebSocket) -> None: """Test get_icons for integrations command.""" with patch( "homeassistant.components.frontend.async_get_icons", @@ -814,7 +828,7 @@ async def test_get_icons_for_integrations( async def test_get_icons_for_single_integration( - hass: HomeAssistant, ws_client: MockHAClientWebSocket + ws_client: MockHAClientWebSocket, ) -> None: """Test get_icons for integration command.""" with patch( diff --git a/tests/components/frontend/test_storage.py b/tests/components/frontend/test_storage.py index 8b97fa9ee04..ce7f7aeb4a1 100644 --- 
a/tests/components/frontend/test_storage.py +++ b/tests/components/frontend/test_storage.py @@ -13,15 +13,13 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -def setup_frontend(hass): +def setup_frontend(hass: HomeAssistant) -> None: """Fixture to setup the frontend.""" hass.loop.run_until_complete(async_setup_component(hass, "frontend", {})) async def test_get_user_data_empty( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_user_data command.""" client = await hass_ws_client(hass) @@ -82,9 +80,7 @@ async def test_get_user_data( async def test_set_user_data_empty( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test set_user_data command.""" client = await hass_ws_client(hass) diff --git a/tests/components/frontier_silicon/conftest.py b/tests/components/frontier_silicon/conftest.py index 2322740c69a..709b1842472 100644 --- a/tests/components/frontier_silicon/conftest.py +++ b/tests/components/frontier_silicon/conftest.py @@ -1,9 +1,9 @@ """Configuration for frontier_silicon tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.frontier_silicon.const import CONF_WEBFSAPI_URL, DOMAIN from homeassistant.const import CONF_PIN diff --git a/tests/components/fully_kiosk/conftest.py b/tests/components/fully_kiosk/conftest.py index 3f7c2985daf..028eefcf361 100644 --- a/tests/components/fully_kiosk/conftest.py +++ b/tests/components/fully_kiosk/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from 
homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.const import ( diff --git a/tests/components/fully_kiosk/test_camera.py b/tests/components/fully_kiosk/test_camera.py index 4e48749eebb..a2e7067ff1b 100644 --- a/tests/components/fully_kiosk/test_camera.py +++ b/tests/components/fully_kiosk/test_camera.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock +from fullykiosk import FullyKioskError import pytest from homeassistant.components.camera import async_get_image @@ -41,6 +42,12 @@ async def test_camera( assert mock_fully_kiosk.getCamshot.call_count == 1 assert image.content == b"image_bytes" + fully_kiosk_error = FullyKioskError("error", "status") + mock_fully_kiosk.getCamshot.side_effect = fully_kiosk_error + with pytest.raises(HomeAssistantError) as error: + await async_get_image(hass, entity_camera) + assert error.value.args[0] == fully_kiosk_error + mock_fully_kiosk.getSettings.return_value = {"motionDetection": False} await hass.services.async_call( "camera", diff --git a/tests/components/fully_kiosk/test_number.py b/tests/components/fully_kiosk/test_number.py index 2fbbf751725..5f74002f8cd 100644 --- a/tests/components/fully_kiosk/test_number.py +++ b/tests/components/fully_kiosk/test_number.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock from homeassistant.components import number from homeassistant.components.fully_kiosk.const import DOMAIN, UPDATE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import dt as dt_util @@ -81,9 +81,11 @@ async def test_numbers( assert device_entry.sw_version == "1.42.5" -def set_value(hass, entity_id, value): +async def set_value( + hass: HomeAssistant, entity_id: str, value: float +) -> ServiceResponse: """Set the value of a number entity.""" - return 
hass.services.async_call( + return await hass.services.async_call( number.DOMAIN, "set_value", {ATTR_ENTITY_ID: entity_id, number.ATTR_VALUE: value}, diff --git a/tests/components/fully_kiosk/test_switch.py b/tests/components/fully_kiosk/test_switch.py index 5b3b5e651b0..14a464e0dcd 100644 --- a/tests/components/fully_kiosk/test_switch.py +++ b/tests/components/fully_kiosk/test_switch.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock from homeassistant.components import switch from homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, async_fire_mqtt_message @@ -149,8 +149,10 @@ def has_subscribed(mqtt_mock: MqttMockHAClient, topic: str) -> bool: return False -def call_service(hass, service, entity_id): +async def call_service( + hass: HomeAssistant, service: str, entity_id: str +) -> ServiceResponse: """Call any service on entity.""" - return hass.services.async_call( + return await hass.services.async_call( switch.DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True ) diff --git a/tests/components/fyta/conftest.py b/tests/components/fyta/conftest.py index de5dece776c..2bcad9b3c80 100644 --- a/tests/components/fyta/conftest.py +++ b/tests/components/fyta/conftest.py @@ -1,10 +1,11 @@ """Test helpers for FYTA.""" +from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch +from fyta_cli.fyta_models import Credentials, Plant import pytest -from typing_extensions import Generator from homeassistant.components.fyta.const import CONF_EXPIRATION, DOMAIN as FYTA_DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME @@ -35,23 +36,27 @@ def mock_config_entry() -> MockConfigEntry: def 
mock_fyta_connector(): """Build a fixture for the Fyta API that connects successfully and returns one device.""" + plants: dict[int, Plant] = { + 0: Plant.from_dict(load_json_object_fixture("plant_status1.json", FYTA_DOMAIN)), + 1: Plant.from_dict(load_json_object_fixture("plant_status2.json", FYTA_DOMAIN)), + } + mock_fyta_connector = AsyncMock() mock_fyta_connector.expiration = datetime.fromisoformat(EXPIRATION).replace( tzinfo=UTC ) mock_fyta_connector.client = AsyncMock(autospec=True) - mock_fyta_connector.update_all_plants.return_value = load_json_object_fixture( - "plant_status.json", FYTA_DOMAIN - ) - mock_fyta_connector.plant_list = load_json_object_fixture( - "plant_list.json", FYTA_DOMAIN - ) + mock_fyta_connector.update_all_plants.return_value = plants + mock_fyta_connector.plant_list = { + 0: "Gummibaum", + 1: "Kakaobaum", + } mock_fyta_connector.login = AsyncMock( - return_value={ - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_EXPIRATION: datetime.fromisoformat(EXPIRATION).replace(tzinfo=UTC), - } + return_value=Credentials( + access_token=ACCESS_TOKEN, + expiration=datetime.fromisoformat(EXPIRATION).replace(tzinfo=UTC), + ) ) with ( patch( diff --git a/tests/components/fyta/fixtures/plant_list.json b/tests/components/fyta/fixtures/plant_list.json deleted file mode 100644 index 9527c7d9d96..00000000000 --- a/tests/components/fyta/fixtures/plant_list.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "0": "Gummibaum", - "1": "Kakaobaum" -} diff --git a/tests/components/fyta/fixtures/plant_status.json b/tests/components/fyta/fixtures/plant_status.json deleted file mode 100644 index 5d9cb2d31d9..00000000000 --- a/tests/components/fyta/fixtures/plant_status.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "0": { - "name": "Gummibaum", - "scientific_name": "Ficus elastica", - "status": 1, - "sw_version": "1.0" - }, - "1": { - "name": "Kakaobaum", - "scientific_name": "Theobroma cacao", - "status": 2, - "sw_version": "1.0" - } -} diff --git 
a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json new file mode 100644 index 00000000000..f2e8dc9c970 --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status1.json @@ -0,0 +1,23 @@ +{ + "battery_level": 80, + "battery_status": true, + "last_updated": "2023-01-10 10:10:00", + "light": 2, + "light_status": 3, + "nickname": "Gummibaum", + "moisture": 61, + "moisture_status": 3, + "sensor_available": true, + "sw_version": "1.0", + "status": 3, + "online": true, + "ph": null, + "plant_id": 0, + "plant_origin_path": "", + "plant_thumb_path": "", + "salinity": 1, + "salinity_status": 4, + "scientific_name": "Ficus elastica", + "temperature": 25.2, + "temperature_status": 3 +} diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json new file mode 100644 index 00000000000..a5c2735ca7c --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status2.json @@ -0,0 +1,23 @@ +{ + "battery_level": 80, + "battery_status": true, + "last_updated": "2023-01-02 10:10:00", + "light": 2, + "light_status": 3, + "nickname": "Kakaobaum", + "moisture": 61, + "moisture_status": 3, + "sensor_available": true, + "sw_version": "1.0", + "status": 3, + "online": true, + "ph": 7, + "plant_id": 0, + "plant_origin_path": "", + "plant_thumb_path": "", + "salinity": 1, + "salinity_status": 4, + "scientific_name": "Theobroma cacao", + "temperature": 25.2, + "temperature_status": 3 +} diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index 7491310129b..cf6bcdb77ad 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -23,16 +23,50 @@ }), 'plant_data': dict({ '0': dict({ + 'battery_level': 80.0, + 'battery_status': True, + 'last_updated': '2023-01-10T10:10:00', + 'light': 2.0, + 'light_status': 3, + 'moisture': 61.0, + 'moisture_status': 3, 
'name': 'Gummibaum', + 'online': True, + 'ph': None, + 'plant_id': 0, + 'plant_origin_path': '', + 'plant_thumb_path': '', + 'salinity': 1.0, + 'salinity_status': 4, 'scientific_name': 'Ficus elastica', - 'status': 1, + 'sensor_available': True, + 'status': 3, 'sw_version': '1.0', + 'temperature': 25.2, + 'temperature_status': 3, }), '1': dict({ + 'battery_level': 80.0, + 'battery_status': True, + 'last_updated': '2023-01-02T10:10:00', + 'light': 2.0, + 'light_status': 3, + 'moisture': 61.0, + 'moisture_status': 3, 'name': 'Kakaobaum', + 'online': True, + 'ph': 7.0, + 'plant_id': 0, + 'plant_origin_path': '', + 'plant_thumb_path': '', + 'salinity': 1.0, + 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', - 'status': 2, + 'sensor_available': True, + 'status': 3, 'sw_version': '1.0', + 'temperature': 25.2, + 'temperature_status': 3, }), }), }) diff --git a/tests/components/fyta/snapshots/test_sensor.ambr b/tests/components/fyta/snapshots/test_sensor.ambr index 1041fff501e..2e96de0a283 100644 --- a/tests/components/fyta/snapshots/test_sensor.ambr +++ b/tests/components/fyta/snapshots/test_sensor.ambr @@ -1,4 +1,334 @@ # serializer version: 1 +# name: test_all_entities[sensor.gummibaum_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gummibaum_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_all_entities[sensor.gummibaum_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Gummibaum Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-light', + 'unit_of_measurement': 'μmol/s⋅m²', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gummibaum Light', + 'state_class': , + 'unit_of_measurement': 'μmol/s⋅m²', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_light_state', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Light state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-light_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_light_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Light state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_light_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_moisture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-moisture', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Gummibaum Moisture', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_moisture', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_moisture_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'moisture_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-moisture_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Moisture state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_moisture_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.gummibaum_ph-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_ph', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'pH', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-ph', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_ph-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'ph', + 'friendly_name': 'Gummibaum pH', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_ph', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_all_entities[sensor.gummibaum_plant_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -56,7 +386,122 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'doing_great', + 'state': 'no_sensor', + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_salinity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'conductivity', + 'friendly_name': 'Gummibaum Salinity', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_salinity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '1.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_salinity_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Salinity state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_salinity_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'high', }) # --- # name: test_all_entities[sensor.gummibaum_scientific_name-entry] @@ -105,6 +550,451 @@ 'state': 'Ficus elastica', }) # --- +# name: test_all_entities[sensor.gummibaum_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gummibaum Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.2', + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_temperature_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-temperature_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Temperature state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 
'sensor.gummibaum_temperature_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.kakaobaum_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Kakaobaum Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'light', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-light', + 'unit_of_measurement': 'μmol/s⋅m²', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kakaobaum Light', + 'state_class': , + 'unit_of_measurement': 'μmol/s⋅m²', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_light_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Light state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-light_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Light state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_light_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , 
+ }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_moisture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-moisture', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Kakaobaum Moisture', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_moisture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_moisture_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'moisture_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-moisture_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture_state-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Moisture state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_moisture_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_ph-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_ph', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'pH', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-ph', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_ph-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'ph', + 'friendly_name': 'Kakaobaum pH', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_ph', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.0', + }) +# --- # name: test_all_entities[sensor.kakaobaum_plant_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -162,7 +1052,122 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'need_attention', + 'state': 'no_sensor', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_salinity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'conductivity', + 'friendly_name': 'Kakaobaum Salinity', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_salinity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_salinity_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 
'friendly_name': 'Kakaobaum Salinity state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_salinity_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'high', }) # --- # name: test_all_entities[sensor.kakaobaum_scientific_name-entry] @@ -211,3 +1216,118 @@ 'state': 'Theobroma cacao', }) # --- +# name: test_all_entities[sensor.kakaobaum_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Kakaobaum Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.2', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_temperature_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-temperature_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Temperature state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_temperature_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- diff --git a/tests/components/fyta/test_diagnostics.py b/tests/components/fyta/test_diagnostics.py index 3a95b533489..cfaa5484b82 100644 --- a/tests/components/fyta/test_diagnostics.py +++ b/tests/components/fyta/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -28,4 +29,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/gardena_bluetooth/conftest.py b/tests/components/gardena_bluetooth/conftest.py index 08f698b4b67..882c9b1b090 100644 --- a/tests/components/gardena_bluetooth/conftest.py +++ b/tests/components/gardena_bluetooth/conftest.py @@ -1,6 +1,6 @@ """Common fixtures for the Gardena Bluetooth tests.""" -from 
collections.abc import Callable, Coroutine +from collections.abc import Callable, Coroutine, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -10,7 +10,6 @@ from gardena_bluetooth.const import DeviceInformation from gardena_bluetooth.exceptions import CharacteristicNotFound from gardena_bluetooth.parse import Characteristic import pytest -from typing_extensions import Generator from homeassistant.components.gardena_bluetooth.const import DOMAIN from homeassistant.components.gardena_bluetooth.coordinator import SCAN_INTERVAL diff --git a/tests/components/gardena_bluetooth/snapshots/test_init.ambr b/tests/components/gardena_bluetooth/snapshots/test_init.ambr index 82e17896d60..71195918bb1 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_init.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': None, 'model': 'Mock Model', + 'model_id': None, 'name': 'Mock Title', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '1.2.3', diff --git a/tests/components/generic/conftest.py b/tests/components/generic/conftest.py index 92a9298cbd5..69e6cc6b696 100644 --- a/tests/components/generic/conftest.py +++ b/tests/components/generic/conftest.py @@ -1,7 +1,10 @@ """Test fixtures for the generic component.""" +from __future__ import annotations + +from collections.abc import Generator from io import BytesIO -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, _patch, patch from PIL import Image import pytest @@ -9,12 +12,14 @@ import respx from homeassistant import config_entries from homeassistant.components.generic.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture(scope="package") -def fakeimgbytes_png(): +def fakeimgbytes_png() -> 
bytes: """Fake image in RAM for testing.""" buf = BytesIO() Image.new("RGB", (1, 1)).save(buf, format="PNG") @@ -22,7 +27,7 @@ def fakeimgbytes_png(): @pytest.fixture(scope="package") -def fakeimgbytes_jpg(): +def fakeimgbytes_jpg() -> bytes: """Fake image in RAM for testing.""" buf = BytesIO() # fake image in ram for testing. Image.new("RGB", (1, 1)).save(buf, format="jpeg") @@ -30,7 +35,7 @@ def fakeimgbytes_jpg(): @pytest.fixture(scope="package") -def fakeimgbytes_svg(): +def fakeimgbytes_svg() -> bytes: """Fake image in RAM for testing.""" return bytes( '', @@ -39,7 +44,7 @@ def fakeimgbytes_svg(): @pytest.fixture(scope="package") -def fakeimgbytes_gif(): +def fakeimgbytes_gif() -> bytes: """Fake image in RAM for testing.""" buf = BytesIO() # fake image in ram for testing. Image.new("RGB", (1, 1)).save(buf, format="gif") @@ -47,19 +52,27 @@ def fakeimgbytes_gif(): @pytest.fixture -def fakeimg_png(fakeimgbytes_png): +def fakeimg_png(fakeimgbytes_png: bytes) -> Generator[None]: """Set up respx to respond to test url with fake image bytes.""" - respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) + respx.get("http://127.0.0.1/testurl/1", name="fake_img").respond( + stream=fakeimgbytes_png + ) + yield + respx.pop("fake_img") @pytest.fixture -def fakeimg_gif(fakeimgbytes_gif): +def fakeimg_gif(fakeimgbytes_gif: bytes) -> Generator[None]: """Set up respx to respond to test url with fake image bytes.""" - respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_gif) + respx.get("http://127.0.0.1/testurl/1", name="fake_img").respond( + stream=fakeimgbytes_gif + ) + yield + respx.pop("fake_img") @pytest.fixture(scope="package") -def mock_create_stream(): +def mock_create_stream() -> _patch[MagicMock]: """Mock create stream.""" mock_stream = Mock() mock_provider = Mock() @@ -75,7 +88,7 @@ def mock_create_stream(): @pytest.fixture -async def user_flow(hass): +async def user_flow(hass: HomeAssistant) -> ConfigFlowResult: """Initiate a user 
flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -87,7 +100,7 @@ async def user_flow(hass): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass): +def config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -112,7 +125,9 @@ def config_entry_fixture(hass): @pytest.fixture -async def setup_entry(hass, config_entry): +async def setup_entry( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> MockConfigEntry: """Set up a config entry ready to be used in tests.""" await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/generic/test_camera.py b/tests/components/generic/test_camera.py index 72a7c32ba25..59ff513ccc9 100644 --- a/tests/components/generic/test_camera.py +++ b/tests/components/generic/test_camera.py @@ -73,7 +73,7 @@ async def help_setup_mock_config_entry( async def test_fetching_url( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png, + fakeimgbytes_png: bytes, caplog: pytest.LogCaptureFixture, ) -> None: """Test that it fetches the given url.""" @@ -132,7 +132,7 @@ async def test_image_caching( hass: HomeAssistant, hass_client: ClientSessionGenerator, freezer: FrozenDateTimeFactory, - fakeimgbytes_png, + fakeimgbytes_png: bytes, ) -> None: """Test that the image is cached and not fetched more often than the framerate indicates.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -197,7 +197,7 @@ async def test_image_caching( @respx.mock async def test_fetching_without_verify_ssl( - hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png + hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png: bytes ) -> None: """Test that it fetches the given url when ssl verify is off.""" 
respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -221,7 +221,7 @@ async def test_fetching_without_verify_ssl( @respx.mock async def test_fetching_url_with_verify_ssl( - hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png + hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png: bytes ) -> None: """Test that it fetches the given url when ssl verify is explicitly on.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -247,8 +247,8 @@ async def test_fetching_url_with_verify_ssl( async def test_limit_refetch( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png, - fakeimgbytes_jpg, + fakeimgbytes_png: bytes, + fakeimgbytes_jpg: bytes, ) -> None: """Test that it fetches the given url.""" respx.get("http://example.com/0a").respond(stream=fakeimgbytes_png) @@ -319,7 +319,7 @@ async def test_stream_source( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png, + fakeimgbytes_png: bytes, ) -> None: """Test that the stream source is rendered.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -376,7 +376,7 @@ async def test_stream_source_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png, + fakeimgbytes_png: bytes, ) -> None: """Test that the stream source has an error.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -418,7 +418,7 @@ async def test_stream_source_error( @respx.mock async def test_setup_alternative_options( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, fakeimgbytes_png + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, fakeimgbytes_png: bytes ) -> None: """Test that the stream source is setup with different config options.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -442,7 +442,7 @@ async def test_no_stream_source( hass: HomeAssistant, 
hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png, + fakeimgbytes_png: bytes, ) -> None: """Test a stream request without stream source option set.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -482,8 +482,8 @@ async def test_no_stream_source( async def test_camera_content_type( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_svg, - fakeimgbytes_jpg, + fakeimgbytes_svg: bytes, + fakeimgbytes_jpg: bytes, ) -> None: """Test generic camera with custom content_type.""" urlsvg = "https://upload.wikimedia.org/wikipedia/commons/0/02/SVG_logo.svg" @@ -532,8 +532,8 @@ async def test_camera_content_type( async def test_timeout_cancelled( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png, - fakeimgbytes_jpg, + fakeimgbytes_png: bytes, + fakeimgbytes_jpg: bytes, ) -> None: """Test that timeouts and cancellations return last image.""" diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index 7e76d8f3891..e7af9383791 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -1,10 +1,13 @@ """Test The generic (IP Camera) config flow.""" +from __future__ import annotations + import contextlib import errno from http import HTTPStatus import os.path -from unittest.mock import AsyncMock, PropertyMock, patch +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, PropertyMock, _patch, patch import httpx import pytest @@ -27,7 +30,7 @@ from homeassistant.components.stream import ( CONF_USE_WALLCLOCK_AS_TIMESTAMPS, ) from homeassistant.components.stream.worker import StreamWorkerError -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntryState, ConfigFlowResult from homeassistant.const import ( CONF_AUTHENTICATION, CONF_NAME, @@ -38,6 +41,7 @@ from homeassistant.const import ( ) from 
homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry @@ -67,10 +71,10 @@ TESTDATA_YAML = { @respx.mock async def test_form( hass: HomeAssistant, - fakeimgbytes_png, + fakeimgbytes_png: bytes, hass_client: ClientSessionGenerator, - user_flow, - mock_create_stream, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test the form with a normal set of settings.""" @@ -121,8 +125,9 @@ async def test_form( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_only_stillimage( - hass: HomeAssistant, fakeimg_png, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we complete ok if the user wants still images only.""" result = await hass.config_entries.flow.async_init( @@ -163,7 +168,10 @@ async def test_form_only_stillimage( @respx.mock async def test_form_reject_still_preview( - hass: HomeAssistant, fakeimgbytes_png, mock_create_stream, user_flow + hass: HomeAssistant, + fakeimgbytes_png: bytes, + mock_create_stream: _patch[MagicMock], + user_flow: ConfigFlowResult, ) -> None: """Test we go back to the config screen if the user rejects the still preview.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -183,11 +191,11 @@ async def test_form_reject_still_preview( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_still_preview_cam_off( hass: HomeAssistant, - fakeimg_png, - mock_create_stream, - user_flow, + mock_create_stream: _patch[MagicMock], + user_flow: ConfigFlowResult, hass_client: ClientSessionGenerator, ) -> None: """Test camera errors are triggered during preview.""" @@ -212,8 +220,9 @@ async def test_form_still_preview_cam_off( @respx.mock +@pytest.mark.usefixtures("fakeimg_gif") async def test_form_only_stillimage_gif( - hass: 
HomeAssistant, fakeimg_gif, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we complete ok if the user wants a gif.""" data = TESTDATA.copy() @@ -236,7 +245,7 @@ async def test_form_only_stillimage_gif( @respx.mock async def test_form_only_svg_whitespace( - hass: HomeAssistant, fakeimgbytes_svg, user_flow + hass: HomeAssistant, fakeimgbytes_svg: bytes, user_flow: ConfigFlowResult ) -> None: """Test we complete ok if svg starts with whitespace, issue #68889.""" fakeimgbytes_wspace_svg = bytes(" \n ", encoding="utf-8") + fakeimgbytes_svg @@ -270,12 +279,12 @@ async def test_form_only_svg_whitespace( ], ) async def test_form_only_still_sample( - hass: HomeAssistant, user_flow, image_file + hass: HomeAssistant, user_flow: ConfigFlowResult, image_file ) -> None: """Test various sample images #69037.""" image_path = os.path.join(os.path.dirname(__file__), image_file) - with open(image_path, "rb") as image: - respx.get("http://127.0.0.1/testurl/1").respond(stream=image.read()) + image_bytes = await hass.async_add_executor_job(Path(image_path).read_bytes) + respx.get("http://127.0.0.1/testurl/1").respond(stream=image_bytes) data = TESTDATA.copy() data.pop(CONF_STREAM_SOURCE) with patch("homeassistant.components.generic.async_setup_entry", return_value=True): @@ -332,8 +341,8 @@ async def test_form_only_still_sample( ) async def test_still_template( hass: HomeAssistant, - user_flow, - fakeimgbytes_png, + user_flow: ConfigFlowResult, + fakeimgbytes_png: bytes, template, url, expected_result, @@ -358,8 +367,11 @@ async def test_still_template( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_rtsp_mode( - hass: HomeAssistant, fakeimg_png, user_flow, mock_create_stream + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we complete ok if the user enters a stream url.""" data = TESTDATA.copy() @@ -398,7 +410,10 @@ async def test_form_rtsp_mode( async def 
test_form_only_stream( - hass: HomeAssistant, fakeimgbytes_jpg, user_flow, mock_create_stream + hass: HomeAssistant, + fakeimgbytes_jpg: bytes, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we complete ok if the user wants stream only.""" data = TESTDATA.copy() @@ -434,7 +449,7 @@ async def test_form_only_stream( async def test_form_still_and_stream_not_provided( - hass: HomeAssistant, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we show a suitable error if neither still or stream URL are provided.""" result2 = await hass.config_entries.flow.async_configure( @@ -481,7 +496,11 @@ async def test_form_still_and_stream_not_provided( ], ) async def test_form_image_http_exceptions( - side_effect, expected_message, hass: HomeAssistant, user_flow, mock_create_stream + side_effect, + expected_message, + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we handle image http exceptions.""" respx.get("http://127.0.0.1/testurl/1").side_effect = [ @@ -501,7 +520,9 @@ async def test_form_image_http_exceptions( @respx.mock async def test_form_stream_invalidimage( - hass: HomeAssistant, user_flow, mock_create_stream + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=b"invalid") @@ -518,7 +539,9 @@ async def test_form_stream_invalidimage( @respx.mock async def test_form_stream_invalidimage2( - hass: HomeAssistant, user_flow, mock_create_stream + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(content=None) @@ -535,7 +558,9 @@ async def test_form_stream_invalidimage2( @respx.mock async def 
test_form_stream_invalidimage3( - hass: HomeAssistant, user_flow, mock_create_stream + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(content=bytes([0xFF])) @@ -551,7 +576,10 @@ async def test_form_stream_invalidimage3( @respx.mock -async def test_form_stream_timeout(hass: HomeAssistant, fakeimg_png, user_flow) -> None: +@pytest.mark.usefixtures("fakeimg_png") +async def test_form_stream_timeout( + hass: HomeAssistant, user_flow: ConfigFlowResult +) -> None: """Test we handle invalid auth.""" with patch( "homeassistant.components.generic.config_flow.create_stream" @@ -570,8 +598,49 @@ async def test_form_stream_timeout(hass: HomeAssistant, fakeimg_png, user_flow) @respx.mock +async def test_form_stream_not_set_up(hass: HomeAssistant, user_flow) -> None: + """Test we handle if stream has not been set up.""" + TESTDATA_ONLY_STREAM = TESTDATA.copy() + TESTDATA_ONLY_STREAM.pop(CONF_STILL_IMAGE_URL) + + with patch( + "homeassistant.components.generic.config_flow.create_stream", + side_effect=HomeAssistantError("Stream integration is not set up."), + ): + result1 = await hass.config_entries.flow.async_configure( + user_flow["flow_id"], + TESTDATA_ONLY_STREAM, + ) + await hass.async_block_till_done() + + assert result1["type"] is FlowResultType.FORM + assert result1["errors"] == {"stream_source": "stream_not_set_up"} + + +@respx.mock +async def test_form_stream_other_error(hass: HomeAssistant, user_flow) -> None: + """Test the unknown error for streams.""" + TESTDATA_ONLY_STREAM = TESTDATA.copy() + TESTDATA_ONLY_STREAM.pop(CONF_STILL_IMAGE_URL) + + with ( + patch( + "homeassistant.components.generic.config_flow.create_stream", + side_effect=HomeAssistantError("Some other error."), + ), + pytest.raises(HomeAssistantError), + ): + await hass.config_entries.flow.async_configure( + user_flow["flow_id"], + 
TESTDATA_ONLY_STREAM, + ) + await hass.async_block_till_done() + + +@respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_stream_worker_error( - hass: HomeAssistant, fakeimg_png, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we handle a StreamWorkerError and pass the message through.""" with patch( @@ -588,7 +657,7 @@ async def test_form_stream_worker_error( @respx.mock async def test_form_stream_permission_error( - hass: HomeAssistant, fakeimgbytes_png, user_flow + hass: HomeAssistant, fakeimgbytes_png: bytes, user_flow: ConfigFlowResult ) -> None: """Test we handle permission error.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -605,8 +674,9 @@ async def test_form_stream_permission_error( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_no_route_to_host( - hass: HomeAssistant, fakeimg_png, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we handle no route to host.""" with patch( @@ -622,8 +692,9 @@ async def test_form_no_route_to_host( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_stream_io_error( - hass: HomeAssistant, fakeimg_png, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we handle no io error when setting up stream.""" with patch( @@ -639,7 +710,8 @@ async def test_form_stream_io_error( @respx.mock -async def test_form_oserror(hass: HomeAssistant, fakeimg_png, user_flow) -> None: +@pytest.mark.usefixtures("fakeimg_png") +async def test_form_oserror(hass: HomeAssistant, user_flow: ConfigFlowResult) -> None: """Test we handle OS error when setting up stream.""" with ( patch( @@ -656,7 +728,7 @@ async def test_form_oserror(hass: HomeAssistant, fakeimg_png, user_flow) -> None @respx.mock async def test_options_template_error( - hass: HomeAssistant, fakeimgbytes_png, mock_create_stream + hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: 
_patch[MagicMock] ) -> None: """Test the options flow with a template error.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -754,7 +826,7 @@ async def test_slug(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> No @respx.mock async def test_options_only_stream( - hass: HomeAssistant, fakeimgbytes_png, mock_create_stream + hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: _patch[MagicMock] ) -> None: """Test the options flow without a still_image_url.""" respx.get("http://127.0.0.1/testurl/2").respond(stream=fakeimgbytes_png) @@ -791,7 +863,8 @@ async def test_options_only_stream( assert result3["data"][CONF_CONTENT_TYPE] == "image/jpeg" -async def test_unload_entry(hass: HomeAssistant, fakeimg_png) -> None: +@pytest.mark.usefixtures("fakeimg_png") +async def test_unload_entry(hass: HomeAssistant) -> None: """Test unloading the generic IP Camera entry.""" mock_entry = MockConfigEntry(domain=DOMAIN, options=TESTDATA) mock_entry.add_to_hass(hass) @@ -861,8 +934,9 @@ async def test_migrate_existing_ids( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_use_wallclock_as_timestamps_option( - hass: HomeAssistant, fakeimg_png, mock_create_stream + hass: HomeAssistant, mock_create_stream: _patch[MagicMock] ) -> None: """Test the use_wallclock_as_timestamps option flow.""" diff --git a/tests/components/generic/test_diagnostics.py b/tests/components/generic/test_diagnostics.py index f68c3ba4bc6..80fa5fd4d4e 100644 --- a/tests/components/generic/test_diagnostics.py +++ b/tests/components/generic/test_diagnostics.py @@ -6,12 +6,15 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.generic.diagnostics import redact_url from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( - hass: 
HomeAssistant, hass_client: ClientSessionGenerator, setup_entry + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + setup_entry: MockConfigEntry, ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/generic_hygrostat/snapshots/test_config_flow.ambr b/tests/components/generic_hygrostat/snapshots/test_config_flow.ambr new file mode 100644 index 00000000000..3527596c9b9 --- /dev/null +++ b/tests/components/generic_hygrostat/snapshots/test_config_flow.ambr @@ -0,0 +1,66 @@ +# serializer version: 1 +# name: test_config_flow[create] + FlowResultSnapshot({ + 'result': ConfigEntrySnapshot({ + 'title': 'My hygrostat', + }), + 'title': 'My hygrostat', + 'type': , + }) +# --- +# name: test_config_flow[init] + FlowResultSnapshot({ + 'type': , + }) +# --- +# name: test_options[create_entry] + FlowResultSnapshot({ + 'result': True, + 'type': , + }) +# --- +# name: test_options[dehumidifier] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'action': , + 'current_humidity': 10.0, + 'device_class': 'dehumidifier', + 'friendly_name': 'My hygrostat', + 'humidity': 100, + 'max_humidity': 100, + 'min_humidity': 0, + 'supported_features': , + }), + 'context': , + 'entity_id': 'humidifier.my_hygrostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_options[humidifier] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'action': , + 'current_humidity': 10.0, + 'device_class': 'humidifier', + 'friendly_name': 'My hygrostat', + 'humidity': 100, + 'max_humidity': 100, + 'min_humidity': 0, + 'supported_features': , + }), + 'context': , + 'entity_id': 'humidifier.my_hygrostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_options[init] + FlowResultSnapshot({ + 'type': , + }) +# --- diff --git a/tests/components/generic_hygrostat/test_config_flow.py b/tests/components/generic_hygrostat/test_config_flow.py new file mode 100644 index 
00000000000..49572e296e4 --- /dev/null +++ b/tests/components/generic_hygrostat/test_config_flow.py @@ -0,0 +1,106 @@ +"""Test the generic hygrostat config flow.""" + +from unittest.mock import patch + +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.components.generic_hygrostat import ( + CONF_DEVICE_CLASS, + CONF_DRY_TOLERANCE, + CONF_HUMIDIFIER, + CONF_NAME, + CONF_SENSOR, + CONF_WET_TOLERANCE, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +SNAPSHOT_FLOW_PROPS = props("type", "title", "result", "error") + + +async def test_config_flow(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test the config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result == snapshot(name="init", include=SNAPSHOT_FLOW_PROPS) + + with patch( + "homeassistant.components.generic_hygrostat.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: "My hygrostat", + CONF_DRY_TOLERANCE: 2, + CONF_WET_TOLERANCE: 4, + CONF_HUMIDIFIER: "switch.run", + CONF_SENSOR: "sensor.humidity", + CONF_DEVICE_CLASS: "dehumidifier", + }, + ) + await hass.async_block_till_done() + + assert result == snapshot(name="create", include=SNAPSHOT_FLOW_PROPS) + assert len(mock_setup_entry.mock_calls) == 1 + + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + assert config_entry.data == {} + assert config_entry.title == "My hygrostat" + + +async def test_options(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test reconfiguring.""" + + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + CONF_DEVICE_CLASS: "dehumidifier", + CONF_DRY_TOLERANCE: 2.0, + CONF_HUMIDIFIER: "switch.run", + CONF_NAME: "My hygrostat", + 
CONF_SENSOR: "sensor.humidity", + CONF_WET_TOLERANCE: 4.0, + }, + title="My hygrostat", + ) + config_entry.add_to_hass(hass) + + # set some initial values + hass.states.async_set( + "sensor.humidity", + "10", + {"unit_of_measurement": "%", "device_class": "humidity"}, + ) + hass.states.async_set("switch.run", "on") + + # check that it is setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert hass.states.get("humidifier.my_hygrostat") == snapshot(name="dehumidifier") + + # switch to humidifier + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result == snapshot(name="init", include=SNAPSHOT_FLOW_PROPS) + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_DRY_TOLERANCE: 2, + CONF_WET_TOLERANCE: 4, + CONF_HUMIDIFIER: "switch.run", + CONF_SENSOR: "sensor.humidity", + CONF_DEVICE_CLASS: "humidifier", + }, + ) + assert result == snapshot(name="create_entry", include=SNAPSHOT_FLOW_PROPS) + + # Check config entry is reloaded with new options + await hass.async_block_till_done() + assert hass.states.get("humidifier.my_hygrostat") == snapshot(name="humidifier") diff --git a/tests/components/generic_hygrostat/test_humidifier.py b/tests/components/generic_hygrostat/test_humidifier.py index eadc1b22527..fc46db48664 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -7,6 +7,9 @@ import pytest import voluptuous as vol from homeassistant.components import input_boolean, switch +from homeassistant.components.generic_hygrostat import ( + DOMAIN as GENERIC_HYDROSTAT_DOMAIN, +) from homeassistant.components.humidifier import ( ATTR_HUMIDITY, DOMAIN, @@ -26,17 +29,18 @@ from homeassistant.const import ( ) import homeassistant.core as ha from homeassistant.core import ( - DOMAIN as HASS_DOMAIN, + DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant, 
State, callback, ) -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( + MockConfigEntry, assert_setup_component, async_fire_time_changed, mock_restore_cache, @@ -83,13 +87,14 @@ async def test_valid_conf(hass: HomeAssistant) -> None: @pytest.fixture -async def setup_comp_1(hass): +async def setup_comp_1(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() -async def test_humidifier_input_boolean(hass: HomeAssistant, setup_comp_1) -> None: +@pytest.mark.usefixtures("setup_comp_1") +async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: """Test humidifier switching input_boolean.""" humidifier_switch = "input_boolean.test" assert await async_setup_component( @@ -128,8 +133,9 @@ async def test_humidifier_input_boolean(hass: HomeAssistant, setup_comp_1) -> No assert hass.states.get(ENTITY).attributes.get("action") == "humidifying" +@pytest.mark.usefixtures("setup_comp_1") async def test_humidifier_switch( - hass: HomeAssistant, setup_comp_1, mock_switch_entities: list[MockSwitch] + hass: HomeAssistant, mock_switch_entities: list[MockSwitch] ) -> None: """Test humidifier switching test switch.""" setup_test_component_platform(hass, switch.DOMAIN, mock_switch_entities) @@ -172,8 +178,9 @@ async def test_humidifier_switch( assert hass.states.get(ENTITY).attributes.get("action") == "humidifying" +@pytest.mark.usefixtures("setup_comp_1") async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp_1 + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test setting a unique ID.""" unique_id = "some_unique_id" @@ -205,7 +212,7 @@ def _setup_sensor(hass, humidity): @pytest.fixture -async def 
setup_comp_0(hass): +async def setup_comp_0(hass: HomeAssistant) -> None: """Initialize components.""" _setup_sensor(hass, 45) hass.states.async_set(ENT_SWITCH, STATE_OFF) @@ -231,7 +238,7 @@ async def setup_comp_0(hass): @pytest.fixture -async def setup_comp_2(hass): +async def setup_comp_2(hass: HomeAssistant) -> None: """Initialize components.""" _setup_sensor(hass, 45) hass.states.async_set(ENT_SWITCH, STATE_OFF) @@ -303,7 +310,8 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: assert hass.states.get(ENTITY).state == STATE_UNAVAILABLE -async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_default_setup_params(hass: HomeAssistant) -> None: """Test the setup with default parameters.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_humidity") == 0 @@ -312,9 +320,8 @@ async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: assert state.attributes.get("action") == "idle" -async def test_default_setup_params_dehumidifier( - hass: HomeAssistant, setup_comp_0 -) -> None: +@pytest.mark.usefixtures("setup_comp_0") +async def test_default_setup_params_dehumidifier(hass: HomeAssistant) -> None: """Test the setup with default parameters for dehumidifier.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_humidity") == 0 @@ -323,14 +330,16 @@ async def test_default_setup_params_dehumidifier( assert state.attributes.get("action") == "idle" -async def test_get_modes(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_get_modes(hass: HomeAssistant) -> None: """Test that the attributes returns the correct modes.""" state = hass.states.get(ENTITY) modes = state.attributes.get("available_modes") assert modes == [MODE_NORMAL, MODE_AWAY] -async def test_set_target_humidity(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def 
test_set_target_humidity(hass: HomeAssistant) -> None: """Test the setting of the target humidity.""" await hass.services.async_call( DOMAIN, @@ -353,7 +362,8 @@ async def test_set_target_humidity(hass: HomeAssistant, setup_comp_2) -> None: assert state.attributes.get("humidity") == 40 -async def test_set_away_mode(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_away_mode(hass: HomeAssistant) -> None: """Test the setting away mode.""" await hass.services.async_call( DOMAIN, @@ -373,9 +383,8 @@ async def test_set_away_mode(hass: HomeAssistant, setup_comp_2) -> None: assert state.attributes.get("humidity") == 35 -async def test_set_away_mode_and_restore_prev_humidity( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> None: """Test the setting and removing away mode. Verify original humidity is restored. @@ -407,8 +416,9 @@ async def test_set_away_mode_and_restore_prev_humidity( assert state.attributes.get("humidity") == 44 +@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_twice_and_restore_prev_humidity( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test the setting away mode twice in a row. 
@@ -448,7 +458,8 @@ async def test_set_away_mode_twice_and_restore_prev_humidity( assert state.attributes.get("humidity") == 44 -async def test_sensor_affects_attribute(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_sensor_affects_attribute(hass: HomeAssistant) -> None: """Test that the sensor changes are reflected in the current_humidity attribute.""" state = hass.states.get(ENTITY) assert state.attributes.get("current_humidity") == 45 @@ -460,7 +471,8 @@ async def test_sensor_affects_attribute(hass: HomeAssistant, setup_comp_2) -> No assert state.attributes.get("current_humidity") == 47 -async def test_sensor_bad_value(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_sensor_bad_value(hass: HomeAssistant) -> None: """Test sensor that have None as state.""" assert hass.states.get(ENTITY).state == STATE_ON @@ -470,8 +482,9 @@ async def test_sensor_bad_value(hass: HomeAssistant, setup_comp_2) -> None: assert hass.states.get(ENTITY).state == STATE_UNAVAILABLE +@pytest.mark.usefixtures("setup_comp_2") async def test_sensor_bad_value_twice( - hass: HomeAssistant, setup_comp_2, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test sensor that the second bad value is not logged as warning.""" assert hass.states.get(ENTITY).state == STATE_ON @@ -499,9 +512,8 @@ async def test_sensor_bad_value_twice( ] == ["DEBUG"] -async def test_set_target_humidity_humidifier_on( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_humidity_humidifier_on(hass: HomeAssistant) -> None: """Test if target humidity turn humidifier on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 36) @@ -515,14 +527,13 @@ async def test_set_target_humidity_humidifier_on( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == 
HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_set_target_humidity_humidifier_off( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_humidity_humidifier_off(hass: HomeAssistant) -> None: """Test if target humidity turn humidifier off.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) @@ -536,13 +547,14 @@ async def test_set_target_humidity_humidifier_off( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_on_within_tolerance( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test if humidity change doesn't turn on within tolerance.""" calls = await _setup_switch(hass, False) @@ -558,8 +570,9 @@ async def test_humidity_change_humidifier_on_within_tolerance( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_on_outside_tolerance( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test if humidity change turn humidifier on outside dry tolerance.""" calls = await _setup_switch(hass, False) @@ -574,13 +587,14 @@ async def test_humidity_change_humidifier_on_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_off_within_tolerance( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test if humidity change doesn't 
turn off within tolerance.""" calls = await _setup_switch(hass, True) @@ -596,8 +610,9 @@ async def test_humidity_change_humidifier_off_within_tolerance( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_off_outside_tolerance( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test if humidity change turn humidifier off outside wet tolerance.""" calls = await _setup_switch(hass, True) @@ -612,12 +627,13 @@ async def test_humidity_change_humidifier_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_operation_mode_humidify(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_operation_mode_humidify(hass: HomeAssistant) -> None: """Test change mode from OFF to HUMIDIFY. Switch turns on when humidity below setpoint and mode changes. 
@@ -648,7 +664,7 @@ async def test_operation_mode_humidify(hass: HomeAssistant, setup_comp_2) -> Non await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH @@ -671,7 +687,7 @@ async def _setup_switch(hass, is_on): @pytest.fixture -async def setup_comp_3(hass): +async def setup_comp_3(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -694,7 +710,8 @@ async def setup_comp_3(hass): await hass.async_block_till_done() -async def test_set_target_humidity_dry_off(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_target_humidity_dry_off(hass: HomeAssistant) -> None: """Test if target humidity turn dry off.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 50) @@ -708,13 +725,14 @@ async def test_set_target_humidity_dry_off(hass: HomeAssistant, setup_comp_3) -> await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH assert hass.states.get(ENTITY).attributes.get("action") == "drying" -async def test_turn_away_mode_on_drying(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_turn_away_mode_on_drying(hass: HomeAssistant) -> None: """Test the setting away mode when drying.""" await _setup_switch(hass, True) _setup_sensor(hass, 50) @@ -737,7 +755,8 @@ async def test_turn_away_mode_on_drying(hass: HomeAssistant, setup_comp_3) -> No assert state.attributes.get("humidity") == 30 -async def test_operation_mode_dry(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_operation_mode_dry(hass: HomeAssistant) 
-> None: """Test change mode from OFF to DRY. Switch turns on when humidity below setpoint and state changes. @@ -765,38 +784,39 @@ async def test_operation_mode_dry(hass: HomeAssistant, setup_comp_3) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_set_target_humidity_dry_on(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_target_humidity_dry_on(hass: HomeAssistant) -> None: """Test if target humidity turn dry on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 45) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_init_ignores_tolerance(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_init_ignores_tolerance(hass: HomeAssistant) -> None: """Test if tolerance is ignored on initialization.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 39) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_humidity_change_dry_off_within_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_humidity_change_dry_off_within_tolerance(hass: HomeAssistant) -> None: """Test if humidity change doesn't turn dry off within tolerance.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) @@ -805,8 +825,9 @@ async def test_humidity_change_dry_off_within_tolerance( assert 
len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_3") async def test_set_humidity_change_dry_off_outside_tolerance( - hass: HomeAssistant, setup_comp_3 + hass: HomeAssistant, ) -> None: """Test if humidity change turn dry off.""" calls = await _setup_switch(hass, True) @@ -814,14 +835,13 @@ async def test_set_humidity_change_dry_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_humidity_change_dry_on_within_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_humidity_change_dry_on_within_tolerance(hass: HomeAssistant) -> None: """Test if humidity change doesn't turn dry on within tolerance.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 37) @@ -830,23 +850,21 @@ async def test_humidity_change_dry_on_within_tolerance( assert len(calls) == 0 -async def test_humidity_change_dry_on_outside_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_humidity_change_dry_on_outside_tolerance(hass: HomeAssistant) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 45) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_running_when_operating_mode_is_off_2( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None: """Test that the switch turns off when enabled is set False.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) 
@@ -860,15 +878,14 @@ async def test_running_when_operating_mode_is_off_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH assert hass.states.get(ENTITY).attributes.get("action") == "off" -async def test_no_state_change_when_operation_mode_off_2( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 30) @@ -887,7 +904,7 @@ async def test_no_state_change_when_operation_mode_off_2( @pytest.fixture -async def setup_comp_4(hass): +async def setup_comp_4(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -910,8 +927,9 @@ async def setup_comp_4(hass): await hass.async_block_till_done() +@pytest.mark.usefixtures("setup_comp_4") async def test_humidity_change_dry_trigger_on_not_long_enough( - hass: HomeAssistant, setup_comp_4 + hass: HomeAssistant, ) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, False) @@ -924,9 +942,8 @@ async def test_humidity_change_dry_trigger_on_not_long_enough( assert len(calls) == 0 -async def test_humidity_change_dry_trigger_on_long_enough( - hass: HomeAssistant, setup_comp_4 -) -> None: +@pytest.mark.usefixtures("setup_comp_4") +async def test_humidity_change_dry_trigger_on_long_enough(hass: HomeAssistant) -> None: """Test if humidity change turn dry on.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) with freeze_time(fake_changed): @@ -939,13 +956,14 @@ async def test_humidity_change_dry_trigger_on_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - 
assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_4") async def test_humidity_change_dry_trigger_off_not_long_enough( - hass: HomeAssistant, setup_comp_4 + hass: HomeAssistant, ) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, True) @@ -958,9 +976,8 @@ async def test_humidity_change_dry_trigger_off_not_long_enough( assert len(calls) == 0 -async def test_humidity_change_dry_trigger_off_long_enough( - hass: HomeAssistant, setup_comp_4 -) -> None: +@pytest.mark.usefixtures("setup_comp_4") +async def test_humidity_change_dry_trigger_off_long_enough(hass: HomeAssistant) -> None: """Test if humidity change turn dry on.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) with freeze_time(fake_changed): @@ -973,14 +990,13 @@ async def test_humidity_change_dry_trigger_off_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_mode_change_dry_trigger_off_not_long_enough( - hass: HomeAssistant, setup_comp_4 -) -> None: +@pytest.mark.usefixtures("setup_comp_4") +async def test_mode_change_dry_trigger_off_not_long_enough(hass: HomeAssistant) -> None: """Test if mode change turns dry off despite minimum cycle.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) @@ -1000,9 +1016,8 @@ async def test_mode_change_dry_trigger_off_not_long_enough( assert call.data["entity_id"] == ENT_SWITCH -async def test_mode_change_dry_trigger_on_not_long_enough( - hass: HomeAssistant, setup_comp_4 -) -> None: +@pytest.mark.usefixtures("setup_comp_4") +async def test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) -> None: """Test if mode 
change turns dry on despite minimum cycle.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 35) @@ -1032,7 +1047,7 @@ async def test_mode_change_dry_trigger_on_not_long_enough( @pytest.fixture -async def setup_comp_6(hass): +async def setup_comp_6(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -1054,8 +1069,9 @@ async def setup_comp_6(hass): await hass.async_block_till_done() +@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_off_not_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if humidity change doesn't turn humidifier off because of time.""" calls = await _setup_switch(hass, True) @@ -1068,8 +1084,9 @@ async def test_humidity_change_humidifier_trigger_off_not_long_enough( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_on_not_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if humidity change doesn't turn humidifier on because of time.""" calls = await _setup_switch(hass, False) @@ -1082,8 +1099,9 @@ async def test_humidity_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_on_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if humidity change turn humidifier on after min cycle.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) @@ -1097,13 +1115,14 @@ async def test_humidity_change_humidifier_trigger_on_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_6") async def 
test_humidity_change_humidifier_trigger_off_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if humidity change turn humidifier off after min cycle.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) @@ -1117,13 +1136,14 @@ async def test_humidity_change_humidifier_trigger_off_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_6") async def test_mode_change_humidifier_trigger_off_not_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if mode change turns humidifier off despite minimum cycle.""" calls = await _setup_switch(hass, True) @@ -1145,8 +1165,9 @@ async def test_mode_change_humidifier_trigger_off_not_long_enough( assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_6") async def test_mode_change_humidifier_trigger_on_not_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if mode change turns humidifier on despite minimum cycle.""" calls = await _setup_switch(hass, False) @@ -1182,7 +1203,7 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( @pytest.fixture -async def setup_comp_7(hass): +async def setup_comp_7(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -1206,8 +1227,9 @@ async def setup_comp_7(hass): await hass.async_block_till_done() +@pytest.mark.usefixtures("setup_comp_7") async def test_humidity_change_dry_trigger_on_long_enough_3( - hass: HomeAssistant, setup_comp_7 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, True) @@ -1221,13 +1243,14 @@ async def 
test_humidity_change_dry_trigger_on_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_7") async def test_humidity_change_dry_trigger_off_long_enough_3( - hass: HomeAssistant, setup_comp_7 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, False) @@ -1241,13 +1264,13 @@ async def test_humidity_change_dry_trigger_off_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_8(hass): +async def setup_comp_8(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -1270,8 +1293,9 @@ async def setup_comp_8(hass): await hass.async_block_till_done() +@pytest.mark.usefixtures("setup_comp_8") async def test_humidity_change_humidifier_trigger_on_long_enough_2( - hass: HomeAssistant, setup_comp_8 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, True) @@ -1285,13 +1309,14 @@ async def test_humidity_change_humidifier_trigger_on_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_8") async def test_humidity_change_humidifier_trigger_off_long_enough_2( - hass: HomeAssistant, setup_comp_8 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive 
intervals.""" calls = await _setup_switch(hass, False) @@ -1305,7 +1330,7 @@ async def test_humidity_change_humidifier_trigger_off_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @@ -1360,7 +1385,7 @@ async def test_float_tolerance_values_2(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @@ -1702,8 +1727,9 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: assert state.state == STATE_OFF +@pytest.mark.usefixtures("setup_comp_1") async def test_sensor_stale_duration( - hass: HomeAssistant, setup_comp_1, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test turn off on sensor stale.""" @@ -1782,3 +1808,50 @@ async def test_sensor_stale_duration( # Not turning on by itself assert hass.states.get(humidifier_switch).state == STATE_OFF + + +async def test_device_id( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test for source entity device.""" + + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + source_device_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("switch", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + source_entity = entity_registry.async_get_or_create( + "switch", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("switch.test_source") is not None + + 
helper_config_entry = MockConfigEntry( + data={}, + domain=GENERIC_HYDROSTAT_DOMAIN, + options={ + "device_class": "humidifier", + "dry_tolerance": 2.0, + "humidifier": "switch.test_source", + "name": "Test", + "target_sensor": ENT_SENSOR, + "wet_tolerance": 4.0, + }, + title="Test", + ) + helper_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(helper_config_entry.entry_id) + await hass.async_block_till_done() + + helper_entity = entity_registry.async_get("humidifier.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id diff --git a/tests/components/generic_hygrostat/test_init.py b/tests/components/generic_hygrostat/test_init.py new file mode 100644 index 00000000000..bd4792f939d --- /dev/null +++ b/tests/components/generic_hygrostat/test_init.py @@ -0,0 +1,102 @@ +"""Test Generic Hygrostat component setup process.""" + +from __future__ import annotations + +from homeassistant.components.generic_hygrostat import ( + DOMAIN as GENERIC_HYDROSTAT_DOMAIN, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from .test_humidifier import ENT_SENSOR + +from tests.common import MockConfigEntry + + +async def test_device_cleaning( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test cleaning of devices linked to the helper config entry.""" + + # Source entity device config entry + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + + # Device entry of the source entity + source_device1_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("switch", "identifier_test1")}, + connections={("mac", "30:31:32:33:34:01")}, + ) + + # Source entity registry + source_entity = entity_registry.async_get_or_create( + "switch", + "test", + "source", + 
config_entry=source_config_entry, + device_id=source_device1_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("switch.test_source") is not None + + # Configure the configuration entry for helper + helper_config_entry = MockConfigEntry( + data={}, + domain=GENERIC_HYDROSTAT_DOMAIN, + options={ + "device_class": "humidifier", + "dry_tolerance": 2.0, + "humidifier": "switch.test_source", + "name": "Test", + "target_sensor": ENT_SENSOR, + "wet_tolerance": 4.0, + }, + title="Test", + ) + helper_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(helper_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the helper entity + helper_entity = entity_registry.async_get("humidifier.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id + + # Device entry incorrectly linked to config entry + device_registry.async_get_or_create( + config_entry_id=helper_config_entry.entry_id, + identifiers={("sensor", "identifier_test2")}, + connections={("mac", "30:31:32:33:34:02")}, + ) + device_registry.async_get_or_create( + config_entry_id=helper_config_entry.entry_id, + identifiers={("sensor", "identifier_test3")}, + connections={("mac", "30:31:32:33:34:03")}, + ) + await hass.async_block_till_done() + + # Before reloading the config entry, 3 devices are expected to be linked + devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( + helper_config_entry.entry_id + ) + assert len(devices_before_reload) == 3 + + # Config entry reload + await hass.config_entries.async_reload(helper_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the helper entity + helper_entity = entity_registry.async_get("humidifier.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id + + # After 
reloading the config entry, only one linked device is expected + devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( + helper_config_entry.entry_id + ) + assert len(devices_after_reload) == 1 + + assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index 1ecde733f48..0f438056fbd 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -21,7 +21,7 @@ from homeassistant.components.climate import ( PRESET_SLEEP, HVACMode, ) -from homeassistant.components.generic_thermostat import ( +from homeassistant.components.generic_thermostat.const import ( DOMAIN as GENERIC_THERMOSTAT_DOMAIN, ) from homeassistant.const import ( @@ -37,19 +37,20 @@ from homeassistant.const import ( ) import homeassistant.core as ha from homeassistant.core import ( - DOMAIN as HASS_DOMAIN, + DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant, State, callback, ) from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM from tests.common import ( + MockConfigEntry, assert_setup_component, async_fire_time_changed, async_mock_service, @@ -102,14 +103,15 @@ async def test_valid_conf(hass: HomeAssistant) -> None: @pytest.fixture -async def setup_comp_1(hass): +async def setup_comp_1(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.units = METRIC_SYSTEM assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() -async def test_heater_input_boolean(hass: HomeAssistant, setup_comp_1) -> None: 
+@pytest.mark.usefixtures("setup_comp_1") +async def test_heater_input_boolean(hass: HomeAssistant) -> None: """Test heater switching input_boolean.""" heater_switch = "input_boolean.test" assert await async_setup_component( @@ -141,8 +143,9 @@ async def test_heater_input_boolean(hass: HomeAssistant, setup_comp_1) -> None: assert hass.states.get(heater_switch).state == STATE_ON +@pytest.mark.usefixtures("setup_comp_1") async def test_heater_switch( - hass: HomeAssistant, setup_comp_1, mock_switch_entities: list[MockSwitch] + hass: HomeAssistant, mock_switch_entities: list[MockSwitch] ) -> None: """Test heater switching test switch.""" setup_test_component_platform(hass, switch.DOMAIN, mock_switch_entities) @@ -177,8 +180,9 @@ async def test_heater_switch( assert hass.states.get(heater_switch).state == STATE_ON +@pytest.mark.usefixtures("setup_comp_1") async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp_1 + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test setting a unique ID.""" unique_id = "some_unique_id" @@ -210,7 +214,7 @@ def _setup_sensor(hass, temp): @pytest.fixture -async def setup_comp_2(hass): +async def setup_comp_2(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.units = METRIC_SYSTEM assert await async_setup_component( @@ -283,7 +287,8 @@ async def test_setup_gets_current_temp_from_sensor(hass: HomeAssistant) -> None: assert hass.states.get(ENTITY).attributes["current_temperature"] == 18 -async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_default_setup_params(hass: HomeAssistant) -> None: """Test the setup with default parameters.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_temp") == 7 @@ -292,14 +297,16 @@ async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: assert state.attributes.get("target_temp_step") == 0.1 -async def 
test_get_hvac_modes(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_get_hvac_modes(hass: HomeAssistant) -> None: """Test that the operation list returns the correct modes.""" state = hass.states.get(ENTITY) modes = state.attributes.get("hvac_modes") assert modes == [HVACMode.HEAT, HVACMode.OFF] -async def test_set_target_temp(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_temp(hass: HomeAssistant) -> None: """Test the setting of the target temperature.""" await common.async_set_temperature(hass, 30) state = hass.states.get(ENTITY) @@ -322,7 +329,8 @@ async def test_set_target_temp(hass: HomeAssistant, setup_comp_2) -> None: (PRESET_ACTIVITY, 21), ], ) -async def test_set_away_mode(hass: HomeAssistant, setup_comp_2, preset, temp) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_away_mode(hass: HomeAssistant, preset, temp) -> None: """Test the setting away mode.""" await common.async_set_temperature(hass, 23) await common.async_set_preset_mode(hass, preset) @@ -342,8 +350,9 @@ async def test_set_away_mode(hass: HomeAssistant, setup_comp_2, preset, temp) -> (PRESET_ACTIVITY, 21), ], ) +@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_and_restore_prev_temp( - hass: HomeAssistant, setup_comp_2, preset, temp + hass: HomeAssistant, preset, temp ) -> None: """Test the setting and removing away mode. @@ -370,8 +379,9 @@ async def test_set_away_mode_and_restore_prev_temp( (PRESET_ACTIVITY, 21), ], ) +@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_twice_and_restore_prev_temp( - hass: HomeAssistant, setup_comp_2, preset, temp + hass: HomeAssistant, preset, temp ) -> None: """Test the setting away mode twice in a row. 
@@ -387,7 +397,8 @@ async def test_set_away_mode_twice_and_restore_prev_temp( assert state.attributes.get("temperature") == 23 -async def test_set_preset_mode_invalid(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_preset_mode_invalid(hass: HomeAssistant) -> None: """Test an invalid mode raises an error and ignore case when checking modes.""" await common.async_set_temperature(hass, 23) await common.async_set_preset_mode(hass, "away") @@ -402,7 +413,8 @@ async def test_set_preset_mode_invalid(hass: HomeAssistant, setup_comp_2) -> Non assert state.attributes.get("preset_mode") == "none" -async def test_sensor_bad_value(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_sensor_bad_value(hass: HomeAssistant) -> None: """Test sensor that have None as state.""" state = hass.states.get(ENTITY) temp = state.attributes.get("current_temperature") @@ -463,7 +475,8 @@ async def test_sensor_unavailable(hass: HomeAssistant) -> None: assert state.attributes.get("current_temperature") is None -async def test_set_target_temp_heater_on(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_temp_heater_on(hass: HomeAssistant) -> None: """Test if target temperature turn heater on.""" calls = _setup_switch(hass, False) _setup_sensor(hass, 25) @@ -471,12 +484,13 @@ async def test_set_target_temp_heater_on(hass: HomeAssistant, setup_comp_2) -> N await common.async_set_temperature(hass, 30) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_set_target_temp_heater_off(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_temp_heater_off(hass: HomeAssistant) -> None: """Test if target temperature turn 
heater off.""" calls = _setup_switch(hass, True) _setup_sensor(hass, 30) @@ -484,14 +498,13 @@ async def test_set_target_temp_heater_off(hass: HomeAssistant, setup_comp_2) -> await common.async_set_temperature(hass, 25) assert len(calls) == 2 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_heater_on_within_tolerance( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_temp_change_heater_on_within_tolerance(hass: HomeAssistant) -> None: """Test if temperature change doesn't turn on within tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -500,9 +513,8 @@ async def test_temp_change_heater_on_within_tolerance( assert len(calls) == 0 -async def test_temp_change_heater_on_outside_tolerance( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_temp_change_heater_on_outside_tolerance(hass: HomeAssistant) -> None: """Test if temperature change turn heater on outside cold tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -510,14 +522,13 @@ async def test_temp_change_heater_on_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_heater_off_within_tolerance( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_temp_change_heater_off_within_tolerance(hass: HomeAssistant) -> None: """Test if temperature change doesn't turn off within tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -526,9 +537,8 
@@ async def test_temp_change_heater_off_within_tolerance( assert len(calls) == 0 -async def test_temp_change_heater_off_outside_tolerance( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_temp_change_heater_off_outside_tolerance(hass: HomeAssistant) -> None: """Test if temperature change turn heater off outside hot tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -536,26 +546,26 @@ async def test_temp_change_heater_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_running_when_hvac_mode_is_off(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_running_when_hvac_mode_is_off(hass: HomeAssistant) -> None: """Test that the switch turns off when enabled is set False.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) await common.async_set_hvac_mode(hass, HVACMode.OFF) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_no_state_change_when_hvac_mode_off( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_no_state_change_when_hvac_mode_off(hass: HomeAssistant) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -565,7 +575,8 @@ async def test_no_state_change_when_hvac_mode_off( assert len(calls) == 0 -async def test_hvac_mode_heat(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def 
test_hvac_mode_heat(hass: HomeAssistant) -> None: """Test change mode from OFF to HEAT. Switch turns on when temp below setpoint and mode changes. @@ -578,7 +589,7 @@ async def test_hvac_mode_heat(hass: HomeAssistant, setup_comp_2) -> None: await common.async_set_hvac_mode(hass, HVACMode.HEAT) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH @@ -600,7 +611,7 @@ def _setup_switch(hass, is_on): @pytest.fixture -async def setup_comp_3(hass): +async def setup_comp_3(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( @@ -623,7 +634,8 @@ async def setup_comp_3(hass): await hass.async_block_till_done() -async def test_set_target_temp_ac_off(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_target_temp_ac_off(hass: HomeAssistant) -> None: """Test if target temperature turn ac off.""" calls = _setup_switch(hass, True) _setup_sensor(hass, 25) @@ -631,12 +643,13 @@ async def test_set_target_temp_ac_off(hass: HomeAssistant, setup_comp_3) -> None await common.async_set_temperature(hass, 30) assert len(calls) == 2 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_turn_away_mode_on_cooling(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_turn_away_mode_on_cooling(hass: HomeAssistant) -> None: """Test the setting away mode when cooling.""" _setup_switch(hass, True) _setup_sensor(hass, 25) @@ -647,7 +660,8 @@ async def test_turn_away_mode_on_cooling(hass: HomeAssistant, setup_comp_3) -> N assert state.attributes.get("temperature") == 30 -async def test_hvac_mode_cool(hass: 
HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_hvac_mode_cool(hass: HomeAssistant) -> None: """Test change mode from OFF to COOL. Switch turns on when temp below setpoint and mode changes. @@ -660,12 +674,13 @@ async def test_hvac_mode_cool(hass: HomeAssistant, setup_comp_3) -> None: await common.async_set_hvac_mode(hass, HVACMode.COOL) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_set_target_temp_ac_on(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_target_temp_ac_on(hass: HomeAssistant) -> None: """Test if target temperature turn ac on.""" calls = _setup_switch(hass, False) _setup_sensor(hass, 30) @@ -673,14 +688,13 @@ async def test_set_target_temp_ac_on(hass: HomeAssistant, setup_comp_3) -> None: await common.async_set_temperature(hass, 25) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_ac_off_within_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_temp_change_ac_off_within_tolerance(hass: HomeAssistant) -> None: """Test if temperature change doesn't turn ac off within tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -689,9 +703,8 @@ async def test_temp_change_ac_off_within_tolerance( assert len(calls) == 0 -async def test_set_temp_change_ac_off_outside_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_temp_change_ac_off_outside_tolerance(hass: HomeAssistant) -> None: """Test if temperature change turn ac off.""" calls = 
_setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -699,14 +712,13 @@ async def test_set_temp_change_ac_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_ac_on_within_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_temp_change_ac_on_within_tolerance(hass: HomeAssistant) -> None: """Test if temperature change doesn't turn ac on within tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 25) @@ -715,9 +727,8 @@ async def test_temp_change_ac_on_within_tolerance( assert len(calls) == 0 -async def test_temp_change_ac_on_outside_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_temp_change_ac_on_outside_tolerance(hass: HomeAssistant) -> None: """Test if temperature change turn ac on.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 25) @@ -725,28 +736,26 @@ async def test_temp_change_ac_on_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_running_when_operating_mode_is_off_2( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None: """Test that the switch turns off when enabled is set False.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) await common.async_set_hvac_mode(hass, HVACMode.OFF) assert len(calls) == 1 call = calls[0] - assert 
call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_no_state_change_when_operation_mode_off_2( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -858,7 +867,7 @@ async def test_heating_cooling_switch_toggles_when_outside_min_cycle_duration( # Then assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == expected_triggered_service_call assert call.data["entity_id"] == ENT_SWITCH @@ -911,7 +920,7 @@ async def test_hvac_mode_change_toggles_heating_cooling_switch_even_when_within_ @pytest.fixture -async def setup_comp_7(hass): +async def setup_comp_7(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( @@ -937,9 +946,8 @@ async def setup_comp_7(hass): await hass.async_block_till_done() -async def test_temp_change_ac_trigger_on_long_enough_3( - hass: HomeAssistant, setup_comp_7 -) -> None: +@pytest.mark.usefixtures("setup_comp_7") +async def test_temp_change_ac_trigger_on_long_enough_3(hass: HomeAssistant) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, True) await hass.async_block_till_done() @@ -957,14 +965,13 @@ async def test_temp_change_ac_trigger_on_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def 
test_temp_change_ac_trigger_off_long_enough_3( - hass: HomeAssistant, setup_comp_7 -) -> None: +@pytest.mark.usefixtures("setup_comp_7") +async def test_temp_change_ac_trigger_off_long_enough_3(hass: HomeAssistant) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, False) await hass.async_block_till_done() @@ -982,13 +989,13 @@ async def test_temp_change_ac_trigger_off_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_8(hass): +async def setup_comp_8(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( @@ -1012,9 +1019,8 @@ async def setup_comp_8(hass): await hass.async_block_till_done() -async def test_temp_change_heater_trigger_on_long_enough_2( - hass: HomeAssistant, setup_comp_8 -) -> None: +@pytest.mark.usefixtures("setup_comp_8") +async def test_temp_change_heater_trigger_on_long_enough_2(hass: HomeAssistant) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, True) await hass.async_block_till_done() @@ -1032,13 +1038,14 @@ async def test_temp_change_heater_trigger_on_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_8") async def test_temp_change_heater_trigger_off_long_enough_2( - hass: HomeAssistant, setup_comp_8 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, False) @@ -1057,13 +1064,13 @@ async def 
test_temp_change_heater_trigger_off_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_9(hass): +async def setup_comp_9(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -1086,12 +1093,13 @@ async def setup_comp_9(hass): await hass.async_block_till_done() -async def test_precision(hass: HomeAssistant, setup_comp_9) -> None: +@pytest.mark.usefixtures("setup_comp_9") +async def test_precision(hass: HomeAssistant) -> None: """Test that setting precision to tenths works as intended.""" hass.config.units = US_CUSTOMARY_SYSTEM - await common.async_set_temperature(hass, 23.27) + await common.async_set_temperature(hass, 55.27) state = hass.states.get(ENTITY) - assert state.attributes.get("temperature") == 23.3 + assert state.attributes.get("temperature") == 55.3 # check that target_temp_step defaults to precision assert state.attributes.get("target_temp_step") == 0.1 @@ -1229,7 +1237,7 @@ async def test_initial_hvac_off_force_heater_off(hass: HomeAssistant) -> None: # heater must be switched off assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @@ -1337,7 +1345,7 @@ async def test_restore_will_turn_off_when_loaded_second(hass: HomeAssistant) -> assert len(calls_on) == 0 assert len(calls_off) == 1 call = calls_off[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == "input_boolean.test" @@ -1431,3 +1439,50 @@ async def test_reload(hass: HomeAssistant) -> None: assert len(hass.states.async_all()) == 1 assert 
hass.states.get("climate.test") is None assert hass.states.get("climate.reload") + + +async def test_device_id( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test for source entity device.""" + + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + source_device_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("switch", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + source_entity = entity_registry.async_get_or_create( + "switch", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("switch.test_source") is not None + + helper_config_entry = MockConfigEntry( + data={}, + domain=GENERIC_THERMOSTAT_DOMAIN, + options={ + "name": "Test", + "heater": "switch.test_source", + "target_sensor": ENT_SENSOR, + "ac_mode": False, + "cold_tolerance": 0.3, + "hot_tolerance": 0.3, + }, + title="Test", + ) + helper_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(helper_config_entry.entry_id) + await hass.async_block_till_done() + + helper_entity = entity_registry.async_get("climate.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id diff --git a/tests/components/generic_thermostat/test_config_flow.py b/tests/components/generic_thermostat/test_config_flow.py index 81e06146a14..7a7fdabc6e6 100644 --- a/tests/components/generic_thermostat/test_config_flow.py +++ b/tests/components/generic_thermostat/test_config_flow.py @@ -6,12 +6,11 @@ from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.climate import PRESET_AWAY -from homeassistant.components.generic_thermostat.climate import ( +from homeassistant.components.generic_thermostat.const import ( 
CONF_AC_MODE, CONF_COLD_TOLERANCE, CONF_HEATER, CONF_HOT_TOLERANCE, - CONF_NAME, CONF_PRESETS, CONF_SENSOR, DOMAIN, @@ -21,6 +20,7 @@ from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, + CONF_NAME, STATE_OFF, UnitOfTemperature, ) diff --git a/tests/components/generic_thermostat/test_init.py b/tests/components/generic_thermostat/test_init.py new file mode 100644 index 00000000000..addae2f684e --- /dev/null +++ b/tests/components/generic_thermostat/test_init.py @@ -0,0 +1,98 @@ +"""Test Generic Thermostat component setup process.""" + +from __future__ import annotations + +from homeassistant.components.generic_thermostat.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_device_cleaning( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test cleaning of devices linked to the helper config entry.""" + + # Source entity device config entry + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + + # Device entry of the source entity + source_device1_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("switch", "identifier_test1")}, + connections={("mac", "30:31:32:33:34:01")}, + ) + + # Source entity registry + source_entity = entity_registry.async_get_or_create( + "switch", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device1_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("switch.test_source") is not None + + # Configure the configuration entry for helper + helper_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test", + "heater": "switch.test_source", + "target_sensor": 
"sensor.temperature", + "ac_mode": False, + "cold_tolerance": 0.3, + "hot_tolerance": 0.3, + }, + title="Test", + ) + helper_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(helper_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the helper entity + helper_entity = entity_registry.async_get("climate.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id + + # Device entry incorrectly linked to config entry + device_registry.async_get_or_create( + config_entry_id=helper_config_entry.entry_id, + identifiers={("sensor", "identifier_test2")}, + connections={("mac", "30:31:32:33:34:02")}, + ) + device_registry.async_get_or_create( + config_entry_id=helper_config_entry.entry_id, + identifiers={("sensor", "identifier_test3")}, + connections={("mac", "30:31:32:33:34:03")}, + ) + await hass.async_block_till_done() + + # Before reloading the config entry, 3 devices are expected to be linked + devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( + helper_config_entry.entry_id + ) + assert len(devices_before_reload) == 3 + + # Config entry reload + await hass.config_entries.async_reload(helper_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the helper entity + helper_entity = entity_registry.async_get("climate.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id + + # After reloading the config entry, only one linked device is expected + devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( + helper_config_entry.entry_id + ) + assert len(devices_after_reload) == 1 + + assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/geniushub/__init__.py b/tests/components/geniushub/__init__.py new file mode 100644 index 
00000000000..15886486e38 --- /dev/null +++ b/tests/components/geniushub/__init__.py @@ -0,0 +1 @@ +"""Tests for the geniushub integration.""" diff --git a/tests/components/geniushub/conftest.py b/tests/components/geniushub/conftest.py new file mode 100644 index 00000000000..125f1cfa80c --- /dev/null +++ b/tests/components/geniushub/conftest.py @@ -0,0 +1,65 @@ +"""GeniusHub tests configuration.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest + +from homeassistant.components.geniushub.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME + +from tests.common import MockConfigEntry +from tests.components.smhi.common import AsyncMock + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.geniushub.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_geniushub_client() -> Generator[AsyncMock]: + """Mock a GeniusHub client.""" + with patch( + "homeassistant.components.geniushub.config_flow.GeniusService", + autospec=True, + ) as mock_client: + client = mock_client.return_value + client.request.return_value = { + "data": { + "UID": "aa:bb:cc:dd:ee:ff", + } + } + yield client + + +@pytest.fixture +def mock_local_config_entry() -> MockConfigEntry: + """Mock a local config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="aa:bb:cc:dd:ee:ff", + data={ + CONF_HOST: "10.0.0.131", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + unique_id="aa:bb:cc:dd:ee:ff", + ) + + +@pytest.fixture +def mock_cloud_config_entry() -> MockConfigEntry: + """Mock a cloud config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Genius hub", + data={ + CONF_TOKEN: "abcdef", + }, + ) diff --git a/tests/components/geniushub/test_config_flow.py b/tests/components/geniushub/test_config_flow.py new 
file mode 100644 index 00000000000..9234e03e35a --- /dev/null +++ b/tests/components/geniushub/test_config_flow.py @@ -0,0 +1,482 @@ +"""Test the Geniushub config flow.""" + +from http import HTTPStatus +import socket +from typing import Any +from unittest.mock import AsyncMock + +from aiohttp import ClientConnectionError, ClientResponseError +import pytest + +from homeassistant.components.geniushub import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_local_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, +) -> None: + """Test full local flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "local_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "local_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "10.0.0.130" + assert result["data"] == { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + } + assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (socket.gaierror, "invalid_host"), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), + "invalid_auth", + ), + ( + 
ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), + "invalid_host", + ), + (TimeoutError, "cannot_connect"), + (ClientConnectionError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_local_flow_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test local flow exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "local_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "local_api" + + mock_geniushub_client.request.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_geniushub_client.request.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_local_duplicate_data( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_local_config_entry: MockConfigEntry, +) -> None: + """Test local flow aborts on duplicate data.""" + mock_local_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": 
"local_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "local_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_local_duplicate_mac( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_local_config_entry: MockConfigEntry, +) -> None: + """Test local flow aborts on duplicate MAC.""" + mock_local_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "local_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "local_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.131", + CONF_USERNAME: "test-username1", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_full_cloud_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, +) -> None: + """Test full cloud flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "cloud_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_TOKEN: "abcdef", + 
}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Genius hub" + assert result["data"] == { + CONF_TOKEN: "abcdef", + } + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (socket.gaierror, "invalid_host"), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), + "invalid_auth", + ), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), + "invalid_host", + ), + (TimeoutError, "cannot_connect"), + (ClientConnectionError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_cloud_flow_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test cloud flow exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "cloud_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + mock_geniushub_client.request.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_geniushub_client.request.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_cloud_duplicate( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_cloud_config_entry: MockConfigEntry, +) -> None: + """Test cloud flow aborts on duplicate data.""" + mock_cloud_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": 
SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "cloud_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +async def test_import_local_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], +) -> None: + """Test full local import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "10.0.0.130" + assert result["data"] == data + assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_TOKEN: "abcdef", + }, + { + CONF_TOKEN: "abcdef", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +async def test_import_cloud_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], +) -> None: + """Test full cloud import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Genius hub" + assert result["data"] == data + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: 
"10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + { + CONF_TOKEN: "abcdef", + }, + { + CONF_TOKEN: "abcdef", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +@pytest.mark.parametrize( + ("exception", "reason"), + [ + (socket.gaierror, "invalid_host"), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), + "invalid_auth", + ), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), + "invalid_host", + ), + (TimeoutError, "cannot_connect"), + (ClientConnectionError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_import_flow_exceptions( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], + exception: Exception, + reason: str, +) -> None: + """Test import flow exceptions.""" + mock_geniushub_client.request.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.131", + CONF_USERNAME: "test-username1", + CONF_PASSWORD: "test-password", + }, + ], +) +async def test_import_flow_local_duplicate( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_local_config_entry: MockConfigEntry, + data: dict[str, Any], +) -> None: + """Test import flow aborts on local duplicate data.""" + mock_local_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def 
test_import_flow_cloud_duplicate( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_cloud_config_entry: MockConfigEntry, +) -> None: + """Test import flow aborts on cloud duplicate data.""" + mock_cloud_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/geo_json_events/conftest.py b/tests/components/geo_json_events/conftest.py index beab7bf1403..11928e6f012 100644 --- a/tests/components/geo_json_events/conftest.py +++ b/tests/components/geo_json_events/conftest.py @@ -1,9 +1,9 @@ """Configuration for GeoJSON Events tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.geo_json_events import DOMAIN from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_URL diff --git a/tests/components/geo_location/test_trigger.py b/tests/components/geo_location/test_trigger.py index e5fb93dcf8f..7673f357a08 100644 --- a/tests/components/geo_location/test_trigger.py +++ b/tests/components/geo_location/test_trigger.py @@ -29,7 +29,7 @@ def calls(hass: HomeAssistant) -> list[ServiceCall]: @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -49,7 +49,7 @@ def setup_comp(hass): async def test_if_fires_on_zone_enter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone enter.""" context = Context() @@ -96,10 +96,10 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == 
context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "geo_location - geo_location.entity - hello - hello - test - 0" ) @@ -118,6 +118,8 @@ async def test_if_fires_on_zone_enter( blocking=True, ) + assert len(service_calls) == 2 + hass.states.async_set( "geo_location.entity", "hello", @@ -125,11 +127,11 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_not_fires_for_enter_on_zone_leave( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on zone leave.""" hass.states.async_set( @@ -162,11 +164,11 @@ async def test_if_not_fires_for_enter_on_zone_leave( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_zone_leave( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone leave.""" hass.states.async_set( @@ -199,11 +201,11 @@ async def test_if_fires_on_zone_leave( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_zone_leave_2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone leave for unavailable entity.""" hass.states.async_set( @@ -236,11 +238,11 @@ async def test_if_fires_on_zone_leave_2( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_for_leave_on_zone_enter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on zone enter.""" hass.states.async_set( @@ -273,11 +275,11 @@ async def 
test_if_not_fires_for_leave_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_zone_appear( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if entity appears in zone.""" assert await async_setup_component( @@ -317,15 +319,16 @@ async def test_if_fires_on_zone_appear( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id assert ( - calls[0].data["some"] == "geo_location - geo_location.entity - - hello - test" + service_calls[0].data["some"] + == "geo_location - geo_location.entity - - hello - test" ) async def test_if_fires_on_zone_appear_2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if entity appears in zone.""" assert await async_setup_component( @@ -373,16 +376,16 @@ async def test_if_fires_on_zone_appear_2( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "geo_location - geo_location.entity - goodbye - hello - test" ) async def test_if_fires_on_zone_disappear( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if entity disappears from zone.""" hass.states.async_set( @@ -423,14 +426,17 @@ async def test_if_fires_on_zone_disappear( hass.states.async_remove("geo_location.entity") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == "geo_location - geo_location.entity - hello - - test" + 
service_calls[0].data["some"] + == "geo_location - geo_location.entity - hello - - test" ) async def test_zone_undefined( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + service_calls: list[ServiceCall], + caplog: pytest.LogCaptureFixture, ) -> None: """Test for undefined zone.""" hass.states.async_set( @@ -466,7 +472,7 @@ async def test_zone_undefined( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 assert ( f"Unable to execute automation automation 0: Zone {zone_does_not_exist} not found" diff --git a/tests/components/geocaching/conftest.py b/tests/components/geocaching/conftest.py index 155cd2c5a7e..28d87176e46 100644 --- a/tests/components/geocaching/conftest.py +++ b/tests/components/geocaching/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from geocachingapi import GeocachingStatus import pytest -from typing_extensions import Generator from homeassistant.components.geocaching.const import DOMAIN diff --git a/tests/components/geofency/test_init.py b/tests/components/geofency/test_init.py index 2228cea80ee..3a98c6480bd 100644 --- a/tests/components/geofency/test_init.py +++ b/tests/components/geofency/test_init.py @@ -137,7 +137,7 @@ async def geofency_client( @pytest.fixture(autouse=True) -async def setup_zones(hass): +async def setup_zones(hass: HomeAssistant) -> None: """Set up Zone config in HA.""" assert await async_setup_component( hass, @@ -155,7 +155,7 @@ async def setup_zones(hass): @pytest.fixture -async def webhook_id(hass, geofency_client): +async def webhook_id(hass: HomeAssistant) -> str: """Initialize the Geofency component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -173,7 +173,7 @@ async def webhook_id(hass, geofency_client): return result["result"].data["webhook_id"] -async def 
test_data_validation(geofency_client, webhook_id) -> None: +async def test_data_validation(geofency_client: TestClient, webhook_id: str) -> None: """Test data validation.""" url = f"/api/webhook/{webhook_id}" @@ -195,8 +195,8 @@ async def test_gps_enter_and_exit_home( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - geofency_client, - webhook_id, + geofency_client: TestClient, + webhook_id: str, ) -> None: """Test GPS based zone enter and exit.""" url = f"/api/webhook/{webhook_id}" @@ -240,7 +240,7 @@ async def test_gps_enter_and_exit_home( async def test_beacon_enter_and_exit_home( - hass: HomeAssistant, geofency_client, webhook_id + hass: HomeAssistant, geofency_client: TestClient, webhook_id: str ) -> None: """Test iBeacon based zone enter and exit - a.k.a stationary iBeacon.""" url = f"/api/webhook/{webhook_id}" @@ -263,7 +263,7 @@ async def test_beacon_enter_and_exit_home( async def test_beacon_enter_and_exit_car( - hass: HomeAssistant, geofency_client, webhook_id + hass: HomeAssistant, geofency_client: TestClient, webhook_id: str ) -> None: """Test use of mobile iBeacon.""" url = f"/api/webhook/{webhook_id}" @@ -305,7 +305,7 @@ async def test_beacon_enter_and_exit_car( async def test_load_unload_entry( - hass: HomeAssistant, geofency_client, webhook_id + hass: HomeAssistant, geofency_client: TestClient, webhook_id: str ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" diff --git a/tests/components/gios/test_diagnostics.py b/tests/components/gios/test_diagnostics.py index 903de4872a2..a965e5550df 100644 --- a/tests/components/gios/test_diagnostics.py +++ b/tests/components/gios/test_diagnostics.py @@ -1,6 +1,7 @@ """Test GIOS diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -18,4 +19,6 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" entry 
= await init_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/github/conftest.py b/tests/components/github/conftest.py index df7de604c2c..ab262f3f522 100644 --- a/tests/components/github/conftest.py +++ b/tests/components/github/conftest.py @@ -1,9 +1,9 @@ """conftest for the GitHub integration.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN diff --git a/tests/components/glances/test_sensor.py b/tests/components/glances/test_sensor.py index 7dee47680ed..8e0367a712c 100644 --- a/tests/components/glances/test_sensor.py +++ b/tests/components/glances/test_sensor.py @@ -7,6 +7,7 @@ from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion from homeassistant.components.glances.const import DOMAIN +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -71,3 +72,40 @@ async def test_uptime_variation( async_fire_time_changed(hass) await hass.async_block_till_done() assert hass.states.get("sensor.0_0_0_0_uptime").state == "2024-02-15T12:49:52+00:00" + + +async def test_sensor_removed( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_api: AsyncMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test sensor removed server side.""" + + # Init with reference time + freezer.move_to(MOCK_REFERENCE_DATE) + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT, entry_id="test") + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + 
assert hass.states.get("sensor.0_0_0_0_ssl_disk_used").state != STATE_UNAVAILABLE + assert hass.states.get("sensor.0_0_0_0_memory_use").state != STATE_UNAVAILABLE + assert hass.states.get("sensor.0_0_0_0_uptime").state != STATE_UNAVAILABLE + + # Remove some sensors from Glances API data + mock_data = HA_SENSOR_DATA.copy() + mock_data.pop("fs") + mock_data.pop("mem") + mock_data.pop("uptime") + mock_api.return_value.get_ha_sensor_data = AsyncMock(return_value=mock_data) + + # Server stops providing some sensors, so state should switch to Unavailable + freezer.move_to(MOCK_REFERENCE_DATE + timedelta(minutes=2)) + freezer.tick(delta=timedelta(seconds=120)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.0_0_0_0_ssl_disk_used").state == STATE_UNAVAILABLE + assert hass.states.get("sensor.0_0_0_0_memory_use").state == STATE_UNAVAILABLE + assert hass.states.get("sensor.0_0_0_0_uptime").state == STATE_UNAVAILABLE diff --git a/tests/components/goodwe/test_diagnostics.py b/tests/components/goodwe/test_diagnostics.py index 21917265811..0a997edc594 100644 --- a/tests/components/goodwe/test_diagnostics.py +++ b/tests/components/goodwe/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import MagicMock, patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.goodwe import CONF_MODEL_FAMILY, DOMAIN from homeassistant.const import CONF_HOST @@ -32,4 +33,4 @@ async def test_entry_diagnostics( assert await async_setup_component(hass, DOMAIN, {}) result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/google/conftest.py b/tests/components/google/conftest.py index 26a32a64b21..791e5613b0b 100644 --- a/tests/components/google/conftest.py +++ b/tests/components/google/conftest.py @@ -2,7 +2,7 @@ from __future__ import 
annotations -from collections.abc import Awaitable, Callable +from collections.abc import AsyncGenerator, Awaitable, Callable, Generator import datetime import http import time @@ -13,7 +13,6 @@ from aiohttp.client_exceptions import ClientError from gcal_sync.auth import API_BASE_URL from oauth2client.client import OAuth2Credentials import pytest -from typing_extensions import AsyncGenerator, Generator import yaml from homeassistant.components.application_credentials import ( @@ -294,7 +293,7 @@ def mock_calendars_list( @pytest.fixture def mock_calendar_get( aioclient_mock: AiohttpClientMocker, -) -> Callable[[...], None]: +) -> Callable[..., None]: """Fixture for returning a calendar get response.""" def _result( @@ -316,7 +315,7 @@ def mock_calendar_get( @pytest.fixture def mock_insert_event( aioclient_mock: AiohttpClientMocker, -) -> Callable[[...], None]: +) -> Callable[..., None]: """Fixture for capturing event creation.""" def _expect_result( @@ -331,7 +330,7 @@ def mock_insert_event( @pytest.fixture(autouse=True) -async def set_time_zone(hass): +async def set_time_zone(hass: HomeAssistant) -> None: """Set the time zone for the tests.""" # Set our timezone to CST/Regina so we can check calculations # This keeps UTC-6 all year round diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 8e934925f46..11d4ec46bd1 100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -74,7 +74,7 @@ def upcoming_event_url(entity: str = TEST_ENTITY) -> str: class Client: """Test client with helper methods for calendar websocket.""" - def __init__(self, client): + def __init__(self, client) -> None: """Initialize Client.""" self.client = client self.id = 0 @@ -385,6 +385,9 @@ async def test_update_error( with patch("homeassistant.util.utcnow", return_value=now): async_fire_time_changed(hass, now) await hass.async_block_till_done() + # Ensure coordinator update completes + await 
hass.async_block_till_done() + await hass.async_block_till_done() # Entity is marked uanvailable due to API failure state = hass.states.get(TEST_ENTITY) @@ -414,6 +417,9 @@ async def test_update_error( with patch("homeassistant.util.utcnow", return_value=now): async_fire_time_changed(hass, now) await hass.async_block_till_done() + # Ensure coordinator update completes + await hass.async_block_till_done() + await hass.async_block_till_done() # State updated with new API response state = hass.states.get(TEST_ENTITY) @@ -606,6 +612,9 @@ async def test_future_event_update_behavior( freezer.move_to(now) async_fire_time_changed(hass, now) await hass.async_block_till_done() + # Ensure coordinator update completes + await hass.async_block_till_done() + await hass.async_block_till_done() # Event has started state = hass.states.get(TEST_ENTITY) @@ -643,6 +652,9 @@ async def test_future_event_offset_update_behavior( freezer.move_to(now) async_fire_time_changed(hass, now) await hass.async_block_till_done() + # Ensure coordinator update completes + await hass.async_block_till_done() + await hass.async_block_till_done() # Event has not started, but the offset was reached state = hass.states.get(TEST_ENTITY) @@ -827,7 +839,7 @@ async def test_websocket_create( hass: HomeAssistant, component_setup: ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -869,7 +881,7 @@ async def test_websocket_create_all_day( hass: HomeAssistant, component_setup: ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -1066,7 +1078,7 @@ async def test_readonly_websocket_create( hass: HomeAssistant, component_setup: 
ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -1117,7 +1129,7 @@ async def test_readonly_search_calendar( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_yaml, - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, diff --git a/tests/components/google/test_config_flow.py b/tests/components/google/test_config_flow.py index 12281f6d348..47156299b57 100644 --- a/tests/components/google/test_config_flow.py +++ b/tests/components/google/test_config_flow.py @@ -116,7 +116,7 @@ async def primary_calendar_status() -> HTTPStatus | None: @pytest.fixture(autouse=True) async def primary_calendar( - mock_calendar_get: Callable[[...], None], + mock_calendar_get: Callable[..., None], primary_calendar_error: ClientError | None, primary_calendar_status: HTTPStatus | None, primary_calendar_email: str, diff --git a/tests/components/google/test_diagnostics.py b/tests/components/google/test_diagnostics.py index 5d6259309b8..78eb6d7ceea 100644 --- a/tests/components/google/test_diagnostics.py +++ b/tests/components/google/test_diagnostics.py @@ -11,7 +11,6 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.auth.models import Credentials from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .conftest import TEST_EVENT, ApiResult, ComponentSetup @@ -55,12 +54,6 @@ def _get_test_client_generator( return auth_client -@pytest.fixture(autouse=True) -async def setup_diag(hass): - """Set up diagnostics platform.""" - assert await async_setup_component(hass, "diagnostics", {}) - - @freeze_time("2023-03-13 12:05:00-07:00") @pytest.mark.usefixtures("socket_enabled") async 
def test_diagnostics( diff --git a/tests/components/google/test_init.py b/tests/components/google/test_init.py index de5e2ea9145..cfcda18df3a 100644 --- a/tests/components/google/test_init.py +++ b/tests/components/google/test_init.py @@ -82,7 +82,7 @@ def assert_state(actual: State | None, expected: State | None) -> None: def add_event_call_service( hass: HomeAssistant, request: pytest.FixtureRequest, -) -> Callable[dict[str, Any], Awaitable[None]]: +) -> Callable[[dict[str, Any]], Awaitable[None]]: """Fixture for calling the add or create event service.""" (domain, service_call, data, target) = request.param @@ -422,7 +422,7 @@ async def test_add_event_invalid_params( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], date_fields: dict[str, Any], expected_error: type[Exception], error_match: str | None, @@ -457,14 +457,14 @@ async def test_add_event_date_in_x( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[[..., dict[str, Any]], None], + mock_insert_event: Callable[..., None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, date_fields: dict[str, Any], start_timedelta: datetime.timedelta, end_timedelta: datetime.timedelta, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service call that adds an event with various time ranges.""" @@ -496,10 +496,10 @@ async def test_add_event_date( component_setup: ComponentSetup, mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - 
add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service call that sets a date range.""" @@ -535,11 +535,11 @@ async def test_add_event_date_time( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service call that adds an event with a date time range.""" @@ -599,7 +599,7 @@ async def test_unsupported_create_event( mock_calendars_yaml: Mock, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, @@ -636,8 +636,8 @@ async def test_add_event_failure( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - mock_insert_event: Callable[[..., dict[str, Any]], None], - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + mock_insert_event: Callable[..., None], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service calls with incorrect fields.""" @@ -661,10 +661,10 @@ async def test_add_event_location( component_setup: ComponentSetup, mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: 
Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service call that sets a location field.""" @@ -879,7 +879,7 @@ async def test_assign_unique_id( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - mock_calendar_get: Callable[[...], None], + mock_calendar_get: Callable[..., None], config_entry: MockConfigEntry, ) -> None: """Test an existing config is updated to have unique id if it does not exist.""" @@ -918,7 +918,7 @@ async def test_assign_unique_id_failure( test_api_calendar: dict[str, Any], config_entry: MockConfigEntry, mock_events_list: ApiResult, - mock_calendar_get: Callable[[...], None], + mock_calendar_get: Callable[..., None], request_status: http.HTTPStatus, config_entry_status: ConfigEntryState, ) -> None: diff --git a/tests/components/google_assistant/test_diagnostics.py b/tests/components/google_assistant/test_diagnostics.py index 26d91ce7920..1d68079563c 100644 --- a/tests/components/google_assistant/test_diagnostics.py +++ b/tests/components/google_assistant/test_diagnostics.py @@ -50,4 +50,4 @@ async def test_diagnostics( config_entry = hass.config_entries.async_entries("google_assistant")[0] assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot(exclude=props("entry_id")) + ) == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/google_assistant/test_http.py b/tests/components/google_assistant/test_http.py index b041f69828f..273aac1559e 100644 --- a/tests/components/google_assistant/test_http.py +++ b/tests/components/google_assistant/test_http.py @@ -4,6 +4,7 @@ from datetime import UTC, datetime, timedelta from http import HTTPStatus import json import os +from pathlib import Path from typing import Any from unittest.mock import ANY, patch from uuid import uuid4 @@ -655,9 +656,7 @@ async def test_async_get_users( ) path = hass.config.config_dir / ".storage" / GoogleConfigStore._STORAGE_KEY 
os.makedirs(os.path.dirname(path), exist_ok=True) - with open(path, "w", encoding="utf8") as f: - f.write(store_data) - + await hass.async_add_executor_job(Path(path).write_text, store_data) assert await async_get_users(hass) == expected_users await hass.async_stop() diff --git a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index 2eeb3d16b81..ea8f6957e38 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -25,11 +25,12 @@ from homeassistant.components.google_assistant import ( from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, + EVENT_CALL_SERVICE, Platform, UnitOfTemperature, __version__, ) -from homeassistant.core import EVENT_CALL_SERVICE, HomeAssistant, State +from homeassistant.core import HomeAssistant, State from homeassistant.helpers import ( area_registry as ar, device_registry as dr, diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 63a34c01dac..54aa4035670 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -60,6 +60,7 @@ from homeassistant.const import ( ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, + EVENT_CALL_SERVICE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, @@ -75,12 +76,7 @@ from homeassistant.const import ( STATE_UNKNOWN, UnitOfTemperature, ) -from homeassistant.core import ( - DOMAIN as HA_DOMAIN, - EVENT_CALL_SERVICE, - HomeAssistant, - State, -) +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.util import color, dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter @@ -190,12 +186,12 @@ async def test_onoff_group(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} - 
on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) + on_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} - off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) + off_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} diff --git a/tests/components/google_generative_ai_conversation/conftest.py b/tests/components/google_generative_ai_conversation/conftest.py index 1761516e4f5..28c21a9b791 100644 --- a/tests/components/google_generative_ai_conversation/conftest.py +++ b/tests/components/google_generative_ai_conversation/conftest.py @@ -1,5 +1,6 @@ """Tests helpers.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -14,14 +15,14 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_genai(): +def mock_genai() -> Generator[None]: """Mock the genai call in async_setup_entry.""" with patch("google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.get_model"): yield @pytest.fixture -def mock_config_entry(hass, mock_genai): +def mock_config_entry(hass: HomeAssistant, mock_genai: None) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( domain="google_generative_ai_conversation", @@ -35,7 +36,9 @@ def mock_config_entry(hass, mock_genai): @pytest.fixture -def mock_config_entry_with_assist(hass, mock_config_entry): +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: """Mock a config entry with assist.""" hass.config_entries.async_update_entry( mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} @@ -44,7 +47,9 @@ def 
mock_config_entry_with_assist(hass, mock_config_entry): @pytest.fixture -async def mock_init_component(hass: HomeAssistant, mock_config_entry: ConfigEntry): +async def mock_init_component( + hass: HomeAssistant, mock_config_entry: ConfigEntry +) -> None: """Initialize integration.""" assert await async_setup_component(hass, "google_generative_ai_conversation", {}) await hass.async_block_till_done() diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index aec8d088b20..65238c5212a 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -35,7 +35,6 @@ You are a voice assistant for Home Assistant. Answer questions about the world truthfully. Answer in plain text. Keep it simple and to the point. - Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant. ''', 'role': 'user', }), @@ -88,7 +87,6 @@ You are a voice assistant for Home Assistant. Answer questions about the world truthfully. Answer in plain text. Keep it simple and to the point. - Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant. ''', 'role': 'user', }), @@ -142,7 +140,6 @@ You are a voice assistant for Home Assistant. Answer questions about the world truthfully. Answer in plain text. Keep it simple and to the point. - Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant. ''', 'tools': None, }), @@ -187,7 +184,6 @@ You are a voice assistant for Home Assistant. Answer questions about the world truthfully. Answer in plain text. Keep it simple and to the point. 
- Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant. ''', 'tools': None, }), @@ -219,7 +215,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-None] +# name: test_default_prompt[config_entry_options0-0-None] list([ tuple( '', @@ -244,7 +240,6 @@ You are a voice assistant for Home Assistant. Answer questions about the world truthfully. Answer in plain text. Keep it simple and to the point. - ''', 'tools': None, }), @@ -268,7 +263,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options0-0-conversation.google_generative_ai_conversation] list([ tuple( '', @@ -293,7 +288,6 @@ You are a voice assistant for Home Assistant. Answer questions about the world truthfully. Answer in plain text. Keep it simple and to the point. - ''', 'tools': None, }), @@ -317,7 +311,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-None] +# name: test_default_prompt[config_entry_options1-1-None] list([ tuple( '', @@ -366,7 +360,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options1-1-conversation.google_generative_ai_conversation] list([ tuple( '', @@ -415,3 +409,186 @@ ), ]) # --- +# name: test_function_call + list([ + tuple( + '', + tuple( + ), + dict({ + 'generation_config': dict({ + 'max_output_tokens': 150, + 'temperature': 1.0, + 'top_k': 64, + 'top_p': 0.95, + }), + 'model_name': 'models/gemini-1.5-flash-latest', + 'safety_settings': dict({ + 'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE', + 'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE', + 'HATE': 'BLOCK_MEDIUM_AND_ABOVE', + 'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE', + }), + 'system_instruction': ''' + Current time is 05:00:00. Today's date is 2024-05-24. + You are a voice assistant for Home Assistant. 
+ Answer questions about the world truthfully. + Answer in plain text. Keep it simple and to the point. + Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant. + ''', + 'tools': list([ + function_declarations { + name: "test_tool" + description: "Test function" + parameters { + type_: OBJECT + properties { + key: "param3" + value { + type_: OBJECT + properties { + key: "json" + value { + type_: STRING + } + } + } + } + properties { + key: "param2" + value { + type_: NUMBER + } + } + properties { + key: "param1" + value { + type_: ARRAY + description: "Test parameters" + items { + type_: STRING + } + } + } + } + } + , + ]), + }), + ), + tuple( + '().start_chat', + tuple( + ), + dict({ + 'history': list([ + ]), + }), + ), + tuple( + '().start_chat().send_message_async', + tuple( + 'Please call the test function', + ), + dict({ + }), + ), + tuple( + '().start_chat().send_message_async', + tuple( + parts { + function_response { + name: "test_tool" + response { + fields { + key: "result" + value { + string_value: "Test response" + } + } + } + } + } + , + ), + dict({ + }), + ), + ]) +# --- +# name: test_function_call_without_parameters + list([ + tuple( + '', + tuple( + ), + dict({ + 'generation_config': dict({ + 'max_output_tokens': 150, + 'temperature': 1.0, + 'top_k': 64, + 'top_p': 0.95, + }), + 'model_name': 'models/gemini-1.5-flash-latest', + 'safety_settings': dict({ + 'DANGEROUS': 'BLOCK_MEDIUM_AND_ABOVE', + 'HARASSMENT': 'BLOCK_MEDIUM_AND_ABOVE', + 'HATE': 'BLOCK_MEDIUM_AND_ABOVE', + 'SEXUAL': 'BLOCK_MEDIUM_AND_ABOVE', + }), + 'system_instruction': ''' + Current time is 05:00:00. Today's date is 2024-05-24. + You are a voice assistant for Home Assistant. + Answer questions about the world truthfully. + Answer in plain text. Keep it simple and to the point. + Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant. 
+ ''', + 'tools': list([ + function_declarations { + name: "test_tool" + description: "Test function" + } + , + ]), + }), + ), + tuple( + '().start_chat', + tuple( + ), + dict({ + 'history': list([ + ]), + }), + ), + tuple( + '().start_chat().send_message_async', + tuple( + 'Please call the test function', + ), + dict({ + }), + ), + tuple( + '().start_chat().send_message_async', + tuple( + parts { + function_response { + name: "test_tool" + response { + fields { + key: "result" + value { + string_value: "Test response" + } + } + } + } + } + , + ), + dict({ + }), + ), + ]) +# --- diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py index 24ed06a408f..d4992c732e1 100644 --- a/tests/components/google_generative_ai_conversation/test_config_flow.py +++ b/tests/components/google_generative_ai_conversation/test_config_flow.py @@ -3,7 +3,7 @@ from unittest.mock import AsyncMock, Mock, patch from google.api_core.exceptions import ClientError, DeadlineExceeded -from google.rpc.error_details_pb2 import ErrorInfo +from google.rpc.error_details_pb2 import ErrorInfo # pylint: disable=no-name-in-module import pytest from homeassistant import config_entries @@ -154,10 +154,10 @@ async def test_form(hass: HomeAssistant) -> None: ), ], ) +@pytest.mark.usefixtures("mock_init_component") async def test_options_switching( hass: HomeAssistant, - mock_config_entry, - mock_init_component, + mock_config_entry: MockConfigEntry, mock_models, current_options, new_options, diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 7f4fe886e90..a8eae34e08b 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock, MagicMock, patch 
from freezegun import freeze_time from google.ai.generativelanguage_v1beta.types.content import FunctionCall -from google.api_core.exceptions import GoogleAPICallError +from google.api_core.exceptions import GoogleAPIError import google.generativeai.types as genai_types import pytest from syrupy.assertion import SnapshotAssertion @@ -17,8 +17,9 @@ from homeassistant.components.google_generative_ai_conversation.const import ( ) from homeassistant.components.google_generative_ai_conversation.conversation import ( _escape_decode, + _format_schema, ) -from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent, llm @@ -38,19 +39,23 @@ def freeze_the_time(): "agent_id", [None, "conversation.google_generative_ai_conversation"] ) @pytest.mark.parametrize( - "config_entry_options", + ("config_entry_options", "expected_features"), [ - {}, - {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, + ({}, 0), + ( + {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, + conversation.ConversationEntityFeature.CONTROL, + ), ], ) +@pytest.mark.usefixtures("mock_init_component") async def test_default_prompt( hass: HomeAssistant, mock_config_entry: MockConfigEntry, - mock_init_component, snapshot: SnapshotAssertion, agent_id: str | None, config_entry_options: {}, + expected_features: conversation.ConversationEntityFeature, hass_ws_client: WebSocketGenerator, ) -> None: """Test that the default prompt works.""" @@ -75,10 +80,6 @@ async def test_default_prompt( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_api_prompt", return_value="", ), - patch( - "homeassistant.components.google_generative_ai_conversation.conversation.llm.async_render_no_api_prompt", - return_value="", - ), ): mock_chat = AsyncMock() 
mock_model.return_value.start_chat.return_value = mock_chat @@ -101,15 +102,18 @@ async def test_default_prompt( assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot assert mock_get_tools.called == (CONF_LLM_HASS_API in config_entry_options) + state = hass.states.get("conversation.google_generative_ai_conversation") + assert state.attributes[ATTR_SUPPORTED_FEATURES] == expected_features + @pytest.mark.parametrize( ("model_name", "supports_system_instruction"), [("models/gemini-1.5-pro", True), ("models/gemini-1.0-pro", False)], ) +@pytest.mark.usefixtures("mock_init_component") async def test_chat_history( hass: HomeAssistant, mock_config_entry: MockConfigEntry, - mock_init_component, model_name: str, supports_system_instruction: bool, snapshot: SnapshotAssertion, @@ -171,11 +175,12 @@ async def test_chat_history( @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" ) +@pytest.mark.usefixtures("mock_init_component") async def test_function_call( mock_get_tools, hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, + snapshot: SnapshotAssertion, ) -> None: """Test function calling.""" agent_id = mock_config_entry_with_assist.entry_id @@ -188,7 +193,9 @@ async def test_function_call( { vol.Optional("param1", description="Test parameters"): [ vol.All(str, vol.Lower) - ] + ], + vol.Optional("param2"): vol.Any(float, int), + vol.Optional("param3"): dict, } ) @@ -260,6 +267,7 @@ async def test_function_call( device_id="test_device", ), ) + assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot # Test conversating tracing traces = trace.async_get_traces() @@ -269,21 +277,103 @@ async def test_function_call( assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.LLM_TOOL_CALL, + 
trace.ConversationTraceEventType.TOOL_CALL, ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] assert "Answer in plain text" in detail_event["data"]["prompt"] + assert [t.name for t in detail_event["data"]["tools"]] == ["test_tool"] @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" ) +@pytest.mark.usefixtures("mock_init_component") +async def test_function_call_without_parameters( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test function calling without parameters.""" + agent_id = mock_config_entry_with_assist.entry_id + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema({}) + + mock_get_tools.return_value = [mock_tool] + + with patch("google.generativeai.GenerativeModel") as mock_model: + mock_chat = AsyncMock() + mock_model.return_value.start_chat.return_value = mock_chat + chat_response = MagicMock() + mock_chat.send_message_async.return_value = chat_response + mock_part = MagicMock() + mock_part.function_call = FunctionCall(name="test_tool", args={}) + + def tool_call(hass, tool_input, tool_context): + mock_part.function_call = None + mock_part.text = "Hi there!" + return {"result": "Test response"} + + mock_tool.async_call.side_effect = tool_call + chat_response.parts = [mock_part] + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + device_id="test_device", + ) + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" 
+ mock_tool_call = mock_chat.send_message_async.mock_calls[1][1][0] + mock_tool_call = type(mock_tool_call).to_dict(mock_tool_call) + assert mock_tool_call == { + "parts": [ + { + "function_response": { + "name": "test_tool", + "response": { + "result": "Test response", + }, + }, + }, + ], + "role": "", + } + + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={}, + ), + llm.LLMContext( + platform="google_generative_ai_conversation", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id="test_device", + ), + ) + assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot + + +@patch( + "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" +) +@pytest.mark.usefixtures("mock_init_component") async def test_function_exception( mock_get_tools, hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, ) -> None: """Test exception in function calling.""" agent_id = mock_config_entry_with_assist.entry_id @@ -361,14 +451,15 @@ async def test_function_exception( ) +@pytest.mark.usefixtures("mock_init_component") async def test_error_handling( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test that client errors are caught.""" with patch("google.generativeai.GenerativeModel") as mock_model: mock_chat = AsyncMock() mock_model.return_value.start_chat.return_value = mock_chat - mock_chat.send_message_async.side_effect = GoogleAPICallError("some error") + mock_chat.send_message_async.side_effect = GoogleAPIError("some error") result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id ) @@ -376,12 +467,13 @@ async def test_error_handling( assert result.response.response_type == 
intent.IntentResponseType.ERROR, result assert result.response.error_code == "unknown", result assert result.response.as_dict()["speech"]["plain"]["speech"] == ( - "Sorry, I had a problem talking to Google Generative AI: None some error" + "Sorry, I had a problem talking to Google Generative AI: some error" ) +@pytest.mark.usefixtures("mock_init_component") async def test_blocked_response( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test blocked response.""" with patch("google.generativeai.GenerativeModel") as mock_model: @@ -401,8 +493,9 @@ async def test_blocked_response( ) +@pytest.mark.usefixtures("mock_init_component") async def test_empty_response( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test empty response.""" with patch("google.generativeai.GenerativeModel") as mock_model: @@ -422,10 +515,9 @@ async def test_empty_response( ) +@pytest.mark.usefixtures("mock_init_component") async def test_invalid_llm_api( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test handling of invalid llm api.""" hass.config_entries.async_update_entry( @@ -514,10 +606,9 @@ async def test_template_variables( assert "The user id is 12345." 
in mock_model.mock_calls[0][2]["system_instruction"] +@pytest.mark.usefixtures("mock_init_component") async def test_conversation_agent( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test GoogleGenerativeAIAgent.""" agent = conversation.get_agent_manager(hass).async_get_agent( @@ -539,3 +630,61 @@ async def test_escape_decode() -> None: "param2": "param2's value", "param3": {"param31": "Cheminée", "param32": "Cheminée"}, } + + +@pytest.mark.parametrize( + ("openapi", "protobuf"), + [ + ( + {"type": "string", "enum": ["a", "b", "c"]}, + {"type_": "STRING", "enum": ["a", "b", "c"]}, + ), + ( + {"type": "integer", "enum": [1, 2, 3]}, + {"type_": "STRING", "enum": ["1", "2", "3"]}, + ), + ({"anyOf": [{"type": "integer"}, {"type": "number"}]}, {"type_": "INTEGER"}), + ( + { + "anyOf": [ + {"anyOf": [{"type": "integer"}, {"type": "number"}]}, + {"anyOf": [{"type": "integer"}, {"type": "number"}]}, + ] + }, + {"type_": "INTEGER"}, + ), + ({"type": "string", "format": "lower"}, {"type_": "STRING"}), + ({"type": "boolean", "format": "bool"}, {"type_": "BOOLEAN"}), + ( + {"type": "number", "format": "percent"}, + {"type_": "NUMBER", "format_": "percent"}, + ), + ( + { + "type": "object", + "properties": {"var": {"type": "string"}}, + "required": [], + }, + { + "type_": "OBJECT", + "properties": {"var": {"type_": "STRING"}}, + "required": [], + }, + ), + ( + {"type": "object", "additionalProperties": True}, + { + "type_": "OBJECT", + "properties": {"json": {"type_": "STRING"}}, + "required": [], + }, + ), + ( + {"type": "array", "items": {"type": "string"}}, + {"type_": "ARRAY", "items": {"type_": "STRING"}}, + ), + ], +) +async def test_format_schema(openapi, protobuf) -> None: + """Test _format_schema.""" + assert _format_schema(openapi) == protobuf diff --git a/tests/components/google_generative_ai_conversation/test_init.py 
b/tests/components/google_generative_ai_conversation/test_init.py index 7afa9b4a31e..4875323d094 100644 --- a/tests/components/google_generative_ai_conversation/test_init.py +++ b/tests/components/google_generative_ai_conversation/test_init.py @@ -3,7 +3,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from google.api_core.exceptions import ClientError, DeadlineExceeded -from google.rpc.error_details_pb2 import ErrorInfo +from google.rpc.error_details_pb2 import ErrorInfo # pylint: disable=no-name-in-module import pytest from syrupy.assertion import SnapshotAssertion @@ -14,11 +14,9 @@ from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry +@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_without_images( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, + hass: HomeAssistant, snapshot: SnapshotAssertion ) -> None: """Test generate content service.""" stubbed_generated_content = ( @@ -46,11 +44,9 @@ async def test_generate_content_service_without_images( assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot +@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, + hass: HomeAssistant, snapshot: SnapshotAssertion ) -> None: """Test generate content service.""" stubbed_generated_content = ( @@ -134,11 +130,9 @@ async def test_generate_content_response_has_empty_parts( ) +@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image_not_allowed_path( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, ) -> None: """Test generate content service with an image in a not allowed path.""" with ( @@ -165,11 +159,9 @@ async def 
test_generate_content_service_with_image_not_allowed_path( ) +@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image_not_exists( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, ) -> None: """Test generate content service with an image that does not exist.""" with ( @@ -192,12 +184,8 @@ async def test_generate_content_service_with_image_not_exists( ) -async def test_generate_content_service_with_non_image( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, -) -> None: +@pytest.mark.usefixtures("mock_init_component") +async def test_generate_content_service_with_non_image(hass: HomeAssistant) -> None: """Test generate content service with a non image.""" with ( patch("pathlib.Path.exists", return_value=True), @@ -254,5 +242,4 @@ async def test_config_entry_error( assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state == state - mock_config_entry.async_get_active_flows(hass, {"reauth"}) assert any(mock_config_entry.async_get_active_flows(hass, {"reauth"})) == reauth diff --git a/tests/components/google_pubsub/test_init.py b/tests/components/google_pubsub/test_init.py index a793ade5312..fba561f6df1 100644 --- a/tests/components/google_pubsub/test_init.py +++ b/tests/components/google_pubsub/test_init.py @@ -1,9 +1,10 @@ """The tests for the Google Pub/Sub component.""" +from collections.abc import Generator from dataclasses import dataclass from datetime import datetime import os -from unittest import mock +from unittest.mock import MagicMock, Mock, patch import pytest @@ -40,30 +41,30 @@ async def test_nested() -> None: @pytest.fixture(autouse=True, name="mock_client") -def mock_client_fixture(): +def mock_client_fixture() -> Generator[MagicMock]: """Mock the pubsub client.""" - with 
mock.patch(f"{GOOGLE_PUBSUB_PATH}.PublisherClient") as client: + with patch(f"{GOOGLE_PUBSUB_PATH}.PublisherClient") as client: setattr( client, "from_service_account_json", - mock.MagicMock(return_value=mock.MagicMock()), + MagicMock(return_value=MagicMock()), ) yield client @pytest.fixture(autouse=True, name="mock_is_file") -def mock_is_file_fixture(): +def mock_is_file_fixture() -> Generator[MagicMock]: """Mock os.path.isfile.""" - with mock.patch(f"{GOOGLE_PUBSUB_PATH}.os.path.isfile") as is_file: + with patch(f"{GOOGLE_PUBSUB_PATH}.os.path.isfile") as is_file: is_file.return_value = True yield is_file @pytest.fixture(autouse=True) -def mock_json(hass, monkeypatch): +def mock_json(monkeypatch: pytest.MonkeyPatch) -> None: """Mock the event bus listener and os component.""" monkeypatch.setattr( - f"{GOOGLE_PUBSUB_PATH}.json.dumps", mock.Mock(return_value=mock.MagicMock()) + f"{GOOGLE_PUBSUB_PATH}.json.dumps", Mock(return_value=MagicMock()) ) diff --git a/tests/components/google_sheets/test_config_flow.py b/tests/components/google_sheets/test_config_flow.py index 0da046645d2..a504d8c4280 100644 --- a/tests/components/google_sheets/test_config_flow.py +++ b/tests/components/google_sheets/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Google Sheets config flow.""" +from collections.abc import Generator from unittest.mock import Mock, patch from gspread import GSpreadException import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.application_credentials import ( diff --git a/tests/components/google_sheets/test_init.py b/tests/components/google_sheets/test_init.py index 014e89349e2..700783a2e30 100644 --- a/tests/components/google_sheets/test_init.py +++ b/tests/components/google_sheets/test_init.py @@ -214,6 +214,32 @@ async def test_append_sheet( assert len(mock_client.mock_calls) == 8 +async def test_append_sheet_multiple_rows( + hass: HomeAssistant, + setup_integration: ComponentSetup, + 
config_entry: MockConfigEntry, +) -> None: + """Test service call appending to a sheet.""" + await setup_integration() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].state is ConfigEntryState.LOADED + + with patch("homeassistant.components.google_sheets.Client") as mock_client: + await hass.services.async_call( + DOMAIN, + "append_sheet", + { + "config_entry": config_entry.entry_id, + "worksheet": "Sheet1", + "data": [{"foo": "bar"}, {"foo": "bar2"}], + }, + blocking=True, + ) + assert len(mock_client.mock_calls) == 8 + + async def test_append_sheet_api_error( hass: HomeAssistant, setup_integration: ComponentSetup, diff --git a/tests/components/google_tasks/snapshots/test_todo.ambr b/tests/components/google_tasks/snapshots/test_todo.ambr index af8dec6a182..76611ba4a31 100644 --- a/tests/components/google_tasks/snapshots/test_todo.ambr +++ b/tests/components/google_tasks/snapshots/test_todo.ambr @@ -79,9 +79,6 @@ }), ]) # --- -# name: test_move_todo_item[api_responses0].4 - None -# --- # name: test_parent_child_ordering[api_responses0] list([ dict({ diff --git a/tests/components/google_tasks/test_config_flow.py b/tests/components/google_tasks/test_config_flow.py index f2655afd602..f8ccc5e048f 100644 --- a/tests/components/google_tasks/test_config_flow.py +++ b/tests/components/google_tasks/test_config_flow.py @@ -1,11 +1,11 @@ """Test the Google Tasks config flow.""" +from collections.abc import Generator from unittest.mock import Mock, patch from googleapiclient.errors import HttpError from httplib2 import Response import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.google_tasks.const import ( diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index afbaabe5cd0..b0ee135d4a9 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -10,8 
+10,16 @@ from httplib2 import Response import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -376,8 +384,8 @@ async def test_task_items_error_response( ("api_responses", "item_data"), [ (CREATE_API_RESPONSES, {}), - (CREATE_API_RESPONSES, {"due_date": "2023-11-18"}), - (CREATE_API_RESPONSES, {"description": "6-pack"}), + (CREATE_API_RESPONSES, {ATTR_DUE_DATE: "2023-11-18"}), + (CREATE_API_RESPONSES, {ATTR_DESCRIPTION: "6-pack"}), ], ids=["summary", "due", "description"], ) @@ -399,9 +407,9 @@ async def test_create_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda", **item_data}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda", **item_data}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -439,9 +447,9 @@ async def test_create_todo_list_item_error( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -464,9 +472,9 @@ async def test_update_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "rename": "Soda", "status": "completed"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_RENAME: "Soda", ATTR_STATUS: "completed"}, + 
target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -504,9 +512,9 @@ async def test_update_todo_list_item_error( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "rename": "Soda", "status": "completed"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_RENAME: "Soda", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -514,12 +522,12 @@ async def test_update_todo_list_item_error( @pytest.mark.parametrize( ("api_responses", "item_data"), [ - (UPDATE_API_RESPONSES, {"rename": "Soda"}), - (UPDATE_API_RESPONSES, {"due_date": "2023-11-18"}), - (UPDATE_API_RESPONSES, {"due_date": None}), - (UPDATE_API_RESPONSES, {"description": "At least one gallon"}), - (UPDATE_API_RESPONSES, {"description": ""}), - (UPDATE_API_RESPONSES, {"description": None}), + (UPDATE_API_RESPONSES, {ATTR_RENAME: "Soda"}), + (UPDATE_API_RESPONSES, {ATTR_DUE_DATE: "2023-11-18"}), + (UPDATE_API_RESPONSES, {ATTR_DUE_DATE: None}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: "At least one gallon"}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: ""}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: None}), ], ids=( "rename", @@ -548,9 +556,9 @@ async def test_partial_update( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", **item_data}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", **item_data}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -578,9 +586,9 @@ async def test_partial_update_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "status": "needs_action"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: 
"some-task-id", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -622,9 +630,9 @@ async def test_delete_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -670,9 +678,9 @@ async def test_delete_partial_failure( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -711,9 +719,9 @@ async def test_delete_invalid_json_response( with pytest.raises(HomeAssistantError, match="unexpected response"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -750,9 +758,9 @@ async def test_delete_server_error( with pytest.raises(HomeAssistantError, match="responded with error"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -942,9 +950,9 @@ async def test_susbcribe( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": uid, "rename": "Milk"}, - 
target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_RENAME: "Milk"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) diff --git a/tests/components/google_translate/conftest.py b/tests/components/google_translate/conftest.py index 82f8d50b83c..aa84c201f0e 100644 --- a/tests/components/google_translate/conftest.py +++ b/tests/components/google_translate/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Google Translate text-to-speech tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index d19b1269438..95313df6140 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from http import HTTPStatus from pathlib import Path from typing import Any @@ -9,21 +10,16 @@ from unittest.mock import MagicMock, patch from gtts import gTTSError import pytest -from typing_extensions import Generator from homeassistant.components import tts from homeassistant.components.google_translate.const import CONF_TLD, DOMAIN -from homeassistant.components.media_player import ( - ATTR_MEDIA_CONTENT_ID, - DOMAIN as DOMAIN_MP, - SERVICE_PLAY_MEDIA, -) +from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_mock_service +from tests.common import MockConfigEntry from tests.components.tts.common import retrieve_media from tests.typing import ClientSessionGenerator @@ -34,15 +30,8 @@ def 
tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Mock media player calls.""" - return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) @pytest.fixture(autouse=True) @@ -126,7 +115,7 @@ async def test_tts_service( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -139,9 +128,11 @@ async def test_tts_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -181,7 +172,7 @@ async def test_service_say_german_config( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -194,9 +185,11 @@ async def test_service_say_german_config( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -236,7 +229,7 @@ async def test_service_say_german_service( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, 
tts_service: str, service_data: dict[str, Any], @@ -249,9 +242,11 @@ async def test_service_say_german_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -290,7 +285,7 @@ async def test_service_say_en_uk_config( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -303,9 +298,11 @@ async def test_service_say_en_uk_config( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -345,7 +342,7 @@ async def test_service_say_en_uk_service( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -358,9 +355,11 @@ async def test_service_say_en_uk_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -400,7 +399,7 @@ async def test_service_say_en_couk( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -413,9 +412,11 @@ async 
def test_service_say_en_couk( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -454,7 +455,7 @@ async def test_service_say_error( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -469,9 +470,11 @@ async def test_service_say_error( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.NOT_FOUND ) assert len(mock_gtts.mock_calls) == 2 diff --git a/tests/components/google_travel_time/conftest.py b/tests/components/google_travel_time/conftest.py index 141b40eff29..7d1e4791eee 100644 --- a/tests/components/google_travel_time/conftest.py +++ b/tests/components/google_travel_time/conftest.py @@ -1,17 +1,22 @@ """Fixtures for Google Time Travel tests.""" -from unittest.mock import patch +from collections.abc import Generator +from typing import Any +from unittest.mock import MagicMock, patch from googlemaps.exceptions import ApiError, Timeout, TransportError import pytest from homeassistant.components.google_travel_time.const import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture(name="mock_config") -async def mock_config_fixture(hass, data, options): +async def mock_config_fixture( + hass: HomeAssistant, data: dict[str, Any], options: dict[str, Any] +) -> MockConfigEntry: """Mock a Google Travel Time config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -26,7 +31,7 
@@ async def mock_config_fixture(hass, data, options): @pytest.fixture(name="bypass_setup") -def bypass_setup_fixture(): +def bypass_setup_fixture() -> Generator[None]: """Bypass entry setup.""" with patch( "homeassistant.components.google_travel_time.async_setup_entry", @@ -36,7 +41,7 @@ def bypass_setup_fixture(): @pytest.fixture(name="bypass_platform_setup") -def bypass_platform_setup_fixture(): +def bypass_platform_setup_fixture() -> Generator[None]: """Bypass platform setup.""" with patch( "homeassistant.components.google_travel_time.sensor.async_setup_entry", @@ -46,7 +51,7 @@ def bypass_platform_setup_fixture(): @pytest.fixture(name="validate_config_entry") -def validate_config_entry_fixture(): +def validate_config_entry_fixture() -> Generator[MagicMock]: """Return valid config entry.""" with ( patch("homeassistant.components.google_travel_time.helpers.Client"), @@ -59,24 +64,24 @@ def validate_config_entry_fixture(): @pytest.fixture(name="invalidate_config_entry") -def invalidate_config_entry_fixture(validate_config_entry): +def invalidate_config_entry_fixture(validate_config_entry: MagicMock) -> None: """Return invalid config entry.""" validate_config_entry.side_effect = ApiError("test") @pytest.fixture(name="invalid_api_key") -def invalid_api_key_fixture(validate_config_entry): +def invalid_api_key_fixture(validate_config_entry: MagicMock) -> None: """Throw a REQUEST_DENIED ApiError.""" validate_config_entry.side_effect = ApiError("REQUEST_DENIED", "Invalid API key.") @pytest.fixture(name="timeout") -def timeout_fixture(validate_config_entry): +def timeout_fixture(validate_config_entry: MagicMock) -> None: """Throw a Timeout exception.""" validate_config_entry.side_effect = Timeout() @pytest.fixture(name="transport_error") -def transport_error_fixture(validate_config_entry): +def transport_error_fixture(validate_config_entry: MagicMock) -> None: """Throw a TransportError exception.""" validate_config_entry.side_effect = TransportError("Unknown.") diff 
--git a/tests/components/google_travel_time/test_config_flow.py b/tests/components/google_travel_time/test_config_flow.py index 270b82272d8..d16d1c1ffc9 100644 --- a/tests/components/google_travel_time/test_config_flow.py +++ b/tests/components/google_travel_time/test_config_flow.py @@ -29,6 +29,8 @@ from homeassistant.data_entry_flow import FlowResultType from .const import MOCK_CONFIG, RECONFIGURE_CONFIG +from tests.common import MockConfigEntry + async def assert_common_reconfigure_steps( hass: HomeAssistant, reconfigure_result: config_entries.ConfigFlowResult @@ -194,7 +196,7 @@ async def test_malformed_api_key(hass: HomeAssistant) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry", "bypass_setup") -async def test_reconfigure(hass: HomeAssistant, mock_config) -> None: +async def test_reconfigure(hass: HomeAssistant, mock_config: MockConfigEntry) -> None: """Test reconfigure flow.""" reconfigure_result = await hass.config_entries.flow.async_init( DOMAIN, @@ -223,7 +225,7 @@ async def test_reconfigure(hass: HomeAssistant, mock_config) -> None: ) @pytest.mark.usefixtures("invalidate_config_entry") async def test_reconfigure_invalid_config_entry( - hass: HomeAssistant, mock_config + hass: HomeAssistant, mock_config: MockConfigEntry ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -259,7 +261,9 @@ async def test_reconfigure_invalid_config_entry( ], ) @pytest.mark.usefixtures("invalid_api_key") -async def test_reconfigure_invalid_api_key(hass: HomeAssistant, mock_config) -> None: +async def test_reconfigure_invalid_api_key( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -293,7 +297,9 @@ async def test_reconfigure_invalid_api_key(hass: HomeAssistant, mock_config) -> ], ) @pytest.mark.usefixtures("transport_error") -async def test_reconfigure_transport_error(hass: HomeAssistant, mock_config) -> None: +async 
def test_reconfigure_transport_error( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -327,7 +333,9 @@ async def test_reconfigure_transport_error(hass: HomeAssistant, mock_config) -> ], ) @pytest.mark.usefixtures("timeout") -async def test_reconfigure_timeout(hass: HomeAssistant, mock_config) -> None: +async def test_reconfigure_timeout( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -361,7 +369,7 @@ async def test_reconfigure_timeout(hass: HomeAssistant, mock_config) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_options_flow(hass: HomeAssistant, mock_config) -> None: +async def test_options_flow(hass: HomeAssistant, mock_config: MockConfigEntry) -> None: """Test options flow.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -422,7 +430,9 @@ async def test_options_flow(hass: HomeAssistant, mock_config) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_options_flow_departure_time(hass: HomeAssistant, mock_config) -> None: +async def test_options_flow_departure_time( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test options flow with departure time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -492,7 +502,9 @@ async def test_options_flow_departure_time(hass: HomeAssistant, mock_config) -> ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_departure_time(hass: HomeAssistant, mock_config) -> None: +async def test_reset_departure_time( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test resetting departure time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -538,7 +550,9 @@ async def 
test_reset_departure_time(hass: HomeAssistant, mock_config) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_arrival_time(hass: HomeAssistant, mock_config) -> None: +async def test_reset_arrival_time( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test resetting arrival time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -582,7 +596,9 @@ async def test_reset_arrival_time(hass: HomeAssistant, mock_config) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_options_flow_fields(hass: HomeAssistant, mock_config) -> None: +async def test_reset_options_flow_fields( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test resetting options flow fields that are not time related to None.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None diff --git a/tests/components/google_travel_time/test_sensor.py b/tests/components/google_travel_time/test_sensor.py index 57f3d7a0b98..5ac9ecad482 100644 --- a/tests/components/google_travel_time/test_sensor.py +++ b/tests/components/google_travel_time/test_sensor.py @@ -1,6 +1,7 @@ """Test the Google Maps Travel Time sensors.""" -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import MagicMock, patch import pytest @@ -25,7 +26,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="mock_update") -def mock_update_fixture(): +def mock_update_fixture() -> Generator[MagicMock]: """Mock an update to the sensor.""" with ( patch("homeassistant.components.google_travel_time.sensor.Client"), @@ -56,7 +57,7 @@ def mock_update_fixture(): @pytest.fixture(name="mock_update_duration") -def mock_update_duration_fixture(mock_update): +def mock_update_duration_fixture(mock_update: MagicMock) -> MagicMock: """Mock an update to the sensor returning no duration_in_traffic.""" mock_update.return_value = { "rows": [ 
@@ -77,7 +78,7 @@ def mock_update_duration_fixture(mock_update): @pytest.fixture(name="mock_update_empty") -def mock_update_empty_fixture(mock_update): +def mock_update_empty_fixture(mock_update: MagicMock) -> MagicMock: """Mock an update to the sensor with an empty response.""" mock_update.return_value = None return mock_update diff --git a/tests/components/govee_ble/__init__.py b/tests/components/govee_ble/__init__.py index 60930d1dd0e..66c5b0b832c 100644 --- a/tests/components/govee_ble/__init__.py +++ b/tests/components/govee_ble/__init__.py @@ -83,3 +83,136 @@ GVH5106_SERVICE_INFO = BluetoothServiceInfo( service_data={}, source="local", ) + + +GV5125_BUTTON_0_SERVICE_INFO = BluetoothServiceInfo( + name="GV51255367", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 60552: b"\x01\n.\xaf\xd9085Sg\x01\x01", + 61320: b".\xaf\x00\x00b\\\xae\x92\x15\xb6\xa8\n\xd4\x81K\xcaK_s\xd9E40\x02", + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + +GV5125_BUTTON_1_SERVICE_INFO = BluetoothServiceInfo( + name="GV51255367", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 60552: b"\x01\n.\xaf\xd9085Sg\x01\x01", + 61320: b".\xaf\x00\x00\xfb\x0e\xc9h\xd7\x05l\xaf*\xf3\x1b\xe8w\xf1\xe1\xe8\xe3\xa7\xf8\xc6", + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GV5121_MOTION_SERVICE_INFO = BluetoothServiceInfo( + name="GV5121195A", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 61320: b"Y\x94\x00\x00\xf0\xb9\x197\xaeP\xb67,\x86j\xc2\xf3\xd0a\xe7\x17\xc0,\xef" + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GV5121_MOTION_SERVICE_INFO_2 = BluetoothServiceInfo( + name="GV5121195A", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 61320: b"Y\x94\x00\x06\xa3f6e\xc8\xe6\xfdv\x04\xaf\xe7k\xbf\xab\xeb\xbf\xb3\xa3\xd5\x19" + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GV5123_OPEN_SERVICE_INFO = 
BluetoothServiceInfo( + name="GV51230B3D", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 61320: b"=\xec\x00\x00\xdeCw\xd5^U\xf9\x91In6\xbd\xc6\x7f\x8b,'\x06t\x97" + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GV5123_CLOSED_SERVICE_INFO = BluetoothServiceInfo( + name="GV51230B3D", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 61320: b"=\xec\x00\x01Y\xdbk\xd9\xbe\xd7\xaf\xf7*&\xaaK\xd7-\xfa\x94W>[\xe9" + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GVH5124_SERVICE_INFO = BluetoothServiceInfo( + name="GV51242F68", + address="D3:32:39:37:2F:68", + rssi=-67, + manufacturer_data={ + 61320: b"\x08\xa2\x00\x01%\xc2YW\xfdzu\x0e\xf24\xa2\x18\xbb\x15F|[s{\x04" + }, + service_data={}, + service_uuids=[], + source="local", +) + +GVH5124_2_SERVICE_INFO = BluetoothServiceInfo( + name="GV51242F68", + address="D3:32:39:37:2F:68", + rssi=-67, + manufacturer_data={ + 61320: b"\x08\xa2\x00\x13^Sso\xaeC\x9aU\xcf\xd8\x02\x1b\xdf\xd5\xded;+\xd6\x13" + }, + service_data={}, + service_uuids=[], + source="local", +) + + +GVH5127_MOTION_SERVICE_INFO = BluetoothServiceInfo( + name="GVH51275E3F", + address="D0:C9:07:1B:5E:3F", + rssi=-61, + manufacturer_data={34819: b"\xec\x00\x01\x01\x01\x11"}, + service_data={}, + service_uuids=[], + source="Core Bluetooth", +) +GVH5127_PRESENT_SERVICE_INFO = BluetoothServiceInfo( + name="GVH51275E3F", + address="D0:C9:07:1B:5E:3F", + rssi=-60, + manufacturer_data={34819: b"\xec\x00\x01\x01\x01\x01"}, + service_data={}, + service_uuids=[], + source="Core Bluetooth", +) +GVH5127_ABSENT_SERVICE_INFO = BluetoothServiceInfo( + name="GVH51275E3F", + address="D0:C9:07:1B:5E:3F", + rssi=-53, + manufacturer_data={34819: b"\xec\x00\x01\x01\x00\x00"}, + service_data={}, + service_uuids=[], + source="Core Bluetooth", +) diff --git a/tests/components/govee_ble/test_binary_sensor.py b/tests/components/govee_ble/test_binary_sensor.py new file mode 100644 index 
00000000000..cf8b54ef54f --- /dev/null +++ b/tests/components/govee_ble/test_binary_sensor.py @@ -0,0 +1,84 @@ +"""Test the Govee BLE binary_sensor.""" + +from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN +from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant + +from . import ( + GV5123_CLOSED_SERVICE_INFO, + GV5123_OPEN_SERVICE_INFO, + GVH5127_ABSENT_SERVICE_INFO, + GVH5127_MOTION_SERVICE_INFO, + GVH5127_PRESENT_SERVICE_INFO, +) + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + + +async def test_window_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the window sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GV5123_OPEN_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5123"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 + inject_bluetooth_service_info(hass, GV5123_OPEN_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 + + motion_sensor = hass.states.get("binary_sensor.51230f45_window") + assert motion_sensor.state == STATE_ON + + inject_bluetooth_service_info(hass, GV5123_CLOSED_SERVICE_INFO) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("binary_sensor.51230f45_window") + assert motion_sensor.state == STATE_OFF + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + +async def test_presence_sensor(hass: HomeAssistant) -> None: + """Test the presence sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GVH5127_ABSENT_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5127"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert 
len(hass.states.async_all()) == 0 + inject_bluetooth_service_info(hass, GVH5127_ABSENT_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 + + motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") + assert motion_sensor.state == STATE_OFF + occupancy_sensor = hass.states.get("binary_sensor.h51275e3f_occupancy") + assert occupancy_sensor.state == STATE_OFF + + inject_bluetooth_service_info(hass, GVH5127_PRESENT_SERVICE_INFO) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") + assert motion_sensor.state == STATE_OFF + occupancy_sensor = hass.states.get("binary_sensor.h51275e3f_occupancy") + assert occupancy_sensor.state == STATE_ON + + inject_bluetooth_service_info(hass, GVH5127_MOTION_SERVICE_INFO) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") + assert motion_sensor.state == STATE_ON + occupancy_sensor = hass.states.get("binary_sensor.h51275e3f_occupancy") + assert occupancy_sensor.state == STATE_ON diff --git a/tests/components/govee_ble/test_config_flow.py b/tests/components/govee_ble/test_config_flow.py index 0c340c01f2a..eb0719f832c 100644 --- a/tests/components/govee_ble/test_config_flow.py +++ b/tests/components/govee_ble/test_config_flow.py @@ -3,7 +3,7 @@ from unittest.mock import patch from homeassistant import config_entries -from homeassistant.components.govee_ble.const import DOMAIN +from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -29,7 +29,7 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5075 2762" - assert result2["data"] == {} + assert result2["data"] == {CONF_DEVICE_TYPE: "H5075"} assert result2["result"].unique_id == 
"61DE521B-F0BF-9F44-64D4-75BBE1738105" @@ -75,7 +75,7 @@ async def test_async_step_user_with_found_devices(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5177 2EC8" - assert result2["data"] == {} + assert result2["data"] == {CONF_DEVICE_TYPE: "H5177"} assert result2["result"].unique_id == "4125DDBA-2774-4851-9889-6AADDD4CAC3D" @@ -198,7 +198,7 @@ async def test_async_step_user_takes_precedence_over_discovery( ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5177 2EC8" - assert result2["data"] == {} + assert result2["data"] == {CONF_DEVICE_TYPE: "H5177"} assert result2["result"].unique_id == "4125DDBA-2774-4851-9889-6AADDD4CAC3D" # Verify the original one was aborted diff --git a/tests/components/govee_ble/test_event.py b/tests/components/govee_ble/test_event.py new file mode 100644 index 00000000000..c41cdad3c89 --- /dev/null +++ b/tests/components/govee_ble/test_event.py @@ -0,0 +1,108 @@ +"""Test the Govee BLE events.""" + +from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN +from homeassistant.const import STATE_UNKNOWN +from homeassistant.core import HomeAssistant + +from . 
import ( + GV5121_MOTION_SERVICE_INFO, + GV5121_MOTION_SERVICE_INFO_2, + GV5125_BUTTON_0_SERVICE_INFO, + GV5125_BUTTON_1_SERVICE_INFO, + GVH5124_2_SERVICE_INFO, + GVH5124_SERVICE_INFO, +) + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + + +async def test_motion_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the motion sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GV5121_MOTION_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5121"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 1 + inject_bluetooth_service_info(hass, GV5121_MOTION_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 + + motion_sensor = hass.states.get("event.h5121_motion") + first_time = motion_sensor.state + assert motion_sensor.state != STATE_UNKNOWN + + inject_bluetooth_service_info(hass, GV5121_MOTION_SERVICE_INFO_2) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("event.h5121_motion") + assert motion_sensor.state != first_time + assert motion_sensor.state != STATE_UNKNOWN + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + +async def test_button(hass: HomeAssistant) -> None: + """Test setting up creates the buttons.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GV5125_BUTTON_1_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5125"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 6 + inject_bluetooth_service_info(hass, GV5125_BUTTON_1_SERVICE_INFO) + await hass.async_block_till_done() + + button_1 = hass.states.get("event.h5125_button_1") + assert button_1.state == STATE_UNKNOWN + + 
inject_bluetooth_service_info(hass, GV5125_BUTTON_0_SERVICE_INFO) + await hass.async_block_till_done() + button_1 = hass.states.get("event.h5125_button_1") + assert button_1.state != STATE_UNKNOWN + assert len(hass.states.async_all()) == 7 + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + +async def test_vibration_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the vibration sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GVH5124_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5124"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 1 + inject_bluetooth_service_info(hass, GVH5124_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 + + motion_sensor = hass.states.get("event.h5124_vibration") + first_time = motion_sensor.state + assert motion_sensor.state != STATE_UNKNOWN + + inject_bluetooth_service_info(hass, GVH5124_2_SERVICE_INFO) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("event.h5124_vibration") + assert motion_sensor.state != first_time + assert motion_sensor.state != STATE_UNKNOWN + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/govee_light_local/conftest.py b/tests/components/govee_light_local/conftest.py index 90a9f8e6827..6a8ee99b764 100644 --- a/tests/components/govee_light_local/conftest.py +++ b/tests/components/govee_light_local/conftest.py @@ -1,11 +1,11 @@ """Tests configuration for Govee Local API.""" from asyncio import Event +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from govee_local_api import GoveeLightCapability import pytest -from typing_extensions import Generator from homeassistant.components.govee_light_local.coordinator 
import GoveeController diff --git a/tests/components/gpsd/conftest.py b/tests/components/gpsd/conftest.py index c323365e8fd..c15ef7f0258 100644 --- a/tests/components/gpsd/conftest.py +++ b/tests/components/gpsd/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the GPSD tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/gpsd/test_config_flow.py b/tests/components/gpsd/test_config_flow.py index 6f330571076..4d832e120e4 100644 --- a/tests/components/gpsd/test_config_flow.py +++ b/tests/components/gpsd/test_config_flow.py @@ -6,7 +6,7 @@ from gps3.agps3threaded import GPSD_PORT as DEFAULT_PORT from homeassistant import config_entries from homeassistant.components.gpsd.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -43,10 +43,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: async def test_connection_error(hass: HomeAssistant) -> None: """Test connection to host error.""" - with patch("socket.socket") as mock_socket: - mock_connect = mock_socket.return_value.connect - mock_connect.side_effect = OSError - + with patch("socket.socket", side_effect=OSError): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, @@ -55,23 +52,3 @@ async def test_connection_error(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" - - -async def test_import(hass: HomeAssistant) -> None: - """Test import step.""" - with patch("homeassistant.components.gpsd.config_flow.socket") as mock_socket: - mock_connect = mock_socket.return_value.connect - mock_connect.return_value = None - - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: 1234, CONF_NAME: "MyGPS"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "MyGPS" - assert result["data"] == { - CONF_HOST: HOST, - CONF_NAME: "MyGPS", - CONF_PORT: 1234, - } diff --git a/tests/components/gpslogger/test_init.py b/tests/components/gpslogger/test_init.py index 68b95df1702..fab6aaa4e84 100644 --- a/tests/components/gpslogger/test_init.py +++ b/tests/components/gpslogger/test_init.py @@ -45,7 +45,7 @@ async def gpslogger_client( @pytest.fixture(autouse=True) -async def setup_zones(hass): +async def setup_zones(hass: HomeAssistant) -> None: """Set up Zone config in HA.""" assert await async_setup_component( hass, @@ -63,7 +63,7 @@ async def setup_zones(hass): @pytest.fixture -async def webhook_id(hass, gpslogger_client): +async def webhook_id(hass: HomeAssistant, gpslogger_client: TestClient) -> str: """Initialize the GPSLogger component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -81,7 +81,9 @@ async def webhook_id(hass, gpslogger_client): return result["result"].data["webhook_id"] -async def test_missing_data(hass: HomeAssistant, gpslogger_client, webhook_id) -> None: +async def test_missing_data( + hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str +) -> None: """Test missing data.""" url = f"/api/webhook/{webhook_id}" @@ -111,8 +113,8 @@ async def test_enter_and_exit( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - gpslogger_client, - webhook_id, + gpslogger_client: TestClient, + webhook_id: str, ) -> None: """Test when there is a known zone.""" url = f"/api/webhook/{webhook_id}" @@ -148,7 +150,7 @@ async def test_enter_and_exit( async def test_enter_with_attrs( - hass: HomeAssistant, gpslogger_client, webhook_id + hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str ) -> 
None: """Test when additional attributes are present.""" url = f"/api/webhook/{webhook_id}" @@ -210,7 +212,7 @@ async def test_enter_with_attrs( reason="The device_tracker component does not support unloading yet." ) async def test_load_unload_entry( - hass: HomeAssistant, gpslogger_client, webhook_id + hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" diff --git a/tests/components/gree/conftest.py b/tests/components/gree/conftest.py index 88bcaea33c2..a9e2fc9e5d4 100644 --- a/tests/components/gree/conftest.py +++ b/tests/components/gree/conftest.py @@ -1,9 +1,9 @@ """Pytest module configuration.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from .common import FakeDiscovery, build_device_mock diff --git a/tests/components/gree/test_bridge.py b/tests/components/gree/test_bridge.py index 37b0b0dc15e..32372bebf37 100644 --- a/tests/components/gree/test_bridge.py +++ b/tests/components/gree/test_bridge.py @@ -5,8 +5,12 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.climate import DOMAIN -from homeassistant.components.gree.const import COORDINATORS, DOMAIN as GREE +from homeassistant.components.climate import DOMAIN, HVACMode +from homeassistant.components.gree.const import ( + COORDINATORS, + DOMAIN as GREE, + UPDATE_INTERVAL, +) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util @@ -69,3 +73,30 @@ async def test_discovery_after_setup( device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.2" assert device_infos[1].ip == "2.2.2.1" + + +async def test_coordinator_updates( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +) -> None: + """Test gree 
devices update their state.""" + await async_setup_gree(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all(DOMAIN)) == 1 + + callback = device().add_handler.call_args_list[0][0][1] + + async def fake_update_state(*args) -> None: + """Fake update state.""" + device().power = True + callback() + + device().update_state.side_effect = fake_update_state + + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID_1) + assert state is not None + assert state.state != HVACMode.OFF diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index 0bd767e4f35..1bf49bbca26 100644 --- a/tests/components/gree/test_climate.py +++ b/tests/components/gree/test_climate.py @@ -4,13 +4,19 @@ from datetime import timedelta from unittest.mock import DEFAULT as DEFAULT_MOCK, AsyncMock, patch from freezegun.api import FrozenDateTimeFactory -from greeclimate.device import HorizontalSwing, VerticalSwing +from greeclimate.device import ( + TEMP_MAX, + TEMP_MAX_F, + TEMP_MIN, + TEMP_MIN_F, + HorizontalSwing, + VerticalSwing, +) from greeclimate.exceptions import DeviceNotBoundError, DeviceTimeoutError import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.climate import ( - ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, @@ -40,11 +46,18 @@ from homeassistant.components.gree.climate import ( FAN_MODES_REVERSE, HVAC_MODES, HVAC_MODES_REVERSE, + GreeClimateEntity, +) +from homeassistant.components.gree.const import ( + DISCOVERY_SCAN_INTERVAL, + FAN_MEDIUM_HIGH, + FAN_MEDIUM_LOW, + UPDATE_INTERVAL, ) -from homeassistant.components.gree.const import FAN_MEDIUM_HIGH, FAN_MEDIUM_LOW from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, + ATTR_UNIT_OF_MEASUREMENT, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_UNAVAILABLE, @@ -53,7 +66,6 @@ from homeassistant.const 
import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er -import homeassistant.util.dt as dt_util from .common import async_setup_gree, build_device_mock @@ -62,12 +74,6 @@ from tests.common import async_fire_time_changed ENTITY_ID = f"{DOMAIN}.fake_device_1" -@pytest.fixture -def mock_now(): - """Fixture for dtutil.now.""" - return dt_util.utcnow() - - async def test_discovery_called_once(hass: HomeAssistant, discovery, device) -> None: """Test discovery is only ever called once.""" await async_setup_gree(hass) @@ -96,7 +102,7 @@ async def test_discovery_setup(hass: HomeAssistant, discovery, device) -> None: async def test_discovery_setup_connection_error( - hass: HomeAssistant, discovery, device, mock_now + hass: HomeAssistant, discovery, device ) -> None: """Test gree integration is setup.""" MockDevice1 = build_device_mock( @@ -118,7 +124,7 @@ async def test_discovery_setup_connection_error( async def test_discovery_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices don't change after multiple discoveries.""" MockDevice1 = build_device_mock( @@ -134,8 +140,7 @@ async def test_discovery_after_setup( discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - await async_setup_gree(hass) - await hass.async_block_till_done() + await async_setup_gree(hass) # Update 1 assert discovery.return_value.scan_count == 1 assert len(hass.states.async_all(DOMAIN)) == 2 @@ -144,9 +149,8 @@ async def test_discovery_after_setup( discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - next_update = mock_now + timedelta(minutes=6) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + 
freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 @@ -154,7 +158,7 @@ async def test_discovery_after_setup( async def test_discovery_add_device_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices can be added after initial setup.""" MockDevice1 = build_device_mock( @@ -170,6 +174,8 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.side_effect = [MockDevice1] + await async_setup_gree(hass) # Update 1 + await async_setup_gree(hass) await hass.async_block_till_done() @@ -180,9 +186,8 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice2] device.side_effect = [MockDevice2] - next_update = mock_now + timedelta(minutes=6) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 @@ -190,7 +195,7 @@ async def test_discovery_add_device_after_setup( async def test_discovery_device_bind_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices can be added after a late device bind.""" MockDevice1 = build_device_mock( @@ -202,8 +207,7 @@ async def test_discovery_device_bind_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.return_value = MockDevice1 - await async_setup_gree(hass) - await hass.async_block_till_done() + await async_setup_gree(hass) # Update 1 assert len(hass.states.async_all(DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) @@ -214,9 +218,8 
@@ async def test_discovery_device_bind_after_setup( MockDevice1.bind.side_effect = None MockDevice1.update_state.side_effect = None - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -224,7 +227,7 @@ async def test_discovery_device_bind_after_setup( async def test_update_connection_failure( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection failure exception.""" device().update_state.side_effect = [ @@ -233,36 +236,32 @@ async def test_update_connection_failure( DeviceTimeoutError, ] - await async_setup_gree(hass) + await async_setup_gree(hass) # Update 1 + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) await hass.async_block_till_done() - # First update to make the device available + # Update 2 + await run_update() state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + # Update 3 + await run_update() - next_update = mock_now + timedelta(minutes=15) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - - # Then two more update failures to make the device unavailable + # Update 4 + await run_update() state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE -async def 
test_update_connection_failure_recovery( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now +async def test_update_connection_send_failure_recovery( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection failure recovery.""" device().update_state.side_effect = [ @@ -271,31 +270,27 @@ async def test_update_connection_failure_recovery( DEFAULT_MOCK, ] - await async_setup_gree(hass) + await async_setup_gree(hass) # Update 1 + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) - # First update becomes unavailable - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) await hass.async_block_till_done() + await run_update() # Update 2 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE - # Second update restores the connection - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - + await run_update() # Update 3 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE async def test_update_unhandled_exception( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection unhandled response exception.""" device().update_state.side_effect = [DEFAULT_MOCK, Exception] @@ -306,9 +301,8 @@ async def test_update_unhandled_exception( assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + 
async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -317,15 +311,13 @@ async def test_update_unhandled_exception( async def test_send_command_device_timeout( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test for sending power on command to the device with a device timeout.""" await async_setup_gree(hass) - # First update to make the device available - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -347,7 +339,40 @@ async def test_send_command_device_timeout( assert state.state != STATE_UNAVAILABLE -async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) -> None: +async def test_unresponsive_device( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +) -> None: + """Test for unresponsive device.""" + await async_setup_gree(hass) + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Update 2 + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state != STATE_UNAVAILABLE + + # Update 3, 4, 5 + await run_update() + await run_update() + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state == STATE_UNAVAILABLE + + # Receiving update from device will reset the state to available again + device().device_state_updated("test") + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state != STATE_UNAVAILABLE + + +async def test_send_power_on(hass: 
HomeAssistant, discovery, device) -> None: """Test for sending power on command to the device.""" await async_setup_gree(hass) @@ -364,7 +389,7 @@ async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) - async def test_send_power_off_device_timeout( - hass: HomeAssistant, discovery, device, mock_now + hass: HomeAssistant, discovery, device ) -> None: """Test for sending power off command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -385,7 +410,7 @@ async def test_send_power_off_device_timeout( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 26), (UnitOfTemperature.FAHRENHEIT, 74)], + [(UnitOfTemperature.CELSIUS, 26), (UnitOfTemperature.FAHRENHEIT, 73)], ) async def test_send_target_temperature( hass: HomeAssistant, discovery, device, units, temperature @@ -405,6 +430,14 @@ async def test_send_target_temperature( # Make sure we're trying to test something that isn't the default assert fake_device.current_temperature != temperature + hass.states.async_set( + ENTITY_ID, + "off", + { + ATTR_UNIT_OF_MEASUREMENT: units, + }, + ) + await hass.services.async_call( DOMAIN, SERVICE_SET_TEMPERATURE, @@ -415,10 +448,6 @@ async def test_send_target_temperature( state = hass.states.get(ENTITY_ID) assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature - assert ( - state.attributes.get(ATTR_CURRENT_TEMPERATURE) - == fake_device.current_temperature - ) assert state.state == HVAC_MODES.get(fake_device.mode) # Reset config temperature_unit back to CELSIUS, required for @@ -462,7 +491,11 @@ async def test_send_target_temperature_with_hvac_mode( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 25), (UnitOfTemperature.FAHRENHEIT, 74)], + [ + (UnitOfTemperature.CELSIUS, 25), + (UnitOfTemperature.FAHRENHEIT, 73), + (UnitOfTemperature.FAHRENHEIT, 74), + ], ) async def 
test_send_target_temperature_device_timeout( hass: HomeAssistant, discovery, device, units, temperature @@ -492,7 +525,11 @@ async def test_send_target_temperature_device_timeout( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 25), (UnitOfTemperature.FAHRENHEIT, 74)], + [ + (UnitOfTemperature.CELSIUS, 25), + (UnitOfTemperature.FAHRENHEIT, 73), + (UnitOfTemperature.FAHRENHEIT, 74), + ], ) async def test_update_target_temperature( hass: HomeAssistant, discovery, device, units, temperature @@ -505,6 +542,13 @@ async def test_update_target_temperature( await async_setup_gree(hass) + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, + blocking=True, + ) + state = hass.states.get(ENTITY_ID) assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature @@ -516,9 +560,7 @@ async def test_update_target_temperature( @pytest.mark.parametrize( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) -async def test_send_preset_mode( - hass: HomeAssistant, discovery, device, mock_now, preset -) -> None: +async def test_send_preset_mode(hass: HomeAssistant, discovery, device, preset) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) @@ -534,9 +576,7 @@ async def test_send_preset_mode( assert state.attributes.get(ATTR_PRESET_MODE) == preset -async def test_send_invalid_preset_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_preset_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) @@ -557,7 +597,7 @@ async def test_send_invalid_preset_mode( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_send_preset_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, preset + hass: 
HomeAssistant, discovery, device, preset ) -> None: """Test for sending preset mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -580,7 +620,7 @@ async def test_send_preset_mode_device_timeout( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_update_preset_mode( - hass: HomeAssistant, discovery, device, mock_now, preset + hass: HomeAssistant, discovery, device, preset ) -> None: """Test for updating preset mode from the device.""" device().steady_heat = preset == PRESET_AWAY @@ -607,7 +647,7 @@ async def test_update_preset_mode( ], ) async def test_send_hvac_mode( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for sending hvac mode command to the device.""" await async_setup_gree(hass) @@ -629,7 +669,7 @@ async def test_send_hvac_mode( [HVACMode.AUTO, HVACMode.COOL, HVACMode.DRY, HVACMode.FAN_ONLY, HVACMode.HEAT], ) async def test_send_hvac_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for sending hvac mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -660,7 +700,7 @@ async def test_send_hvac_mode_device_timeout( ], ) async def test_update_hvac_mode( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for updating hvac mode from the device.""" device().power = hvac_mode != HVACMode.OFF @@ -677,9 +717,7 @@ async def test_update_hvac_mode( "fan_mode", [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) -async def test_send_fan_mode( - hass: HomeAssistant, discovery, device, mock_now, fan_mode -) -> None: +async def test_send_fan_mode(hass: HomeAssistant, discovery, device, fan_mode) -> None: 
"""Test for sending fan mode command to the device.""" await async_setup_gree(hass) @@ -695,9 +733,7 @@ async def test_send_fan_mode( assert state.attributes.get(ATTR_FAN_MODE) == fan_mode -async def test_send_invalid_fan_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_fan_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) @@ -719,7 +755,7 @@ async def test_send_invalid_fan_mode( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_send_fan_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, fan_mode + hass: HomeAssistant, discovery, device, fan_mode ) -> None: """Test for sending fan mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -743,7 +779,7 @@ async def test_send_fan_mode_device_timeout( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_update_fan_mode( - hass: HomeAssistant, discovery, device, mock_now, fan_mode + hass: HomeAssistant, discovery, device, fan_mode ) -> None: """Test for updating fan mode from the device.""" device().fan_speed = FAN_MODES_REVERSE.get(fan_mode) @@ -759,7 +795,7 @@ async def test_update_fan_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for sending swing mode command to the device.""" await async_setup_gree(hass) @@ -776,9 +812,7 @@ async def test_send_swing_mode( assert state.attributes.get(ATTR_SWING_MODE) == swing_mode -async def test_send_invalid_swing_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_swing_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending 
swing mode command to the device.""" await async_setup_gree(hass) @@ -799,7 +833,7 @@ async def test_send_invalid_swing_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for sending swing mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -822,7 +856,7 @@ async def test_send_swing_mode_device_timeout( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_update_swing_mode( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for updating swing mode from the device.""" device().horizontal_swing = ( @@ -843,6 +877,40 @@ async def test_update_swing_mode( assert state.attributes.get(ATTR_SWING_MODE) == swing_mode +async def test_coordinator_update_handler( + hass: HomeAssistant, discovery, device +) -> None: + """Test for coordinator update handler.""" + await async_setup_gree(hass) + await hass.async_block_till_done() + + entity: GreeClimateEntity = hass.data[DOMAIN].get_entity(ENTITY_ID) + assert entity is not None + + # Initial state + assert entity.temperature_unit == UnitOfTemperature.CELSIUS + assert entity.min_temp == TEMP_MIN + assert entity.max_temp == TEMP_MAX + + # Set unit to FAHRENHEIT + device().temperature_units = 1 + entity.coordinator.async_set_updated_data(UnitOfTemperature.FAHRENHEIT) + await hass.async_block_till_done() + + assert entity.temperature_unit == UnitOfTemperature.FAHRENHEIT + assert entity.min_temp == TEMP_MIN_F + assert entity.max_temp == TEMP_MAX_F + + # Set unit back to CELSIUS + device().temperature_units = 0 + entity.coordinator.async_set_updated_data(UnitOfTemperature.CELSIUS) + await hass.async_block_till_done() + + assert 
entity.temperature_unit == UnitOfTemperature.CELSIUS + assert entity.min_temp == TEMP_MIN + assert entity.max_temp == TEMP_MAX + + @patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) async def test_registry_settings( hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion diff --git a/tests/components/greeneye_monitor/conftest.py b/tests/components/greeneye_monitor/conftest.py index ad8a98ce3fe..343a15346e7 100644 --- a/tests/components/greeneye_monitor/conftest.py +++ b/tests/components/greeneye_monitor/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for testing greeneye_monitor.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.greeneye_monitor import DOMAIN from homeassistant.components.sensor import SensorDeviceClass diff --git a/tests/components/group/test_button.py b/tests/components/group/test_button.py new file mode 100644 index 00000000000..c3f4a720d53 --- /dev/null +++ b/tests/components/group/test_button.py @@ -0,0 +1,122 @@ +"""The tests for the group button platform.""" + +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.group import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + + +async def test_default_state( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test button group default state.""" + hass.states.async_set("button.notify_light", "2021-01-01T23:59:59.123+00:00") + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: { + "platform": 
DOMAIN, + "entities": ["button.notify_light", "button.self_destruct"], + "name": "Button group", + "unique_id": "unique_identifier", + } + }, + ) + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + state = hass.states.get("button.button_group") + assert state is not None + assert state.state == STATE_UNKNOWN + assert state.attributes.get(ATTR_ENTITY_ID) == [ + "button.notify_light", + "button.self_destruct", + ] + + entry = entity_registry.async_get("button.button_group") + assert entry + assert entry.unique_id == "unique_identifier" + + +async def test_state_reporting(hass: HomeAssistant) -> None: + """Test the state reporting. + + The group state is unavailable if all group members are unavailable. + Otherwise, the group state represents the last time the grouped button was pressed. + """ + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: { + "platform": DOMAIN, + "entities": ["button.test1", "button.test2"], + } + }, + ) + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + # Initial state with no group member in the state machine -> unavailable + assert hass.states.get("button.button_group").state == STATE_UNAVAILABLE + + # All group members unavailable -> unavailable + hass.states.async_set("button.test1", STATE_UNAVAILABLE) + hass.states.async_set("button.test2", STATE_UNAVAILABLE) + await hass.async_block_till_done() + assert hass.states.get("button.button_group").state == STATE_UNAVAILABLE + + # All group members available, but no group member pressed -> unknown + hass.states.async_set("button.test1", "2021-01-01T23:59:59.123+00:00") + hass.states.async_set("button.test2", "2022-02-02T23:59:59.123+00:00") + await hass.async_block_till_done() + assert hass.states.get("button.button_group").state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_service_calls( + hass: HomeAssistant, 
freezer: FrozenDateTimeFactory +) -> None: + """Test service calls.""" + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: [ + {"platform": "demo"}, + { + "platform": DOMAIN, + "entities": [ + "button.push", + "button.self_destruct", + ], + }, + ] + }, + ) + await hass.async_block_till_done() + + assert hass.states.get("button.button_group").state == STATE_UNKNOWN + assert hass.states.get("button.push").state == STATE_UNKNOWN + + now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") + freezer.move_to(now) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.button_group"}, + blocking=True, + ) + + assert hass.states.get("button.button_group").state == now.isoformat() + assert hass.states.get("button.push").state == now.isoformat() diff --git a/tests/components/group/test_config_flow.py b/tests/components/group/test_config_flow.py index c6ee4ae5a87..461df19ebf8 100644 --- a/tests/components/group/test_config_flow.py +++ b/tests/components/group/test_config_flow.py @@ -29,6 +29,7 @@ from tests.typing import WebSocketGenerator [ ("binary_sensor", "on", "on", {}, {}, {"all": False}, {}), ("binary_sensor", "on", "on", {}, {"all": True}, {"all": True}, {}), + ("button", STATE_UNKNOWN, "2021-01-01T23:59:59.123+00:00", {}, {}, {}, {}), ("cover", "open", "open", {}, {}, {}, {}), ( "event", @@ -45,6 +46,7 @@ from tests.typing import WebSocketGenerator ("fan", "on", "on", {}, {}, {}, {}), ("light", "on", "on", {}, {}, {}, {}), ("lock", "locked", "locked", {}, {}, {}, {}), + ("notify", STATE_UNKNOWN, "2021-01-01T23:59:59.123+00:00", {}, {}, {}, {}), ("media_player", "on", "on", {}, {}, {}, {}), ( "sensor", @@ -135,11 +137,13 @@ async def test_config_flow( ("group_type", "extra_input"), [ ("binary_sensor", {"all": False}), + ("button", {}), ("cover", {}), ("event", {}), ("fan", {}), ("light", {}), ("lock", {}), + ("notify", {}), ("media_player", {}), ("switch", {}), ], @@ -212,11 +216,13 @@ def 
get_suggested(schema, key): ("group_type", "member_state", "extra_options", "options_options"), [ ("binary_sensor", "on", {"all": False}, {}), + ("button", "2021-01-01T23:59:59.123+00:00", {}, {}), ("cover", "open", {}, {}), ("event", "2021-01-01T23:59:59.123+00:00", {}, {}), ("fan", "on", {}, {}), ("light", "on", {"all": False}, {}), ("lock", "locked", {}, {}), + ("notify", "2021-01-01T23:59:59.123+00:00", {}, {}), ("media_player", "on", {}, {}), ( "sensor", @@ -396,11 +402,13 @@ async def test_all_options( ("group_type", "extra_input"), [ ("binary_sensor", {"all": False}), + ("button", {}), ("cover", {}), ("event", {}), ("fan", {}), ("light", {}), ("lock", {}), + ("notify", {}), ("media_player", {}), ("switch", {}), ], @@ -483,6 +491,7 @@ LIGHT_ATTRS = [ {"color_mode": "unknown"}, ] LOCK_ATTRS = [{"supported_features": 1}, {}] +NOTIFY_ATTRS = [{"supported_features": 0}, {}] MEDIA_PLAYER_ATTRS = [{"supported_features": 0}, {}] SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two"}] @@ -491,11 +500,13 @@ SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two" ("domain", "extra_user_input", "input_states", "group_state", "extra_attributes"), [ ("binary_sensor", {"all": True}, ["on", "off"], "off", [{}, {}]), + ("button", {}, ["", ""], "unknown", [{}, {}]), ("cover", {}, ["open", "closed"], "open", COVER_ATTRS), ("event", {}, ["", ""], "unknown", EVENT_ATTRS), ("fan", {}, ["on", "off"], "on", FAN_ATTRS), ("light", {}, ["on", "off"], "on", LIGHT_ATTRS), ("lock", {}, ["unlocked", "locked"], "unlocked", LOCK_ATTRS), + ("notify", {}, ["", ""], "unknown", NOTIFY_ATTRS), ("media_player", {}, ["on", "off"], "on", MEDIA_PLAYER_ATTRS), ("sensor", {"type": "max"}, ["10", "20"], "20.0", SENSOR_ATTRS), ("switch", {}, ["on", "off"], "on", [{}, {}]), @@ -600,11 +611,13 @@ async def test_config_flow_preview( ), [ ("binary_sensor", {"all": True}, {"all": False}, ["on", "off"], "on", [{}, {}]), + ("button", {}, {}, ["", ""], 
"unknown", [{}, {}]), ("cover", {}, {}, ["open", "closed"], "open", COVER_ATTRS), ("event", {}, {}, ["", ""], "unknown", EVENT_ATTRS), ("fan", {}, {}, ["on", "off"], "on", FAN_ATTRS), ("light", {}, {}, ["on", "off"], "on", LIGHT_ATTRS), ("lock", {}, {}, ["unlocked", "locked"], "unlocked", LOCK_ATTRS), + ("notify", {}, {}, ["", ""], "unknown", NOTIFY_ATTRS), ("media_player", {}, {}, ["on", "off"], "on", MEDIA_PLAYER_ATTRS), ( "sensor", diff --git a/tests/components/group/test_cover.py b/tests/components/group/test_cover.py index 5b5d8fa873c..c687ca21e2d 100644 --- a/tests/components/group/test_cover.py +++ b/tests/components/group/test_cover.py @@ -2,6 +2,7 @@ import asyncio from datetime import timedelta +from typing import Any import pytest @@ -90,7 +91,9 @@ CONFIG_ATTRIBUTES = { @pytest.fixture -async def setup_comp(hass, config_count): +async def setup_comp( + hass: HomeAssistant, config_count: tuple[dict[str, Any], int] +) -> None: """Set up group cover component.""" config, count = config_count with assert_setup_component(count, DOMAIN): @@ -101,7 +104,8 @@ async def setup_comp(hass, config_count): @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -async def test_state(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_state(hass: HomeAssistant) -> None: """Test handling of state. The group state is unknown if all group members are unknown or unavailable. 
@@ -250,8 +254,9 @@ async def test_state(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) +@pytest.mark.usefixtures("setup_comp") async def test_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test handling of state attributes.""" state = hass.states.get(COVER_GROUP) @@ -416,9 +421,8 @@ async def test_attributes( @pytest.mark.parametrize("config_count", [(CONFIG_TILT_ONLY, 2)]) -async def test_cover_that_only_supports_tilt_removed( - hass: HomeAssistant, setup_comp -) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> None: """Test removing a cover that support tilt.""" hass.states.async_set( DEMO_COVER_TILT, @@ -446,7 +450,8 @@ async def test_cover_that_only_supports_tilt_removed( @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_open_covers(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_open_covers(hass: HomeAssistant) -> None: """Test open cover function.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -467,7 +472,8 @@ async def test_open_covers(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_close_covers(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_close_covers(hass: HomeAssistant) -> None: """Test close cover function.""" await hass.services.async_call( DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -488,7 +494,8 @@ async def test_close_covers(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_toggle_covers(hass: HomeAssistant, setup_comp) -> None: 
+@pytest.mark.usefixtures("setup_comp") +async def test_toggle_covers(hass: HomeAssistant) -> None: """Test toggle cover function.""" # Start covers in open state await hass.services.async_call( @@ -538,7 +545,8 @@ async def test_toggle_covers(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_stop_covers(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_stop_covers(hass: HomeAssistant) -> None: """Test stop cover function.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -564,7 +572,8 @@ async def test_stop_covers(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_set_cover_position(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_set_cover_position(hass: HomeAssistant) -> None: """Test set cover position function.""" await hass.services.async_call( DOMAIN, @@ -587,7 +596,8 @@ async def test_set_cover_position(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_open_tilts(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_open_tilts(hass: HomeAssistant) -> None: """Test open tilt function.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -607,7 +617,8 @@ async def test_open_tilts(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_close_tilts(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_close_tilts(hass: HomeAssistant) -> None: """Test close tilt function.""" await hass.services.async_call( DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -625,7 
+636,8 @@ async def test_close_tilts(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_toggle_tilts(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_toggle_tilts(hass: HomeAssistant) -> None: """Test toggle tilt function.""" # Start tilted open await hass.services.async_call( @@ -678,7 +690,8 @@ async def test_toggle_tilts(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_stop_tilts(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_stop_tilts(hass: HomeAssistant) -> None: """Test stop tilts function.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -702,7 +715,8 @@ async def test_stop_tilts(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_set_tilt_positions(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_set_tilt_positions(hass: HomeAssistant) -> None: """Test set tilt position function.""" await hass.services.async_call( DOMAIN, @@ -723,7 +737,8 @@ async def test_set_tilt_positions(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_POS, 2)]) -async def test_is_opening_closing(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_is_opening_closing(hass: HomeAssistant) -> None: """Test is_opening property.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True diff --git a/tests/components/group/test_fan.py b/tests/components/group/test_fan.py index 6aa6fc2933d..184693f7618 100644 --- a/tests/components/group/test_fan.py +++ b/tests/components/group/test_fan.py @@ -1,6 +1,7 @@ """The tests for the group fan 
platform.""" import asyncio +from typing import Any from unittest.mock import patch import pytest @@ -102,7 +103,9 @@ CONFIG_ATTRIBUTES = { @pytest.fixture -async def setup_comp(hass, config_count): +async def setup_comp( + hass: HomeAssistant, config_count: tuple[dict[str, Any], int] +) -> None: """Set up group fan component.""" config, count = config_count with assert_setup_component(count, DOMAIN): @@ -113,9 +116,8 @@ async def setup_comp(hass, config_count): @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -async def test_state( - hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp -) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_state(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test handling of state. The group state is on if at least one group member is on. @@ -210,7 +212,8 @@ async def test_state( @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -async def test_attributes(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_attributes(hass: HomeAssistant) -> None: """Test handling of state attributes.""" state = hass.states.get(FAN_GROUP) assert state.state == STATE_UNAVAILABLE @@ -267,7 +270,8 @@ async def test_attributes(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_FULL_SUPPORT, 2)]) -async def test_direction_oscillating(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_direction_oscillating(hass: HomeAssistant) -> None: """Test handling of direction and oscillating attributes.""" hass.states.async_set( @@ -378,7 +382,8 @@ async def test_direction_oscillating(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_MISSING_FAN, 2)]) -async def test_state_missing_entity_id(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def 
test_state_missing_entity_id(hass: HomeAssistant) -> None: """Test we can still setup with a missing entity id.""" state = hass.states.get(FAN_GROUP) await hass.async_block_till_done() @@ -398,7 +403,8 @@ async def test_setup_before_started(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_MISSING_FAN, 2)]) -async def test_reload(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_reload(hass: HomeAssistant) -> None: """Test the ability to reload fans.""" await hass.async_block_till_done() await hass.async_start() @@ -421,7 +427,8 @@ async def test_reload(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_FULL_SUPPORT, 2)]) -async def test_service_calls(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_service_calls(hass: HomeAssistant) -> None: """Test calling services.""" await hass.services.async_call( DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index 7434de74f63..bbbe22cba83 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -405,13 +405,13 @@ async def test_expand_entity_ids_does_not_return_duplicates( order=None, ) - assert ["light.bowl", "light.ceiling"] == sorted( + assert sorted( group.expand_entity_ids(hass, [test_group.entity_id, "light.Ceiling"]) - ) + ) == ["light.bowl", "light.ceiling"] - assert ["light.bowl", "light.ceiling"] == sorted( + assert sorted( group.expand_entity_ids(hass, ["light.bowl", test_group.entity_id]) - ) + ) == ["light.bowl", "light.ceiling"] async def test_expand_entity_ids_recursive(hass: HomeAssistant) -> None: @@ -439,7 +439,7 @@ async def test_expand_entity_ids_recursive(hass: HomeAssistant) -> None: async def test_expand_entity_ids_ignores_non_strings(hass: HomeAssistant) -> None: """Test that non string 
elements in lists are ignored.""" - assert [] == group.expand_entity_ids(hass, [5, True]) + assert group.expand_entity_ids(hass, [5, True]) == [] async def test_get_entity_ids(hass: HomeAssistant) -> None: @@ -460,9 +460,10 @@ async def test_get_entity_ids(hass: HomeAssistant) -> None: order=None, ) - assert ["light.bowl", "light.ceiling"] == sorted( - group.get_entity_ids(hass, test_group.entity_id) - ) + assert sorted(group.get_entity_ids(hass, test_group.entity_id)) == [ + "light.bowl", + "light.ceiling", + ] async def test_get_entity_ids_with_domain_filter(hass: HomeAssistant) -> None: @@ -482,19 +483,19 @@ async def test_get_entity_ids_with_domain_filter(hass: HomeAssistant) -> None: order=None, ) - assert ["switch.ac"] == group.get_entity_ids( + assert group.get_entity_ids( hass, mixed_group.entity_id, domain_filter="switch" - ) + ) == ["switch.ac"] async def test_get_entity_ids_with_non_existing_group_name(hass: HomeAssistant) -> None: """Test get_entity_ids with a non existing group.""" - assert [] == group.get_entity_ids(hass, "non_existing") + assert group.get_entity_ids(hass, "non_existing") == [] async def test_get_entity_ids_with_non_group_state(hass: HomeAssistant) -> None: """Test get_entity_ids with a non group state.""" - assert [] == group.get_entity_ids(hass, "switch.AC") + assert group.get_entity_ids(hass, "switch.AC") == [] async def test_group_being_init_before_first_tracked_state_is_set_to_on( @@ -620,12 +621,12 @@ async def test_expand_entity_ids_expands_nested_groups(hass: HomeAssistant) -> N order=None, ) - assert [ + assert sorted(group.expand_entity_ids(hass, ["group.group_of_groups"])) == [ "light.test_1", "light.test_2", "switch.test_1", "switch.test_2", - ] == sorted(group.expand_entity_ids(hass, ["group.group_of_groups"])) + ] async def test_set_assumed_state_based_on_tracked(hass: HomeAssistant) -> None: diff --git a/tests/components/group/test_media_player.py b/tests/components/group/test_media_player.py index 
451aae200b3..23cdd1598dd 100644 --- a/tests/components/group/test_media_player.py +++ b/tests/components/group/test_media_player.py @@ -1,14 +1,16 @@ """The tests for the Media group platform.""" import asyncio -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest from homeassistant.components.group import DOMAIN from homeassistant.components.media_player import ( + ATTR_MEDIA_ANNOUNCE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_EXTRA, ATTR_MEDIA_SEEK_POSITION, ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_TRACK, @@ -45,7 +47,7 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_platform, entity_registry as er from homeassistant.setup import async_setup_component @@ -598,3 +600,59 @@ async def test_nested_group(hass: HomeAssistant) -> None: assert hass.states.get("media_player.kitchen").state == STATE_OFF assert hass.states.get("media_player.group_1").state == STATE_OFF assert hass.states.get("media_player.nested_group").state == STATE_OFF + + +async def test_service_play_media_kwargs(hass: HomeAssistant) -> None: + """Test that kwargs get passed through on play_media service call.""" + await async_setup_component( + hass, + MEDIA_DOMAIN, + { + MEDIA_DOMAIN: [ + {"platform": "demo"}, + { + "platform": DOMAIN, + "entities": [ + "media_player.bedroom", + "media_player.living_room", + ], + }, + ] + }, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + platform = entity_platform.async_get_platforms(hass, "media_player")[0] + mp_bedroom = platform.domain_entities["media_player.bedroom"] + mp_bedroom.play_media = MagicMock() + + mp_living_room = platform.domain_entities["media_player.living_room"] + mp_living_room.play_media = MagicMock() + + await hass.services.async_call( + MEDIA_DOMAIN, + SERVICE_PLAY_MEDIA, + { + 
ATTR_ENTITY_ID: "media_player.media_group", + ATTR_MEDIA_CONTENT_TYPE: "some_type", + ATTR_MEDIA_CONTENT_ID: "some_id", + ATTR_MEDIA_ANNOUNCE: "true", + ATTR_MEDIA_EXTRA: { + "volume": 20, + }, + }, + ) + await hass.async_block_till_done() + + assert mp_bedroom.play_media.call_count == 1 + mp_bedroom.play_media.assert_called_with( + "some_type", "some_id", announce=True, extra={"volume": 20} + ) + + assert mp_living_room.play_media.call_count == 1 + mp_living_room.play_media.assert_called_with( + "some_type", "some_id", announce=True, extra={"volume": 20} + ) diff --git a/tests/components/group/test_notify.py b/tests/components/group/test_notify.py index dfd200a1542..bbf2d98b492 100644 --- a/tests/components/group/test_notify.py +++ b/tests/components/group/test_notify.py @@ -1,18 +1,44 @@ """The tests for the notify.group platform.""" -from collections.abc import Mapping +from collections.abc import Generator, Mapping from pathlib import Path from typing import Any from unittest.mock import MagicMock, call, patch +import pytest + from homeassistant import config as hass_config from homeassistant.components import notify -from homeassistant.components.group import SERVICE_RELOAD +from homeassistant.components.group import DOMAIN, SERVICE_RELOAD +from homeassistant.components.notify import ( + ATTR_MESSAGE, + ATTR_TITLE, + DOMAIN as NOTIFY_DOMAIN, + SERVICE_SEND_MESSAGE, + NotifyEntity, +) +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from tests.common import MockPlatform, get_fixture_path, mock_platform +from tests.common import ( + MockConfigEntry, + MockEntity, + MockModule, + MockPlatform, + get_fixture_path, + mock_config_flow, + mock_integration, + mock_platform, + 
setup_test_component_platform, +) class MockNotifyPlatform(MockPlatform): @@ -96,7 +122,7 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No "services": [ {"service": "test_service1"}, { - "service": "test_service2", + "action": "test_service2", "data": { "target": "unnamed device", "data": {"test": "message", "default": "default"}, @@ -176,6 +202,41 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No ) +async def test_invalid_configuration( + hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture +) -> None: + """Test failing to set up group with an invalid configuration.""" + assert await async_setup_component( + hass, + "group", + {}, + ) + await hass.async_block_till_done() + + group_setup = [ + { + "platform": "group", + "name": "My invalid notification group", + "services": [ + { + "service": "test_service1", + "action": "test_service2", + "data": { + "target": "unnamed device", + "data": {"test": "message", "default": "default"}, + }, + }, + ], + } + ] + await help_setup_notify(hass, tmp_path, {"service1": 1, "service2": 2}, group_setup) + assert not hass.services.has_service("notify", "my_invalid_notification_group") + assert ( + "Invalid config for 'notify' from integration 'group':" + " Cannot specify both 'service' and 'action'." 
in caplog.text + ) + + async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: """Verify we can reload the notify service.""" assert await async_setup_component( @@ -193,7 +254,7 @@ async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: { "name": "group_notify", "platform": "group", - "services": [{"service": "test_service1"}], + "services": [{"action": "test_service1"}], } ], ) @@ -217,3 +278,144 @@ async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: assert hass.services.has_service(notify.DOMAIN, "test_service2") assert not hass.services.has_service(notify.DOMAIN, "group_notify") assert hass.services.has_service(notify.DOMAIN, "new_group_notify") + + +class MockFlow(ConfigFlow): + """Test flow.""" + + +@pytest.fixture +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: + """Mock config flow.""" + mock_platform(hass, "test.config_flow") + + with mock_config_flow("test", MockFlow): + yield + + +class MockNotifyEntity(MockEntity, NotifyEntity): + """Mock Email notifier entity to use in tests.""" + + def __init__(self, **values: Any) -> None: + """Initialize the mock entity.""" + super().__init__(**values) + self.send_message_mock_calls = MagicMock() + + async def async_send_message(self, message: str, title: str | None = None) -> None: + """Send a notification message.""" + self.send_message_mock_calls(message, title=title) + + +async def help_async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [Platform.NOTIFY] + ) + return True + + +async def help_async_unload_entry( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Unload test config entry.""" + return await hass.config_entries.async_unload_platforms( + config_entry, [Platform.NOTIFY] + ) + + +@pytest.fixture +async def mock_notifiers( + hass: HomeAssistant, config_flow_fixture: None +) 
-> list[NotifyEntity]: + """Set up the notify entities.""" + entity = MockNotifyEntity(name="test", entity_id="notify.test") + entity2 = MockNotifyEntity(name="test2", entity_id="notify.test2") + entities = [entity, entity2] + test_entry = MockConfigEntry(domain="test") + test_entry.add_to_hass(hass) + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, NOTIFY_DOMAIN, entities, from_config_entry=True) + assert await hass.config_entries.async_setup(test_entry.entry_id) + await hass.async_block_till_done() + return entities + + +async def test_notify_entity_group( + hass: HomeAssistant, mock_notifiers: list[NotifyEntity] +) -> None: + """Test sending a message to a notify group.""" + entity, entity2 = mock_notifiers + assert entity.send_message_mock_calls.call_count == 0 + assert entity2.send_message_mock_calls.call_count == 0 + + config_entry = MockConfigEntry( + domain=DOMAIN, + options={ + "group_type": "notify", + "name": "Test Group", + "entities": ["notify.test", "notify.test2"], + "hide_members": True, + }, + title="Test Group", + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + NOTIFY_DOMAIN, + SERVICE_SEND_MESSAGE, + { + ATTR_MESSAGE: "Hello", + ATTR_TITLE: "Test notification", + ATTR_ENTITY_ID: "notify.test_group", + }, + blocking=True, + ) + + assert entity.send_message_mock_calls.call_count == 1 + assert entity.send_message_mock_calls.call_args == call( + "Hello", title="Test notification" + ) + assert entity2.send_message_mock_calls.call_count == 1 + assert entity2.send_message_mock_calls.call_args == call( + "Hello", title="Test notification" + ) + + +async def test_state_reporting(hass: HomeAssistant) -> None: + """Test sending a message to a notify group.""" + config_entry = MockConfigEntry( 
+ domain=DOMAIN, + options={ + "group_type": "notify", + "name": "Test Group", + "entities": ["notify.test", "notify.test2"], + "hide_members": True, + }, + title="Test Group", + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("notify.test_group").state == STATE_UNAVAILABLE + + hass.states.async_set("notify.test", STATE_UNAVAILABLE) + hass.states.async_set("notify.test2", STATE_UNAVAILABLE) + await hass.async_block_till_done() + assert hass.states.get("notify.test_group").state == STATE_UNAVAILABLE + + hass.states.async_set("notify.test", "2021-01-01T23:59:59.123+00:00") + hass.states.async_set("notify.test2", "2021-01-01T23:59:59.123+00:00") + await hass.async_block_till_done() + assert hass.states.get("notify.test_group").state == STATE_UNKNOWN diff --git a/tests/components/guardian/conftest.py b/tests/components/guardian/conftest.py index 87ff96aff45..61813cb1df5 100644 --- a/tests/components/guardian/conftest.py +++ b/tests/components/guardian/conftest.py @@ -1,16 +1,18 @@ """Define fixtures for Elexa Guardian tests.""" -import json +from collections.abc import AsyncGenerator, Generator +from typing import Any from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.guardian import CONF_UID, DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -23,7 +25,9 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config, unique_id): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any], unique_id: 
str +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -35,7 +39,7 @@ def config_entry_fixture(hass, config, unique_id): @pytest.fixture(name="config") -def config_fixture(hass): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_IP_ADDRESS: "192.168.1.100", @@ -44,68 +48,68 @@ def config_fixture(hass): @pytest.fixture(name="data_sensor_pair_dump", scope="package") -def data_sensor_pair_dump_fixture(): +def data_sensor_pair_dump_fixture() -> JsonObjectType: """Define data from a successful sensor_pair_dump response.""" - return json.loads(load_fixture("sensor_pair_dump_data.json", "guardian")) + return load_json_object_fixture("sensor_pair_dump_data.json", "guardian") @pytest.fixture(name="data_sensor_pair_sensor", scope="package") -def data_sensor_pair_sensor_fixture(): +def data_sensor_pair_sensor_fixture() -> JsonObjectType: """Define data from a successful sensor_pair_sensor response.""" - return json.loads(load_fixture("sensor_pair_sensor_data.json", "guardian")) + return load_json_object_fixture("sensor_pair_sensor_data.json", "guardian") @pytest.fixture(name="data_sensor_paired_sensor_status", scope="package") -def data_sensor_paired_sensor_status_fixture(): +def data_sensor_paired_sensor_status_fixture() -> JsonObjectType: """Define data from a successful sensor_paired_sensor_status response.""" - return json.loads(load_fixture("sensor_paired_sensor_status_data.json", "guardian")) + return load_json_object_fixture("sensor_paired_sensor_status_data.json", "guardian") @pytest.fixture(name="data_system_diagnostics", scope="package") -def data_system_diagnostics_fixture(): +def data_system_diagnostics_fixture() -> JsonObjectType: """Define data from a successful system_diagnostics response.""" - return json.loads(load_fixture("system_diagnostics_data.json", "guardian")) + return load_json_object_fixture("system_diagnostics_data.json", "guardian") 
@pytest.fixture(name="data_system_onboard_sensor_status", scope="package") -def data_system_onboard_sensor_status_fixture(): +def data_system_onboard_sensor_status_fixture() -> JsonObjectType: """Define data from a successful system_onboard_sensor_status response.""" - return json.loads( - load_fixture("system_onboard_sensor_status_data.json", "guardian") + return load_json_object_fixture( + "system_onboard_sensor_status_data.json", "guardian" ) @pytest.fixture(name="data_system_ping", scope="package") -def data_system_ping_fixture(): +def data_system_ping_fixture() -> JsonObjectType: """Define data from a successful system_ping response.""" - return json.loads(load_fixture("system_ping_data.json", "guardian")) + return load_json_object_fixture("system_ping_data.json", "guardian") @pytest.fixture(name="data_valve_status", scope="package") -def data_valve_status_fixture(): +def data_valve_status_fixture() -> JsonObjectType: """Define data from a successful valve_status response.""" - return json.loads(load_fixture("valve_status_data.json", "guardian")) + return load_json_object_fixture("valve_status_data.json", "guardian") @pytest.fixture(name="data_wifi_status", scope="package") -def data_wifi_status_fixture(): +def data_wifi_status_fixture() -> JsonObjectType: """Define data from a successful wifi_status response.""" - return json.loads(load_fixture("wifi_status_data.json", "guardian")) + return load_json_object_fixture("wifi_status_data.json", "guardian") @pytest.fixture(name="setup_guardian") async def setup_guardian_fixture( - hass, - config, - data_sensor_pair_dump, - data_sensor_pair_sensor, - data_sensor_paired_sensor_status, - data_system_diagnostics, - data_system_onboard_sensor_status, - data_system_ping, - data_valve_status, - data_wifi_status, -): + hass: HomeAssistant, + config: dict[str, Any], + data_sensor_pair_dump: JsonObjectType, + data_sensor_pair_sensor: JsonObjectType, + data_sensor_paired_sensor_status: JsonObjectType, + 
data_system_diagnostics: JsonObjectType, + data_system_onboard_sensor_status: JsonObjectType, + data_system_ping: JsonObjectType, + data_valve_status: JsonObjectType, + data_wifi_status: JsonObjectType, +) -> AsyncGenerator[None]: """Define a fixture to set up Guardian.""" with ( patch("aioguardian.client.Client.connect"), @@ -155,6 +159,6 @@ async def setup_guardian_fixture( @pytest.fixture(name="unique_id") -def unique_id_fixture(hass): +def unique_id_fixture() -> str: """Define a config entry unique ID fixture.""" return "guardian_3456" diff --git a/tests/components/guardian/test_config_flow.py b/tests/components/guardian/test_config_flow.py index 0f99578768a..6c06171a45f 100644 --- a/tests/components/guardian/test_config_flow.py +++ b/tests/components/guardian/test_config_flow.py @@ -1,6 +1,7 @@ """Define tests for the Elexa Guardian config flow.""" from ipaddress import ip_address +from typing import Any from unittest.mock import patch from aioguardian.errors import GuardianError @@ -22,9 +23,8 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -async def test_duplicate_error( - hass: HomeAssistant, config, config_entry, setup_guardian -) -> None: +@pytest.mark.usefixtures("config_entry", "setup_guardian") +async def test_duplicate_error(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test that errors are shown when duplicate entries are added.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config @@ -33,7 +33,7 @@ async def test_duplicate_error( assert result["reason"] == "already_configured" -async def test_connect_error(hass: HomeAssistant, config) -> None: +async def test_connect_error(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test that the config entry errors out if the device cannot connect.""" with patch( "aioguardian.client.Client.connect", @@ -58,7 +58,8 @@ async def test_get_pin_from_uid() -> None: assert pin == "3456" 
-async def test_step_user(hass: HomeAssistant, config, setup_guardian) -> None: +@pytest.mark.usefixtures("setup_guardian") +async def test_step_user(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test the user step.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -78,7 +79,8 @@ async def test_step_user(hass: HomeAssistant, config, setup_guardian) -> None: } -async def test_step_zeroconf(hass: HomeAssistant, setup_guardian) -> None: +@pytest.mark.usefixtures("setup_guardian") +async def test_step_zeroconf(hass: HomeAssistant) -> None: """Test the zeroconf step.""" zeroconf_data = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.100"), @@ -133,7 +135,8 @@ async def test_step_zeroconf_already_in_progress(hass: HomeAssistant) -> None: assert result["reason"] == "already_in_progress" -async def test_step_dhcp(hass: HomeAssistant, setup_guardian) -> None: +@pytest.mark.usefixtures("setup_guardian") +async def test_step_dhcp(hass: HomeAssistant) -> None: """Test the dhcp step.""" dhcp_data = dhcp.DhcpServiceInfo( ip="192.168.1.100", diff --git a/tests/components/guardian/test_diagnostics.py b/tests/components/guardian/test_diagnostics.py index 02b620b8e01..3b3ed21bc65 100644 --- a/tests/components/guardian/test_diagnostics.py +++ b/tests/components/guardian/test_diagnostics.py @@ -4,15 +4,16 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.guardian import DOMAIN, GuardianData from homeassistant.core import HomeAssistant +from tests.common import ANY, MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, - config_entry, + config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, - setup_guardian, + setup_guardian: None, # relies on config_entry fixture ) -> None: """Test config entry diagnostics.""" data: 
GuardianData = hass.data[DOMAIN][config_entry.entry_id] @@ -38,6 +39,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": { "valve_controller": { diff --git a/tests/components/habitica/test_init.py b/tests/components/habitica/test_init.py index 24c55c473b9..31c3a1fae39 100644 --- a/tests/components/habitica/test_init.py +++ b/tests/components/habitica/test_init.py @@ -14,7 +14,7 @@ from homeassistant.components.habitica.const import ( SERVICE_API_CALL, ) from homeassistant.const import ATTR_NAME -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import MockConfigEntry, async_capture_events from tests.test_util.aiohttp import AiohttpClientMocker @@ -24,13 +24,13 @@ TEST_USER_NAME = "test_user" @pytest.fixture -def capture_api_call_success(hass): +def capture_api_call_success(hass: HomeAssistant) -> list[Event]: """Capture api_call events.""" return async_capture_events(hass, EVENT_API_CALL_SUCCESS) @pytest.fixture -def habitica_entry(hass): +def habitica_entry(hass: HomeAssistant) -> MockConfigEntry: """Test entry for the following tests.""" entry = MockConfigEntry( domain=DOMAIN, @@ -88,6 +88,19 @@ def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: ] }, ) + aioclient_mock.get( + "https://habitica.com/api/v3/tasks/user?type=completedTodos", + json={ + "data": [ + { + "text": "this is a mock todo #5", + "id": 5, + "type": "todo", + "completed": True, + } + ] + }, + ) aioclient_mock.post( "https://habitica.com/api/v3/tasks/user", @@ -98,8 +111,9 @@ def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: return aioclient_mock +@pytest.mark.usefixtures("common_requests") async def test_entry_setup_unload( - hass: HomeAssistant, habitica_entry, common_requests + hass: HomeAssistant, habitica_entry: MockConfigEntry ) -> None: """Test integration setup 
and unload.""" assert await hass.config_entries.async_setup(habitica_entry.entry_id) @@ -112,8 +126,11 @@ async def test_entry_setup_unload( assert not hass.services.has_service(DOMAIN, SERVICE_API_CALL) +@pytest.mark.usefixtures("common_requests") async def test_service_call( - hass: HomeAssistant, habitica_entry, common_requests, capture_api_call_success + hass: HomeAssistant, + habitica_entry: MockConfigEntry, + capture_api_call_success: list[Event], ) -> None: """Test integration setup, service call and unload.""" diff --git a/tests/components/hardware/test_websocket_api.py b/tests/components/hardware/test_websocket_api.py index e8099069a9c..1379bdba120 100644 --- a/tests/components/hardware/test_websocket_api.py +++ b/tests/components/hardware/test_websocket_api.py @@ -61,7 +61,7 @@ async def test_system_status_subscription( response = await client.receive_json() assert response["success"] - VirtualMem = namedtuple("VirtualMemory", ["available", "percent", "total"]) + VirtualMem = namedtuple("VirtualMemory", ["available", "percent", "total"]) # noqa: PYI024 vmem = VirtualMem(10 * 1024**2, 50, 30 * 1024**2) with ( diff --git a/tests/components/harmony/conftest.py b/tests/components/harmony/conftest.py index fb4be73aa72..759770e9746 100644 --- a/tests/components/harmony/conftest.py +++ b/tests/components/harmony/conftest.py @@ -1,10 +1,10 @@ """Fixtures for harmony tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from aioharmony.const import ClientCallbackType import pytest -from typing_extensions import Generator from homeassistant.components.harmony.const import ACTIVITY_POWER_OFF, DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME diff --git a/tests/components/hassio/conftest.py b/tests/components/hassio/conftest.py index 7b79dfe6179..db1a07c4df3 100644 --- a/tests/components/hassio/conftest.py +++ b/tests/components/hassio/conftest.py @@ -1,5 +1,6 @@ """Fixtures for Hass.io.""" 
+from collections.abc import Generator import os import re from unittest.mock import Mock, patch @@ -7,6 +8,7 @@ from unittest.mock import Mock, patch from aiohttp.test_utils import TestClient import pytest +from homeassistant.auth.models import RefreshToken from homeassistant.components.hassio.handler import HassIO, HassioAPIError from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -19,7 +21,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def disable_security_filter(): +def disable_security_filter() -> Generator[None]: """Disable the security filter to ensure the integration is secure.""" with patch( "homeassistant.components.http.security_filter.FILTERS", @@ -29,7 +31,7 @@ def disable_security_filter(): @pytest.fixture -def hassio_env(): +def hassio_env() -> Generator[None]: """Fixture to inject hassio env.""" with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), @@ -48,11 +50,11 @@ def hassio_env(): @pytest.fixture def hassio_stubs( - hassio_env, + hassio_env: None, hass: HomeAssistant, hass_client: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, -): +) -> RefreshToken: """Create mock hassio http client.""" with ( patch( @@ -86,7 +88,7 @@ def hassio_stubs( @pytest.fixture def hassio_client( - hassio_stubs, hass: HomeAssistant, hass_client: ClientSessionGenerator + hassio_stubs: RefreshToken, hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> TestClient: """Return a Hass.io HTTP client.""" return hass.loop.run_until_complete(hass_client()) @@ -94,7 +96,9 @@ def hassio_client( @pytest.fixture def hassio_noauth_client( - hassio_stubs, hass: HomeAssistant, aiohttp_client: ClientSessionGenerator + hassio_stubs: RefreshToken, + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, ) -> TestClient: """Return a Hass.io HTTP client without auth.""" return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @@ 
-102,7 +106,9 @@ def hassio_noauth_client( @pytest.fixture async def hassio_client_supervisor( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, hassio_stubs + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + hassio_stubs: RefreshToken, ) -> TestClient: """Return an authenticated HTTP client.""" access_token = hass.auth.async_create_access_token(hassio_stubs) @@ -113,7 +119,9 @@ async def hassio_client_supervisor( @pytest.fixture -async def hassio_handler(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker): +def hassio_handler( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> Generator[HassIO]: """Create mock hassio handler.""" with patch.dict(os.environ, {"SUPERVISOR_TOKEN": SUPERVISOR_TOKEN}): yield HassIO(hass.loop, async_get_clientsession(hass), "127.0.0.1") diff --git a/tests/components/hassio/test_addon_manager.py b/tests/components/hassio/test_addon_manager.py index 55c663d66cc..6a20c6eec88 100644 --- a/tests/components/hassio/test_addon_manager.py +++ b/tests/components/hassio/test_addon_manager.py @@ -3,12 +3,12 @@ from __future__ import annotations import asyncio +from collections.abc import Generator import logging from typing import Any from unittest.mock import AsyncMock, call, patch import pytest -from typing_extensions import Generator from homeassistant.components.hassio.addon_manager import ( AddonError, diff --git a/tests/components/hassio/test_addon_panel.py b/tests/components/hassio/test_addon_panel.py index 8436b3393b9..f7407152f7e 100644 --- a/tests/components/hassio/test_addon_panel.py +++ b/tests/components/hassio/test_addon_panel.py @@ -24,8 +24,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: ) +@pytest.mark.usefixtures("hassio_env") async def test_hassio_addon_panel_startup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_env + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test startup and panel setup after event.""" 
aioclient_mock.get( @@ -68,10 +69,10 @@ async def test_hassio_addon_panel_startup( ) +@pytest.mark.usefixtures("hassio_env") async def test_hassio_addon_panel_api( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - hassio_env, hass_client: ClientSessionGenerator, ) -> None: """Test panel api after event.""" diff --git a/tests/components/hassio/test_auth.py b/tests/components/hassio/test_auth.py index 175d9061d56..ad96b58e99d 100644 --- a/tests/components/hassio/test_auth.py +++ b/tests/components/hassio/test_auth.py @@ -3,11 +3,12 @@ from http import HTTPStatus from unittest.mock import Mock, patch +from aiohttp.test_utils import TestClient + from homeassistant.auth.providers.homeassistant import InvalidAuth -from homeassistant.core import HomeAssistant -async def test_auth_success(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_auth_success(hassio_client_supervisor: TestClient) -> None: """Test no auth needed for .""" with patch( "homeassistant.auth.providers.homeassistant." @@ -23,7 +24,7 @@ async def test_auth_success(hass: HomeAssistant, hassio_client_supervisor) -> No mock_login.assert_called_with("test", "123456") -async def test_auth_fails_no_supervisor(hass: HomeAssistant, hassio_client) -> None: +async def test_auth_fails_no_supervisor(hassio_client: TestClient) -> None: """Test if only supervisor can access.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -39,7 +40,7 @@ async def test_auth_fails_no_supervisor(hass: HomeAssistant, hassio_client) -> N assert not mock_login.called -async def test_auth_fails_no_auth(hass: HomeAssistant, hassio_noauth_client) -> None: +async def test_auth_fails_no_auth(hassio_noauth_client: TestClient) -> None: """Test if only supervisor can access.""" with patch( "homeassistant.auth.providers.homeassistant." 
@@ -55,7 +56,7 @@ async def test_auth_fails_no_auth(hass: HomeAssistant, hassio_noauth_client) -> assert not mock_login.called -async def test_login_error(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_login_error(hassio_client_supervisor: TestClient) -> None: """Test no auth needed for error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -72,7 +73,7 @@ async def test_login_error(hass: HomeAssistant, hassio_client_supervisor) -> Non mock_login.assert_called_with("test", "123456") -async def test_login_no_data(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_login_no_data(hassio_client_supervisor: TestClient) -> None: """Test auth with no data -> error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -86,7 +87,7 @@ async def test_login_no_data(hass: HomeAssistant, hassio_client_supervisor) -> N assert not mock_login.called -async def test_login_no_username(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_login_no_username(hassio_client_supervisor: TestClient) -> None: """Test auth with no username in data -> error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -102,9 +103,7 @@ async def test_login_no_username(hass: HomeAssistant, hassio_client_supervisor) assert not mock_login.called -async def test_login_success_extra( - hass: HomeAssistant, hassio_client_supervisor -) -> None: +async def test_login_success_extra(hassio_client_supervisor: TestClient) -> None: """Test auth with extra data.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -125,7 +124,7 @@ async def test_login_success_extra( mock_login.assert_called_with("test", "123456") -async def test_password_success(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_password_success(hassio_client_supervisor: TestClient) -> None: """Test no auth needed for .""" with patch( "homeassistant.auth.providers.homeassistant." 
@@ -141,7 +140,7 @@ async def test_password_success(hass: HomeAssistant, hassio_client_supervisor) - mock_change.assert_called_with("test", "123456") -async def test_password_fails_no_supervisor(hass: HomeAssistant, hassio_client) -> None: +async def test_password_fails_no_supervisor(hassio_client: TestClient) -> None: """Test if only supervisor can access.""" resp = await hassio_client.post( "/api/hassio_auth/password_reset", @@ -152,9 +151,7 @@ async def test_password_fails_no_supervisor(hass: HomeAssistant, hassio_client) assert resp.status == HTTPStatus.UNAUTHORIZED -async def test_password_fails_no_auth( - hass: HomeAssistant, hassio_noauth_client -) -> None: +async def test_password_fails_no_auth(hassio_noauth_client: TestClient) -> None: """Test if only supervisor can access.""" resp = await hassio_noauth_client.post( "/api/hassio_auth/password_reset", @@ -165,7 +162,7 @@ async def test_password_fails_no_auth( assert resp.status == HTTPStatus.UNAUTHORIZED -async def test_password_no_user(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_password_no_user(hassio_client_supervisor: TestClient) -> None: """Test changing password for invalid user.""" resp = await hassio_client_supervisor.post( "/api/hassio_auth/password_reset", diff --git a/tests/components/hassio/test_discovery.py b/tests/components/hassio/test_discovery.py index 0783ee77932..305b863b3af 100644 --- a/tests/components/hassio/test_discovery.py +++ b/tests/components/hassio/test_discovery.py @@ -1,8 +1,10 @@ """Test config flow.""" +from collections.abc import Generator from http import HTTPStatus from unittest.mock import AsyncMock, Mock, patch +from aiohttp.test_utils import TestClient import pytest from homeassistant import config_entries @@ -18,7 +20,9 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(name="mock_mqtt") -async def mock_mqtt_fixture(hass): +def mock_mqtt_fixture( + hass: HomeAssistant, +) -> 
Generator[type[config_entries.ConfigFlow]]: """Mock the MQTT integration's config flow.""" mock_integration(hass, MockModule(MQTT_DOMAIN)) mock_platform(hass, f"{MQTT_DOMAIN}.config_flow", None) @@ -34,8 +38,11 @@ async def mock_mqtt_fixture(hass): yield MqttFlow +@pytest.mark.usefixtures("hassio_client") async def test_hassio_discovery_startup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + mock_mqtt: type[config_entries.ConfigFlow], ) -> None: """Test startup and discovery after event.""" aioclient_mock.get( @@ -90,8 +97,11 @@ async def test_hassio_discovery_startup( ) +@pytest.mark.usefixtures("hassio_client") async def test_hassio_discovery_startup_done( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + mock_mqtt: type[config_entries.ConfigFlow], ) -> None: """Test startup and discovery with hass discovery.""" aioclient_mock.post( @@ -159,7 +169,10 @@ async def test_hassio_discovery_startup_done( async def test_hassio_discovery_webhook( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hassio_client: TestClient, + mock_mqtt: type[config_entries.ConfigFlow], ) -> None: """Test discovery webhook.""" aioclient_mock.get( diff --git a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index c418576a802..c5fa6ff8254 100644 --- a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -365,8 +365,9 @@ async def test_api_headers( assert received_request.headers[hdrs.CONTENT_TYPE] == "application/octet-stream" +@pytest.mark.usefixtures("hassio_stubs") async def test_api_get_green_settings( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: 
AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.get( @@ -389,8 +390,9 @@ async def test_api_get_green_settings( assert aioclient_mock.call_count == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_api_set_green_settings( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -407,8 +409,9 @@ async def test_api_set_green_settings( assert aioclient_mock.call_count == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_api_get_yellow_settings( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.get( @@ -427,8 +430,9 @@ async def test_api_get_yellow_settings( assert aioclient_mock.call_count == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_api_set_yellow_settings( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -445,8 +449,9 @@ async def test_api_set_yellow_settings( assert aioclient_mock.call_count == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_api_reboot_host( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -458,7 +463,8 @@ async def test_api_reboot_host( assert aioclient_mock.call_count == 1 -async def test_send_command_invalid_command(hass: HomeAssistant, hassio_stubs) -> None: +@pytest.mark.usefixtures("hassio_stubs") +async def test_send_command_invalid_command(hass: HomeAssistant) -> None: """Test send command fails when command is invalid.""" hassio: HassIO = hass.data["hassio"] with pytest.raises(HassioAPIError): 
diff --git a/tests/components/hassio/test_http.py b/tests/components/hassio/test_http.py index a5ffb4f0d83..404c047a56c 100644 --- a/tests/components/hassio/test_http.py +++ b/tests/components/hassio/test_http.py @@ -1,9 +1,11 @@ """The tests for the hassio component.""" +from collections.abc import Generator from http import HTTPStatus from unittest.mock import patch from aiohttp import StreamReader +from aiohttp.test_utils import TestClient import pytest from tests.common import MockUser @@ -11,7 +13,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture -def mock_not_onboarded(): +def mock_not_onboarded() -> Generator[None]: """Mock that we're not onboarded.""" with patch( "homeassistant.components.hassio.http.async_is_onboarded", return_value=False @@ -20,7 +22,9 @@ def mock_not_onboarded(): @pytest.fixture -def hassio_user_client(hassio_client, hass_admin_user: MockUser): +def hassio_user_client( + hassio_client: TestClient, hass_admin_user: MockUser +) -> TestClient: """Return a Hass.io HTTP client tied to a non-admin user.""" hass_admin_user.groups = [] return hassio_client @@ -35,7 +39,7 @@ def hassio_user_client(hassio_client, hass_admin_user: MockUser): ], ) async def test_forward_request_onboarded_user_get( - hassio_user_client, aioclient_mock: AiohttpClientMocker, path: str + hassio_user_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str ) -> None: """Test fetching normal path.""" aioclient_mock.get(f"http://127.0.0.1/{path}", text="response") @@ -55,7 +59,7 @@ async def test_forward_request_onboarded_user_get( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_onboarded_user_unallowed_methods( - hassio_user_client, aioclient_mock: AiohttpClientMocker, method: str + hassio_user_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_user_client.post("/api/hassio/app/entrypoint.js") @@ 
-82,7 +86,7 @@ async def test_forward_request_onboarded_user_unallowed_methods( ], ) async def test_forward_request_onboarded_user_unallowed_paths( - hassio_user_client, + hassio_user_client: TestClient, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -105,7 +109,7 @@ async def test_forward_request_onboarded_user_unallowed_paths( ], ) async def test_forward_request_onboarded_noauth_get( - hassio_noauth_client, aioclient_mock: AiohttpClientMocker, path: str + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str ) -> None: """Test fetching normal path.""" aioclient_mock.get(f"http://127.0.0.1/{path}", text="response") @@ -125,7 +129,7 @@ async def test_forward_request_onboarded_noauth_get( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_onboarded_noauth_unallowed_methods( - hassio_noauth_client, aioclient_mock: AiohttpClientMocker, method: str + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_noauth_client.post("/api/hassio/app/entrypoint.js") @@ -152,7 +156,7 @@ async def test_forward_request_onboarded_noauth_unallowed_methods( ], ) async def test_forward_request_onboarded_noauth_unallowed_paths( - hassio_noauth_client, + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -176,7 +180,7 @@ async def test_forward_request_onboarded_noauth_unallowed_paths( ], ) async def test_forward_request_not_onboarded_get( - hassio_noauth_client, + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str, authenticated: bool, @@ -212,7 +216,7 @@ async def test_forward_request_not_onboarded_get( ], ) async def test_forward_request_not_onboarded_post( - hassio_noauth_client, + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str, mock_not_onboarded, @@ -238,7 +242,7 
@@ async def test_forward_request_not_onboarded_post( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_not_onboarded_unallowed_methods( - hassio_noauth_client, aioclient_mock: AiohttpClientMocker, method: str + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_noauth_client.post("/api/hassio/app/entrypoint.js") @@ -265,7 +269,7 @@ async def test_forward_request_not_onboarded_unallowed_methods( ], ) async def test_forward_request_not_onboarded_unallowed_paths( - hassio_noauth_client, + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -294,7 +298,7 @@ async def test_forward_request_not_onboarded_unallowed_paths( ], ) async def test_forward_request_admin_get( - hassio_client, + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str, authenticated: bool, @@ -329,7 +333,7 @@ async def test_forward_request_admin_get( ], ) async def test_forward_request_admin_post( - hassio_client, + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str, ) -> None: @@ -354,7 +358,7 @@ async def test_forward_request_admin_post( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_admin_unallowed_methods( - hassio_client, aioclient_mock: AiohttpClientMocker, method: str + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_client.post("/api/hassio/app/entrypoint.js") @@ -379,7 +383,7 @@ async def test_forward_request_admin_unallowed_methods( ], ) async def test_forward_request_admin_unallowed_paths( - hassio_client, + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -394,7 +398,7 @@ async def test_forward_request_admin_unallowed_paths( async def 
test_bad_gateway_when_cannot_find_supervisor( - hassio_client, aioclient_mock: AiohttpClientMocker + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker ) -> None: """Test we get a bad gateway error if we can't find supervisor.""" aioclient_mock.get("http://127.0.0.1/app/entrypoint.js", exc=TimeoutError) @@ -404,9 +408,8 @@ async def test_bad_gateway_when_cannot_find_supervisor( async def test_backup_upload_headers( - hassio_client, + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, - caplog: pytest.LogCaptureFixture, mock_not_onboarded, ) -> None: """Test that we forward the full header for backup upload.""" @@ -427,7 +430,7 @@ async def test_backup_upload_headers( async def test_backup_download_headers( - hassio_client, aioclient_mock: AiohttpClientMocker, mock_not_onboarded + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, mock_not_onboarded ) -> None: """Test that we forward the full header for backup download.""" content_disposition = "attachment; filename=test.tar" @@ -449,7 +452,9 @@ async def test_backup_download_headers( assert resp.headers["Content-Disposition"] == content_disposition -async def test_stream(hassio_client, aioclient_mock: AiohttpClientMocker) -> None: +async def test_stream( + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker +) -> None: """Verify that the request is a stream.""" content_type = "multipart/form-data; boundary='--webkit'" aioclient_mock.post("http://127.0.0.1/backups/new/upload") @@ -462,7 +467,7 @@ async def test_stream(hassio_client, aioclient_mock: AiohttpClientMocker) -> Non async def test_simple_get_no_stream( - hassio_client, aioclient_mock: AiohttpClientMocker + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker ) -> None: """Verify that a simple GET request is not a stream.""" aioclient_mock.get("http://127.0.0.1/app/entrypoint.js") @@ -472,7 +477,7 @@ async def test_simple_get_no_stream( async def test_entrypoint_cache_control( - hassio_client, 
aioclient_mock: AiohttpClientMocker + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker ) -> None: """Test that we return cache control for requests to the entrypoint only.""" aioclient_mock.get("http://127.0.0.1/app/entrypoint.js") diff --git a/tests/components/hassio/test_init.py b/tests/components/hassio/test_init.py index 0246b557ee4..d71e8acfbe0 100644 --- a/tests/components/hassio/test_init.py +++ b/tests/components/hassio/test_init.py @@ -486,7 +486,8 @@ async def test_warn_when_cannot_connect( assert "Not connected with the supervisor / system too busy!" in caplog.text -async def test_service_register(hassio_env, hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("hassio_env") +async def test_service_register(hass: HomeAssistant) -> None: """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") @@ -717,8 +718,9 @@ async def test_addon_service_call_with_complex_slug( await hass.services.async_call("hassio", "addon_start", {"addon": "test.a_1-2"}) +@pytest.mark.usefixtures("hassio_env") async def test_service_calls_core( - hassio_env, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -1116,8 +1118,9 @@ async def test_setup_hardware_integration( assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_get_store_addon_info( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test get store add-on info from Supervisor API.""" aioclient_mock.clear_requests() diff --git a/tests/components/hassio/test_issues.py b/tests/components/hassio/test_issues.py index ff0e4a8dd92..1a3d3d83f95 100644 --- 
a/tests/components/hassio/test_issues.py +++ b/tests/components/hassio/test_issues.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from datetime import timedelta from http import HTTPStatus import os @@ -22,13 +23,13 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -async def setup_repairs(hass): +async def setup_repairs(hass: HomeAssistant) -> None: """Set up the repairs integration.""" assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) @pytest.fixture(autouse=True) -async def fixture_supervisor_environ(): +def fixture_supervisor_environ() -> Generator[None]: """Mock os environ for supervisor.""" with patch.dict(os.environ, MOCK_ENVIRON): yield @@ -40,7 +41,7 @@ def mock_resolution_info( unhealthy: list[str] | None = None, issues: list[dict[str, str]] | None = None, suggestion_result: str = "ok", -): +) -> None: """Mock resolution/info endpoint with unsupported/unhealthy reasons and/or issues.""" aioclient_mock.get( "http://127.0.0.1/resolution/info", @@ -80,7 +81,9 @@ def mock_resolution_info( ) -def assert_repair_in_list(issues: list[dict[str, Any]], unhealthy: bool, reason: str): +def assert_repair_in_list( + issues: list[dict[str, Any]], unhealthy: bool, reason: str +) -> None: """Assert repair for unhealthy/unsupported in list.""" repair_type = "unhealthy" if unhealthy else "unsupported" assert { @@ -108,7 +111,7 @@ def assert_issue_repair_in_list( *, reference: str | None = None, placeholders: dict[str, str] | None = None, -): +) -> None: """Assert repair for unhealthy/unsupported in list.""" if reference: placeholders = (placeholders or {}) | {"reference": reference} @@ -128,11 +131,11 @@ def assert_issue_repair_in_list( } in issues +@pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test issues added 
for unhealthy systems.""" mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"]) @@ -150,11 +153,11 @@ async def test_unhealthy_issues( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="setup") +@pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test issues added for unsupported systems.""" mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"]) @@ -174,11 +177,11 @@ async def test_unsupported_issues( assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") +@pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues_add_remove( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test unhealthy issues added and removed from dispatches.""" mock_resolution_info(aioclient_mock) @@ -231,11 +234,11 @@ async def test_unhealthy_issues_add_remove( assert msg["result"] == {"issues": []} +@pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues_add_remove( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test unsupported issues added and removed from dispatches.""" mock_resolution_info(aioclient_mock) @@ -288,11 +291,11 @@ async def test_unsupported_issues_add_remove( assert msg["result"] == {"issues": []} +@pytest.mark.usefixtures("all_setup_requests") async def test_reset_issues_supervisor_restart( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """All issues reset on supervisor restart.""" mock_resolution_info( @@ -352,11 +355,11 @@ async def test_reset_issues_supervisor_restart( assert msg["result"] == {"issues": []} +@pytest.mark.usefixtures("all_setup_requests") 
async def test_reasons_added_and_removed( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test an unsupported/unhealthy reasons being added and removed at same time.""" mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"]) @@ -402,11 +405,11 @@ async def test_reasons_added_and_removed( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_ignored_unsupported_skipped( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Unsupported reasons which have an identical unhealthy reason are ignored.""" mock_resolution_info( @@ -425,11 +428,11 @@ async def test_ignored_unsupported_skipped( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="privileged") +@pytest.mark.usefixtures("all_setup_requests") async def test_new_unsupported_unhealthy_reason( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """New unsupported/unhealthy reasons result in a generic repair until next core update.""" mock_resolution_info( @@ -475,11 +478,11 @@ async def test_new_unsupported_unhealthy_reason( } in msg["result"]["issues"] +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test repairs added for supervisor issue.""" mock_resolution_info( @@ -541,12 +544,12 @@ async def test_supervisor_issues( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_initial_failure( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, - all_setup_requests, ) -> None: """Test issues manager retries after initial update failure.""" responses = [ @@ -619,11 +622,11 @@ 
async def test_supervisor_issues_initial_failure( assert len(msg["result"]["issues"]) == 1 +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_add_remove( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test supervisor issues added and removed from dispatches.""" mock_resolution_info(aioclient_mock) @@ -730,11 +733,11 @@ async def test_supervisor_issues_add_remove( assert msg["result"] == {"issues": []} +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_suggestions_fail( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test failing to get suggestions for issue skips it.""" aioclient_mock.get( @@ -776,11 +779,11 @@ async def test_supervisor_issues_suggestions_fail( assert len(msg["result"]["issues"]) == 0 +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_remove_missing_issue_without_error( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test HA skips message to remove issue that it didn't know about (sync issue).""" mock_resolution_info(aioclient_mock) @@ -810,11 +813,11 @@ async def test_supervisor_remove_missing_issue_without_error( await hass.async_block_till_done() +@pytest.mark.usefixtures("all_setup_requests") async def test_system_is_not_ready( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - all_setup_requests, ) -> None: """Ensure hassio starts despite error.""" aioclient_mock.get( @@ -832,11 +835,11 @@ async def test_system_is_not_ready( @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_detached_addon_missing( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, 
hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test supervisor issue for detached addon due to missing repository.""" mock_resolution_info(aioclient_mock) diff --git a/tests/components/hassio/test_repairs.py b/tests/components/hassio/test_repairs.py index 8d0bbfac87c..907529ec9c4 100644 --- a/tests/components/hassio/test_repairs.py +++ b/tests/components/hassio/test_repairs.py @@ -1,5 +1,6 @@ """Test supervisor repairs.""" +from collections.abc import Generator from http import HTTPStatus import os from unittest.mock import patch @@ -18,18 +19,18 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -async def fixture_supervisor_environ(): +def fixture_supervisor_environ() -> Generator[None]: """Mock os environ for supervisor.""" with patch.dict(os.environ, MOCK_ENVIRON): yield +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( @@ -103,12 +104,12 @@ async def test_supervisor_issue_repair_flow( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue with multiple suggestions.""" mock_resolution_info( @@ -197,12 +198,12 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confirmation( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: 
"""Test fix flow for supervisor issue with multiple suggestions and choice requires confirmation.""" mock_resolution_info( @@ -310,12 +311,12 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confir ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_skip_confirmation( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test confirmation skipped for fix flow for supervisor issue with one suggestion.""" mock_resolution_info( @@ -389,12 +390,12 @@ async def test_supervisor_issue_repair_flow_skip_confirmation( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow_error( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test repair flow fails when repair fails to apply.""" mock_resolution_info( @@ -461,12 +462,12 @@ async def test_mount_failed_repair_flow_error( assert issue_registry.async_get_issue(domain="hassio", issue_id="1234") +@pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test repair flow for mount_failed issue.""" mock_resolution_info( @@ -562,12 +563,12 @@ async def test_mount_failed_repair_flow( @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_docker_config_repair_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( @@ -669,12 
+670,12 @@ async def test_supervisor_issue_docker_config_repair_flow( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_multiple_data_disks( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for multiple data disks supervisor issue.""" mock_resolution_info( @@ -785,12 +786,12 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_detached_addon_removed( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( diff --git a/tests/components/hassio/test_websocket_api.py b/tests/components/hassio/test_websocket_api.py index f3be391d9b7..7d8f07bfaec 100644 --- a/tests/components/hassio/test_websocket_api.py +++ b/tests/components/hassio/test_websocket_api.py @@ -79,8 +79,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: ) +@pytest.mark.usefixtures("hassio_env") async def test_ws_subscription( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test websocket subscription.""" assert await async_setup_component(hass, "hassio", {}) @@ -116,8 +117,8 @@ async def test_ws_subscription( assert response["success"] +@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -160,8 +161,8 @@ async def test_websocket_supervisor_api( } +@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api_error( - 
hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -189,8 +190,8 @@ async def test_websocket_supervisor_api_error( assert msg["error"]["message"] == "example error" +@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api_error_without_msg( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -218,8 +219,8 @@ async def test_websocket_supervisor_api_error_without_msg( assert msg["error"]["message"] == "" +@pytest.mark.usefixtures("hassio_env") async def test_websocket_non_admin_user( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, diff --git a/tests/components/hddtemp/test_sensor.py b/tests/components/hddtemp/test_sensor.py index 2bd0519c12c..15740ffa0ea 100644 --- a/tests/components/hddtemp/test_sensor.py +++ b/tests/components/hddtemp/test_sensor.py @@ -60,7 +60,7 @@ REFERENCE = { class TelnetMock: """Mock class for the telnetlib.Telnet object.""" - def __init__(self, host, port, timeout=0): + def __init__(self, host, port, timeout=0) -> None: """Initialize Telnet object.""" self.host = host self.port = port diff --git a/tests/components/hdmi_cec/__init__.py b/tests/components/hdmi_cec/__init__.py index 5cf8ed18b6a..1d51fa0cc50 100644 --- a/tests/components/hdmi_cec/__init__.py +++ b/tests/components/hdmi_cec/__init__.py @@ -8,7 +8,7 @@ from homeassistant.components.hdmi_cec import KeyPressCommand, KeyReleaseCommand class MockHDMIDevice: """Mock of a HDMIDevice.""" - def __init__(self, *, logical_address, **values): + def __init__(self, *, logical_address, **values) -> None: """Mock of a HDMIDevice.""" self.set_update_callback = Mock(side_effect=self._set_update_callback) self.logical_address = logical_address diff --git a/tests/components/hdmi_cec/conftest.py b/tests/components/hdmi_cec/conftest.py index 0756ea639b7..058525f2448 100644 --- 
a/tests/components/hdmi_cec/conftest.py +++ b/tests/components/hdmi_cec/conftest.py @@ -1,16 +1,22 @@ """Tests for the HDMI-CEC component.""" -from unittest.mock import patch +from collections.abc import Callable, Coroutine, Generator +from typing import Any +from unittest.mock import MagicMock, patch import pytest from homeassistant.components.hdmi_cec import DOMAIN from homeassistant.const import EVENT_HOMEASSISTANT_START +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +type CecEntityCreator = Callable[..., Coroutine[Any, Any, None]] +type HDMINetworkCreator = Callable[..., Coroutine[Any, Any, MagicMock]] + @pytest.fixture(name="mock_cec_adapter", autouse=True) -def mock_cec_adapter_fixture(): +def mock_cec_adapter_fixture() -> Generator[MagicMock]: """Mock CecAdapter. Always mocked as it imports the `cec` library which is part of `libcec`. @@ -22,7 +28,7 @@ def mock_cec_adapter_fixture(): @pytest.fixture(name="mock_hdmi_network") -def mock_hdmi_network_fixture(): +def mock_hdmi_network_fixture() -> Generator[MagicMock]: """Mock HDMINetwork.""" with patch( "homeassistant.components.hdmi_cec.HDMINetwork", autospec=True @@ -31,7 +37,9 @@ def mock_hdmi_network_fixture(): @pytest.fixture -def create_hdmi_network(hass, mock_hdmi_network): +def create_hdmi_network( + hass: HomeAssistant, mock_hdmi_network: MagicMock +) -> HDMINetworkCreator: """Create an initialized mock hdmi_network.""" async def hdmi_network(config=None): @@ -49,7 +57,7 @@ def create_hdmi_network(hass, mock_hdmi_network): @pytest.fixture -def create_cec_entity(hass): +def create_cec_entity(hass: HomeAssistant) -> CecEntityCreator: """Create a CecEntity.""" async def cec_entity(hdmi_network, device): diff --git a/tests/components/hdmi_cec/test_init.py b/tests/components/hdmi_cec/test_init.py index 1263078c196..1b1861b0ef8 100644 --- a/tests/components/hdmi_cec/test_init.py +++ b/tests/components/hdmi_cec/test_init.py @@ -1,7 +1,9 @@ """Tests for the 
HDMI-CEC component.""" +from collections.abc import Generator from datetime import timedelta -from unittest.mock import ANY, PropertyMock, call, patch +from typing import Any +from unittest.mock import ANY, MagicMock, PropertyMock, call, patch import pytest import voluptuous as vol @@ -28,6 +30,7 @@ from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow from . import assert_key_press_release +from .conftest import HDMINetworkCreator from tests.common import ( MockEntity, @@ -38,7 +41,7 @@ from tests.common import ( @pytest.fixture(name="mock_tcp_adapter") -def mock_tcp_adapter_fixture(): +def mock_tcp_adapter_fixture() -> Generator[MagicMock]: """Mock TcpAdapter.""" with patch( "homeassistant.components.hdmi_cec.TcpAdapter", autospec=True @@ -88,7 +91,9 @@ def mock_tcp_adapter_fixture(): ), ], ) -def test_parse_mapping_physical_address(mapping, expected) -> None: +def test_parse_mapping_physical_address( + mapping: dict[str, Any], expected: list[tuple[str, list[int]]] +) -> None: """Test the device config mapping function.""" result = parse_mapping(mapping) result = [ @@ -101,7 +106,7 @@ def test_parse_mapping_physical_address(mapping, expected) -> None: async def test_setup_cec_adapter( - hass: HomeAssistant, mock_cec_adapter, mock_hdmi_network + hass: HomeAssistant, mock_cec_adapter: MagicMock, mock_hdmi_network: MagicMock ) -> None: """Test the general setup of this component.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -125,7 +130,7 @@ async def test_setup_cec_adapter( @pytest.mark.parametrize("osd_name", ["test", "test_a_long_name"]) async def test_setup_set_osd_name( - hass: HomeAssistant, osd_name, mock_cec_adapter + hass: HomeAssistant, osd_name: str, mock_cec_adapter: MagicMock ) -> None: """Test the setup of this component with the `osd_name` config setting.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {"osd_name": osd_name}}) @@ -134,7 +139,7 @@ async def test_setup_set_osd_name( async 
def test_setup_tcp_adapter( - hass: HomeAssistant, mock_tcp_adapter, mock_hdmi_network + hass: HomeAssistant, mock_tcp_adapter: MagicMock, mock_hdmi_network: MagicMock ) -> None: """Test the setup of this component with the TcpAdapter (`host` config setting).""" host = "0.0.0.0" @@ -161,7 +166,9 @@ async def test_setup_tcp_adapter( # Test services -async def test_service_power_on(hass: HomeAssistant, create_hdmi_network) -> None: +async def test_service_power_on( + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator +) -> None: """Test the power on service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -175,7 +182,9 @@ async def test_service_power_on(hass: HomeAssistant, create_hdmi_network) -> Non mock_hdmi_network_instance.power_on.assert_called_once_with() -async def test_service_standby(hass: HomeAssistant, create_hdmi_network) -> None: +async def test_service_standby( + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator +) -> None: """Test the standby service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -190,7 +199,7 @@ async def test_service_standby(hass: HomeAssistant, create_hdmi_network) -> None async def test_service_select_device_alias( - hass: HomeAssistant, create_hdmi_network + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator ) -> None: """Test the select device service call with a known alias.""" mock_hdmi_network_instance = await create_hdmi_network( @@ -220,7 +229,7 @@ class MockCecEntity(MockEntity): async def test_service_select_device_entity( - hass: HomeAssistant, create_hdmi_network + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator ) -> None: """Test the select device service call with an existing entity.""" platform = MockEntityPlatform(hass) @@ -244,7 +253,7 @@ async def test_service_select_device_entity( async def test_service_select_device_physical_address( - hass: HomeAssistant, create_hdmi_network + hass: HomeAssistant, create_hdmi_network: 
HDMINetworkCreator ) -> None: """Test the select device service call with a raw physical address.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -262,7 +271,9 @@ async def test_service_select_device_physical_address( assert str(physical_address) == "1.1.0.0" -async def test_service_update_devices(hass: HomeAssistant, create_hdmi_network) -> None: +async def test_service_update_devices( + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator +) -> None: """Test the update devices service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -295,11 +306,11 @@ async def test_service_update_devices(hass: HomeAssistant, create_hdmi_network) @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_x_times( hass: HomeAssistant, - create_hdmi_network, + create_hdmi_network: HDMINetworkCreator, count: int, call_count: int, - direction, - key, + direction: str, + key: int, ) -> None: """Test the volume service call with steps.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -320,7 +331,10 @@ async def test_service_volume_x_times( @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_press( - hass: HomeAssistant, create_hdmi_network, direction, key + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + direction: str, + key: int, ) -> None: """Test the volume service call with press attribute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -341,7 +355,10 @@ async def test_service_volume_press( @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_release( - hass: HomeAssistant, create_hdmi_network, direction, key + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + direction: str, + key: int, ) -> None: """Test the volume service call with release attribute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -376,7 
+393,7 @@ async def test_service_volume_release( ], ) async def test_service_volume_mute( - hass: HomeAssistant, create_hdmi_network, attr, key + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator, attr: str, key: int ) -> None: """Test the volume service call with mute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -447,7 +464,10 @@ async def test_service_volume_mute( ], ) async def test_service_send_command( - hass: HomeAssistant, create_hdmi_network, data, expected + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + data: dict[str, Any], + expected: str, ) -> None: """Test the send command service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -470,10 +490,10 @@ async def test_service_send_command( ) async def test_watchdog( hass: HomeAssistant, - create_hdmi_network, - mock_cec_adapter, - adapter_initialized_value, - watchdog_actions, + create_hdmi_network: HDMINetworkCreator, + mock_cec_adapter: MagicMock, + adapter_initialized_value: bool, + watchdog_actions: int, ) -> None: """Test the watchdog when adapter is down/up.""" adapter_initialized = PropertyMock(return_value=adapter_initialized_value) diff --git a/tests/components/hdmi_cec/test_media_player.py b/tests/components/hdmi_cec/test_media_player.py index 988279a235f..f193651c305 100644 --- a/tests/components/hdmi_cec/test_media_player.py +++ b/tests/components/hdmi_cec/test_media_player.py @@ -1,6 +1,7 @@ """Tests for the HDMI-CEC media player platform.""" from collections.abc import Callable +from typing import Any from pycec.const import ( DEVICE_TYPE_NAMES, @@ -55,6 +56,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from . 
import MockHDMIDevice, assert_key_press_release +from .conftest import CecEntityCreator, HDMINetworkCreator type AssertState = Callable[[str, str], None] @@ -91,7 +93,9 @@ def assert_state_fixture(request: pytest.FixtureRequest) -> AssertState: async def test_load_platform( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test that media_player entity is loaded.""" hdmi_network = await create_hdmi_network(config={"platform": "media_player"}) @@ -107,7 +111,10 @@ async def test_load_platform( @pytest.mark.parametrize("platform", [{}, {"platform": "switch"}]) async def test_load_types( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, platform + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + platform: dict[str, Any], ) -> None: """Test that media_player entity is loaded when types is set.""" config = platform | {"types": {"hdmi_cec.hdmi_4": "media_player"}} @@ -133,8 +140,8 @@ async def test_load_types( async def test_service_on( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, assert_state: AssertState, ) -> None: """Test that media_player triggers on `on` service.""" @@ -160,8 +167,8 @@ async def test_service_on( async def test_service_off( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, assert_state: AssertState, ) -> None: """Test that media_player triggers on `off` service.""" @@ -260,10 +267,10 @@ async def test_service_off( ) async def test_supported_features( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, - type_id, - expected_features, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + type_id: int, + 
expected_features: MPEF, ) -> None: """Test that features load as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -289,11 +296,11 @@ async def test_supported_features( ) async def test_volume_services( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, - service, - extra_data, - key, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + service: str, + extra_data: dict[str, Any] | None, + key: int, ) -> None: """Test volume related commands.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -324,7 +331,11 @@ async def test_volume_services( ], ) async def test_track_change_services( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, service, key + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + service: str, + key: int, ) -> None: """Test track change related commands.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -360,8 +371,8 @@ async def test_track_change_services( ) async def test_playback_services( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, assert_state: AssertState, service: str, key: int, @@ -390,8 +401,8 @@ async def test_playback_services( @pytest.mark.xfail(reason="PLAY feature isn't enabled") async def test_play_pause_service( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, assert_state: AssertState, ) -> None: """Test play pause service.""" @@ -452,11 +463,11 @@ async def test_play_pause_service( ) async def test_update_state( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, - type_id, - update_data, - expected_state, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + type_id: int, + update_data: 
dict[str, Any], + expected_state: str, ) -> None: """Test state updates work as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -502,7 +513,11 @@ async def test_update_state( ], ) async def test_starting_state( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, data, expected_state + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + data: dict[str, Any], + expected_state: str, ) -> None: """Test starting states are set as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -516,7 +531,9 @@ async def test_starting_state( reason="The code only sets the state to unavailable, doesn't set the `_attr_available` to false." ) async def test_unavailable_status( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test entity goes into unavailable status when expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) diff --git a/tests/components/hdmi_cec/test_switch.py b/tests/components/hdmi_cec/test_switch.py index d54d6cc103b..6ef6ce835ce 100644 --- a/tests/components/hdmi_cec/test_switch.py +++ b/tests/components/hdmi_cec/test_switch.py @@ -17,11 +17,15 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from . 
import MockHDMIDevice +from .conftest import CecEntityCreator, HDMINetworkCreator @pytest.mark.parametrize("config", [{}, {"platform": "switch"}]) async def test_load_platform( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, config + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + config, ) -> None: """Test that switch entity is loaded.""" hdmi_network = await create_hdmi_network(config=config) @@ -36,7 +40,9 @@ async def test_load_platform( async def test_load_types( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test that switch entity is loaded when types is set.""" config = {"platform": "media_player", "types": {"hdmi_cec.hdmi_3": "switch"}} @@ -61,7 +67,9 @@ async def test_load_types( async def test_service_on( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test that switch triggers on `on` service.""" hdmi_network = await create_hdmi_network() @@ -81,7 +89,9 @@ async def test_service_on( async def test_service_off( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test that switch triggers on `off` service.""" hdmi_network = await create_hdmi_network() @@ -118,8 +128,8 @@ async def test_service_off( ) async def test_device_status_change( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, power_status, expected_state, status, @@ -154,7 +164,11 @@ async def test_device_status_change( ], ) async def test_friendly_name( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, device_values, expected + 
hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + device_values, + expected, ) -> None: """Test friendly name setup.""" hdmi_network = await create_hdmi_network() @@ -207,8 +221,8 @@ async def test_friendly_name( ) async def test_extra_state_attributes( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, device_values, expected_attributes, ) -> None: @@ -239,8 +253,8 @@ async def test_extra_state_attributes( ) async def test_icon( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, device_type, expected_icon, ) -> None: @@ -254,7 +268,9 @@ async def test_icon( async def test_unavailable_status( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test entity goes into unavailable status when expected.""" hdmi_network = await create_hdmi_network() diff --git a/tests/components/here_travel_time/test_config_flow.py b/tests/components/here_travel_time/test_config_flow.py index eb958991c71..ea3de64ed0c 100644 --- a/tests/components/here_travel_time/test_config_flow.py +++ b/tests/components/here_travel_time/test_config_flow.py @@ -6,17 +6,20 @@ from here_routing import HERERoutingError, HERERoutingUnauthorizedError import pytest from homeassistant import config_entries +from homeassistant.components.here_travel_time.config_flow import DEFAULT_OPTIONS from homeassistant.components.here_travel_time.const import ( CONF_ARRIVAL_TIME, CONF_DEPARTURE_TIME, CONF_DESTINATION_ENTITY_ID, CONF_DESTINATION_LATITUDE, CONF_DESTINATION_LONGITUDE, + CONF_ORIGIN_ENTITY_ID, CONF_ORIGIN_LATITUDE, CONF_ORIGIN_LONGITUDE, CONF_ROUTE_MODE, DOMAIN, ROUTE_MODE_FASTEST, + TRAVEL_MODE_BICYCLE, TRAVEL_MODE_CAR, TRAVEL_MODE_PUBLIC, 
) @@ -47,7 +50,9 @@ def bypass_setup_fixture(): @pytest.fixture(name="user_step_result") -async def user_step_result_fixture(hass: HomeAssistant) -> FlowResultType: +async def user_step_result_fixture( + hass: HomeAssistant, +) -> config_entries.ConfigFlowResult: """Provide the result of a completed user step.""" init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -65,7 +70,9 @@ async def user_step_result_fixture(hass: HomeAssistant) -> FlowResultType: @pytest.fixture(name="option_init_result") -async def option_init_result_fixture(hass: HomeAssistant) -> FlowResultType: +async def option_init_result_fixture( + hass: HomeAssistant, +) -> config_entries.ConfigFlowResult: """Provide the result of a completed options init step.""" entry = MockConfigEntry( domain=DOMAIN, @@ -94,8 +101,8 @@ async def option_init_result_fixture(hass: HomeAssistant) -> FlowResultType: @pytest.fixture(name="origin_step_result") async def origin_step_result_fixture( - hass: HomeAssistant, user_step_result: FlowResultType -) -> FlowResultType: + hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult +) -> config_entries.ConfigFlowResult: """Provide the result of a completed origin by coordinates step.""" origin_menu_result = await hass.config_entries.flow.async_configure( user_step_result["flow_id"], {"next_step_id": "origin_coordinates"} @@ -142,7 +149,7 @@ async def test_step_user(hass: HomeAssistant, menu_options) -> None: @pytest.mark.usefixtures("valid_response") async def test_step_origin_coordinates( - hass: HomeAssistant, user_step_result: FlowResultType + hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -165,7 +172,7 @@ async def test_step_origin_coordinates( @pytest.mark.usefixtures("valid_response") async def test_step_origin_entity( - hass: HomeAssistant, 
user_step_result: FlowResultType + hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -182,7 +189,7 @@ async def test_step_origin_entity( @pytest.mark.usefixtures("valid_response") async def test_step_destination_coordinates( - hass: HomeAssistant, origin_step_result: FlowResultType + hass: HomeAssistant, origin_step_result: config_entries.ConfigFlowResult ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -216,7 +223,7 @@ async def test_step_destination_coordinates( @pytest.mark.usefixtures("valid_response") async def test_step_destination_entity( hass: HomeAssistant, - origin_step_result: FlowResultType, + origin_step_result: config_entries.ConfigFlowResult, ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -245,6 +252,105 @@ async def test_step_destination_entity( } +@pytest.mark.usefixtures("valid_response") +async def test_reconfigure_destination_entity(hass: HomeAssistant) -> None: + """Test reconfigure flow when choosing a destination entity.""" + origin_entity_selector_result = await do_common_reconfiguration_steps(hass) + menu_result = await hass.config_entries.flow.async_configure( + origin_entity_selector_result["flow_id"], {"next_step_id": "destination_entity"} + ) + assert menu_result["type"] is FlowResultType.FORM + + destination_entity_selector_result = await hass.config_entries.flow.async_configure( + menu_result["flow_id"], + {"destination_entity_id": "zone.home"}, + ) + assert destination_entity_selector_result["type"] is FlowResultType.ABORT + assert destination_entity_selector_result["reason"] == "reconfigure_successful" + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.data == { + CONF_NAME: "test", + CONF_API_KEY: API_KEY, + CONF_ORIGIN_ENTITY_ID: "zone.home", + 
CONF_DESTINATION_ENTITY_ID: "zone.home", + CONF_MODE: TRAVEL_MODE_BICYCLE, + } + + +@pytest.mark.usefixtures("valid_response") +async def test_reconfigure_destination_coordinates(hass: HomeAssistant) -> None: + """Test reconfigure flow when choosing destination coordinates.""" + origin_entity_selector_result = await do_common_reconfiguration_steps(hass) + menu_result = await hass.config_entries.flow.async_configure( + origin_entity_selector_result["flow_id"], + {"next_step_id": "destination_coordinates"}, + ) + assert menu_result["type"] is FlowResultType.FORM + + destination_entity_selector_result = await hass.config_entries.flow.async_configure( + menu_result["flow_id"], + { + "destination": { + "latitude": 43.0, + "longitude": -80.3, + "radius": 5.0, + } + }, + ) + assert destination_entity_selector_result["type"] is FlowResultType.ABORT + assert destination_entity_selector_result["reason"] == "reconfigure_successful" + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.data == { + CONF_NAME: "test", + CONF_API_KEY: API_KEY, + CONF_ORIGIN_ENTITY_ID: "zone.home", + CONF_DESTINATION_LATITUDE: 43.0, + CONF_DESTINATION_LONGITUDE: -80.3, + CONF_MODE: TRAVEL_MODE_BICYCLE, + } + + +async def do_common_reconfiguration_steps(hass: HomeAssistant) -> None: + """Walk through common flow steps for reconfiguring.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="0123456789", + data=DEFAULT_CONFIG, + options=DEFAULT_OPTIONS, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + reconfigure_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "user" + + user_step_result = await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + { + CONF_API_KEY: 
API_KEY, + CONF_MODE: TRAVEL_MODE_BICYCLE, + CONF_NAME: "test", + }, + ) + await hass.async_block_till_done() + menu_result = await hass.config_entries.flow.async_configure( + user_step_result["flow_id"], {"next_step_id": "origin_entity"} + ) + return await hass.config_entries.flow.async_configure( + menu_result["flow_id"], + {"origin_entity_id": "zone.home"}, + ) + + async def test_form_invalid_auth(hass: HomeAssistant) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( @@ -322,7 +428,7 @@ async def test_options_flow(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("valid_response") async def test_options_flow_arrival_time_step( - hass: HomeAssistant, option_init_result: FlowResultType + hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult ) -> None: """Test the options flow arrival time type.""" menu_result = await hass.config_entries.options.async_configure( @@ -346,7 +452,7 @@ async def test_options_flow_arrival_time_step( @pytest.mark.usefixtures("valid_response") async def test_options_flow_departure_time_step( - hass: HomeAssistant, option_init_result: FlowResultType + hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult ) -> None: """Test the options flow departure time type.""" menu_result = await hass.config_entries.options.async_configure( @@ -370,7 +476,7 @@ async def test_options_flow_departure_time_step( @pytest.mark.usefixtures("valid_response") async def test_options_flow_no_time_step( - hass: HomeAssistant, option_init_result: FlowResultType + hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult ) -> None: """Test the options flow arrival time type.""" menu_result = await hass.config_entries.options.async_configure( diff --git a/tests/components/history/conftest.py b/tests/components/history/conftest.py index 075909dfd63..dd10fccccdc 100644 --- a/tests/components/history/conftest.py +++ b/tests/components/history/conftest.py @@ -13,7 +13,7 
@@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/history/test_websocket_api.py b/tests/components/history/test_websocket_api.py index e5c33d0e7af..717840c6b05 100644 --- a/tests/components/history/test_websocket_api.py +++ b/tests/components/history/test_websocket_api.py @@ -2,7 +2,7 @@ import asyncio from datetime import timedelta -from unittest.mock import patch +from unittest.mock import ANY, patch from freezegun import freeze_time import pytest @@ -10,8 +10,9 @@ import pytest from homeassistant.components import history from homeassistant.components.history import websocket_api from homeassistant.components.recorder import Recorder -from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE -from homeassistant.core import HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.event import async_track_state_change_event from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -2072,3 +2073,84 @@ async def test_history_stream_historical_only_with_start_time_state_past( "id": 1, "type": "event", } + + +async def test_history_stream_live_chained_events( + hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator +) -> None: + """Test history stream with history with a chained event.""" + now = dt_util.utcnow() + await async_setup_component(hass, "history", {}) + + await async_wait_recording_done(hass) + hass.states.async_set("binary_sensor.is_light", STATE_OFF) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "history/stream", + "entity_ids": ["binary_sensor.is_light"], + "start_time": now.isoformat(), + 
"include_start_time_state": True, + "significant_changes_only": False, + "no_attributes": False, + "minimal_response": True, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 1 + assert response["type"] == "result" + + response = await client.receive_json() + + assert response == { + "event": { + "end_time": ANY, + "start_time": ANY, + "states": { + "binary_sensor.is_light": [ + { + "a": {}, + "lu": ANY, + "s": STATE_OFF, + }, + ], + }, + }, + "id": 1, + "type": "event", + } + + await async_recorder_block_till_done(hass) + + @callback + def auto_off_listener(event): + hass.states.async_set("binary_sensor.is_light", STATE_OFF) + + async_track_state_change_event(hass, ["binary_sensor.is_light"], auto_off_listener) + + hass.states.async_set("binary_sensor.is_light", STATE_ON) + + response = await client.receive_json() + assert response == { + "event": { + "states": { + "binary_sensor.is_light": [ + { + "lu": ANY, + "s": STATE_ON, + "a": {}, + }, + { + "lu": ANY, + "s": STATE_OFF, + "a": {}, + }, + ], + }, + }, + "id": 1, + "type": "event", + } diff --git a/tests/components/history_stats/conftest.py b/tests/components/history_stats/conftest.py new file mode 100644 index 00000000000..f8075179e94 --- /dev/null +++ b/tests/components/history_stats/conftest.py @@ -0,0 +1,93 @@ +"""Fixtures for the History stats integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from datetime import timedelta +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.history_stats.const import ( + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.util 
import dt as dt_util + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically patch history stats setup.""" + with patch( + "homeassistant.components.history_stats.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. + + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the History stats integration in Home Assistant.""" + start_time = dt_util.utcnow() - timedelta(minutes=60) + t0 = start_time + timedelta(minutes=20) + t1 = t0 + timedelta(minutes=10) + t2 = t1 + timedelta(minutes=10) + + def _fake_states(*args, **kwargs): + return { + "binary_sensor.test_monitored": [ + State("binary_sensor.test_monitored", "off", last_changed=start_time), + State("binary_sensor.test_monitored", "on", last_changed=t0), + State("binary_sensor.test_monitored", "off", last_changed=t1), + State("binary_sensor.test_monitored", "on", last_changed=t2), + ] + } + + config_entry = MockConfigEntry( + domain=DOMAIN, + title=DEFAULT_NAME, + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + await async_update_entity(hass, "sensor.test") + await hass.async_block_till_done() + + 
return config_entry diff --git a/tests/components/history_stats/test_config_flow.py b/tests/components/history_stats/test_config_flow.py new file mode 100644 index 00000000000..a695a06995e --- /dev/null +++ b/tests/components/history_stats/test_config_flow.py @@ -0,0 +1,195 @@ +"""Test the History stats config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant import config_entries +from homeassistant.components.history_stats.const import ( + CONF_DURATION, + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.components.recorder import Recorder +from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form( + recorder_mock: Recorder, hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + } 
+ + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow( + recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_END: "{{ utcnow() }}", + CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_END: "{{ utcnow() }}", + CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, + } + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 1 + + state = hass.states.get("sensor.unnamed_statistics") + assert state is not None + + +async def test_validation_options( + recorder_mock: Recorder, hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test validation.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, 
+ }, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "options" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "only_two_keys_allowed"} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_entry_already_exist( + recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + }, + ) + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/history_stats/test_init.py b/tests/components/history_stats/test_init.py new file mode 100644 index 00000000000..4cd999ba31c --- /dev/null +++ b/tests/components/history_stats/test_init.py @@ -0,0 +1,118 @@ +"""Test History stats 
component setup process.""" + +from __future__ import annotations + +from homeassistant.components.history_stats.const import ( + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN as HISTORY_STATS_DOMAIN, +) +from homeassistant.components.recorder import Recorder +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_unload_entry( + recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device_cleaning( + recorder_mock: Recorder, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the cleaning of devices linked to the helper History stats.""" + + # Source entity device config entry + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + + # Device entry of the source entity + source_device1_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("binary_sensor", "identifier_test1")}, + connections={("mac", "30:31:32:33:34:01")}, + ) + + # Source entity registry + source_entity = entity_registry.async_get_or_create( + "binary_sensor", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device1_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("binary_sensor.test_source") is not None + + # Configure the configuration entry for History stats + history_stats_config_entry = MockConfigEntry( + 
data={}, + domain=HISTORY_STATS_DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_source", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + title="History stats", + ) + history_stats_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the History stats sensor + history_stats_entity = entity_registry.async_get("sensor.history_stats") + assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id + + # Device entry incorrectly linked to History stats config entry + device_registry.async_get_or_create( + config_entry_id=history_stats_config_entry.entry_id, + identifiers={("sensor", "identifier_test2")}, + connections={("mac", "30:31:32:33:34:02")}, + ) + device_registry.async_get_or_create( + config_entry_id=history_stats_config_entry.entry_id, + identifiers={("sensor", "identifier_test3")}, + connections={("mac", "30:31:32:33:34:03")}, + ) + await hass.async_block_till_done() + + # Before reloading the config entry, two devices are expected to be linked + devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( + history_stats_config_entry.entry_id + ) + assert len(devices_before_reload) == 3 + + # Config entry reload + await hass.config_entries.async_reload(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the History stats sensor + history_stats_entity = entity_registry.async_get("sensor.history_stats") + assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id + + # After reloading the config entry, only one linked device is expected + devices_after_reload = 
device_registry.devices.get_devices_for_config_entry_id( + history_stats_config_entry.entry_id + ) + assert len(devices_after_reload) == 1 + + assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index c18fb2ff784..f86c04b3e5b 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -8,20 +8,33 @@ import pytest import voluptuous as vol from homeassistant import config as hass_config -from homeassistant.components.history_stats import DOMAIN +from homeassistant.components.history_stats.const import ( + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN, +) from homeassistant.components.history_stats.sensor import ( PLATFORM_SCHEMA as SENSOR_SCHEMA, ) from homeassistant.components.recorder import Recorder -from homeassistant.const import ATTR_DEVICE_CLASS, SERVICE_RELOAD, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + CONF_ENTITY_ID, + CONF_NAME, + CONF_STATE, + CONF_TYPE, + SERVICE_RELOAD, + STATE_UNKNOWN, +) import homeassistant.core as ha from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, get_fixture_path +from tests.common import MockConfigEntry, async_fire_time_changed, get_fixture_path from tests.components.recorder.common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator @@ -48,6 +61,15 @@ async def test_setup(recorder_mock: Recorder, hass: HomeAssistant) -> None: assert state.state == "0.0" +async def test_setup_config_entry( + recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: 
MockConfigEntry +) -> None: + """Test the history statistics sensor setup from a config entry.""" + + state = hass.states.get("sensor.unnamed_statistics") + assert state.state == "2" + + async def test_setup_multiple_states( recorder_mock: Recorder, hass: HomeAssistant ) -> None: @@ -1727,3 +1749,50 @@ async def test_unique_id( entity_registry.async_get("sensor.test").unique_id == "some_history_stats_unique_id" ) + + +async def test_device_id( + recorder_mock: Recorder, + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test for source entity device for History stats.""" + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + source_device_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("sensor", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + source_entity = entity_registry.async_get_or_create( + "binary_sensor", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("binary_sensor.test_source") is not None + + history_stats_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_source", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + title="History stats", + ) + history_stats_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + history_stats_entity = entity_registry.async_get("sensor.history_stats") + assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id diff --git a/tests/components/hlk_sw16/test_config_flow.py 
b/tests/components/hlk_sw16/test_config_flow.py index 6a758ec5066..2225ea1b79a 100644 --- a/tests/components/hlk_sw16/test_config_flow.py +++ b/tests/components/hlk_sw16/test_config_flow.py @@ -12,7 +12,7 @@ from homeassistant.data_entry_flow import FlowResultType class MockSW16Client: """Class to mock the SW16Client client.""" - def __init__(self, fail): + def __init__(self, fail) -> None: """Initialise client with failure modes.""" self.fail = fail self.disconnect_callback = None diff --git a/tests/components/holiday/conftest.py b/tests/components/holiday/conftest.py index 1ac595aa1f9..005756695fe 100644 --- a/tests/components/holiday/conftest.py +++ b/tests/components/holiday/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Holiday tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/holiday/test_calendar.py b/tests/components/holiday/test_calendar.py index b5067a467ed..db58b7b1f73 100644 --- a/tests/components/holiday/test_calendar.py +++ b/tests/components/holiday/test_calendar.py @@ -3,6 +3,7 @@ from datetime import datetime, timedelta from freezegun.api import FrozenDateTimeFactory +import pytest from homeassistant.components.calendar import ( DOMAIN as CALENDAR_DOMAIN, @@ -17,12 +18,18 @@ from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry, async_fire_time_changed +@pytest.mark.parametrize( + "time_zone", ["Asia/Tokyo", "Europe/Berlin", "America/Chicago", "US/Hawaii"] +) async def test_holiday_calendar_entity( hass: HomeAssistant, freezer: FrozenDateTimeFactory, + time_zone: str, ) -> None: """Test HolidayCalendarEntity functionality.""" - freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=dt_util.UTC)) + await hass.config.async_set_time_zone(time_zone) + zone = await dt_util.async_get_time_zone(time_zone) + freezer.move_to(datetime(2023, 1, 1, 0, 1, 1, tzinfo=zone)) # New Years Day 
config_entry = MockConfigEntry( domain=DOMAIN, @@ -64,8 +71,16 @@ async def test_holiday_calendar_entity( assert state is not None assert state.state == "on" + freezer.move_to( + datetime(2023, 1, 2, 0, 1, 1, tzinfo=zone) + ) # Day after New Years Day + + state = hass.states.get("calendar.united_states_ak") + assert state is not None + assert state.state == "on" + # Test holidays for the next year - freezer.move_to(datetime(2023, 12, 31, 12, tzinfo=dt_util.UTC)) + freezer.move_to(datetime(2023, 12, 31, 12, tzinfo=zone)) response = await hass.services.async_call( CALENDAR_DOMAIN, @@ -91,12 +106,18 @@ async def test_holiday_calendar_entity( } +@pytest.mark.parametrize( + "time_zone", ["Asia/Tokyo", "Europe/Berlin", "America/Chicago", "US/Hawaii"] +) async def test_default_language( hass: HomeAssistant, freezer: FrozenDateTimeFactory, + time_zone: str, ) -> None: """Test default language.""" - freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=dt_util.UTC)) + await hass.config.async_set_time_zone(time_zone) + zone = await dt_util.async_get_time_zone(time_zone) + freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=zone)) config_entry = MockConfigEntry( domain=DOMAIN, @@ -162,12 +183,18 @@ async def test_default_language( } +@pytest.mark.parametrize( + "time_zone", ["Asia/Tokyo", "Europe/Berlin", "America/Chicago", "US/Hawaii"] +) async def test_no_language( hass: HomeAssistant, freezer: FrozenDateTimeFactory, + time_zone: str, ) -> None: """Test language defaults to English if language not exist.""" - freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=dt_util.UTC)) + await hass.config.async_set_time_zone(time_zone) + zone = await dt_util.async_get_time_zone(time_zone) + freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=zone)) config_entry = MockConfigEntry( domain=DOMAIN, @@ -203,12 +230,18 @@ async def test_no_language( } +@pytest.mark.parametrize( + "time_zone", ["Asia/Tokyo", "Europe/Berlin", "America/Chicago", "US/Hawaii"] +) async def test_no_next_event( hass: HomeAssistant, 
freezer: FrozenDateTimeFactory, + time_zone: str, ) -> None: """Test if there is no next event.""" - freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=dt_util.UTC)) + await hass.config.async_set_time_zone(time_zone) + zone = await dt_util.async_get_time_zone(time_zone) + freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=zone)) config_entry = MockConfigEntry( domain=DOMAIN, @@ -221,7 +254,7 @@ async def test_no_next_event( await hass.async_block_till_done() # Move time to out of reach - freezer.move_to(datetime(dt_util.now().year + 5, 1, 1, 12, tzinfo=dt_util.UTC)) + freezer.move_to(datetime(dt_util.now().year + 5, 1, 1, 12, tzinfo=zone)) async_fire_time_changed(hass) state = hass.states.get("calendar.germany") @@ -230,15 +263,22 @@ async def test_no_next_event( assert state.attributes == {"friendly_name": "Germany"} +@pytest.mark.parametrize( + "time_zone", ["Asia/Tokyo", "Europe/Berlin", "America/Chicago", "US/Hawaii"] +) async def test_language_not_exist( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + time_zone: str, ) -> None: """Test when language doesn't exist it will fallback to country default language.""" + await hass.config.async_set_time_zone(time_zone) + zone = await dt_util.async_get_time_zone(time_zone) hass.config.language = "nb" # Norweigan language "Norks bokmål" hass.config.country = "NO" - freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=dt_util.UTC)) + freezer.move_to(datetime(2023, 1, 1, 12, tzinfo=zone)) config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index f4c19320826..c8137a044a1 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -94,7 +94,7 @@ async def bypass_throttle(hass: HomeAssistant, config_entry: MockConfigEntry): @pytest.fixture(name="bypass_throttle") -def mock_bypass_throttle(): +def mock_bypass_throttle() -> 
Generator[None]: """Fixture to bypass the throttle decorator in __init__.""" with patch( "homeassistant.components.home_connect.update_all_devices", @@ -122,7 +122,7 @@ async def mock_integration_setup( @pytest.fixture(name="get_appliances") -def mock_get_appliances() -> Generator[None, Any, None]: +def mock_get_appliances() -> Generator[MagicMock]: """Mock ConfigEntryAuth parent (HomeAssistantAPI) method.""" with patch( "homeassistant.components.home_connect.api.ConfigEntryAuth.get_appliances", @@ -152,15 +152,18 @@ def mock_appliance(request: pytest.FixtureRequest) -> MagicMock: @pytest.fixture(name="problematic_appliance") -def mock_problematic_appliance() -> Mock: +def mock_problematic_appliance(request: pytest.FixtureRequest) -> Mock: """Fixture to mock a problematic Appliance.""" app = "Washer" + if hasattr(request, "param") and request.param: + app = request.param + mock = Mock( - spec=HomeConnectAppliance, + autospec=HomeConnectAppliance, **MOCK_APPLIANCES_PROPERTIES.get(app), ) mock.name = app - setattr(mock, "status", {}) + type(mock).status = PropertyMock(return_value={}) mock.get_programs_active.side_effect = HomeConnectError mock.get_programs_available.side_effect = HomeConnectError mock.start_program.side_effect = HomeConnectError diff --git a/tests/components/home_connect/fixtures/programs-available.json b/tests/components/home_connect/fixtures/programs-available.json index b99ee5c6add..bba1a5d2721 100644 --- a/tests/components/home_connect/fixtures/programs-available.json +++ b/tests/components/home_connect/fixtures/programs-available.json @@ -26,7 +26,7 @@ ] } }, - "DishWasher": { + "Dishwasher": { "data": { "programs": [ { diff --git a/tests/components/home_connect/fixtures/settings.json b/tests/components/home_connect/fixtures/settings.json index 5dc0f0e0599..eb6a5f5ff98 100644 --- a/tests/components/home_connect/fixtures/settings.json +++ b/tests/components/home_connect/fixtures/settings.json @@ -95,5 +95,21 @@ } ] } + }, + "Washer": { + "data": 
{ + "settings": [ + { + "key": "BSH.Common.Setting.PowerState", + "value": "BSH.Common.EnumType.PowerState.On", + "type": "BSH.Common.EnumType.PowerState" + }, + { + "key": "BSH.Common.Setting.ChildLock", + "value": false, + "type": "Boolean" + } + ] + } } } diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index d21aec35045..39502507439 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -1,7 +1,6 @@ """Tests for home_connect binary_sensor entities.""" -from collections.abc import Awaitable, Callable, Generator -from typing import Any +from collections.abc import Awaitable, Callable from unittest.mock import MagicMock, Mock import pytest @@ -26,9 +25,8 @@ def platforms() -> list[str]: return [Platform.BINARY_SENSOR] +@pytest.mark.usefixtures("bypass_throttle") async def test_binary_sensors( - bypass_throttle: Generator[None, Any, None], - hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -51,10 +49,10 @@ async def test_binary_sensors( ("", "unavailable"), ], ) +@pytest.mark.usefixtures("bypass_throttle") async def test_binary_sensors_door_states( expected: str, state: str, - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 616a82edebc..02d9bcaa208 100644 --- a/tests/components/home_connect/test_init.py +++ b/tests/components/home_connect/test_init.py @@ -1,16 +1,16 @@ """Test the integration init functionality.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable from typing import Any from unittest.mock import MagicMock, Mock +from freezegun.api import FrozenDateTimeFactory import 
pytest from requests import HTTPError import requests_mock from homeassistant.components.home_connect.const import DOMAIN, OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -117,8 +117,8 @@ SERVICE_APPLIANCE_METHOD_MAPPING = { } +@pytest.mark.usefixtures("bypass_throttle") async def test_api_setup( - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -137,9 +137,38 @@ async def test_api_setup( assert config_entry.state == ConfigEntryState.NOT_LOADED -async def test_exception_handling( - bypass_throttle: Generator[None, Any, None], +async def test_update_throttle( + appliance: Mock, + freezer: FrozenDateTimeFactory, hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test to check Throttle functionality.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + get_appliances_call_count = get_appliances.call_count + + # First re-load after 1 minute is not blocked. + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state == ConfigEntryState.NOT_LOADED + freezer.tick(60) + assert await hass.config_entries.async_setup(config_entry.entry_id) + assert get_appliances.call_count == get_appliances_call_count + 1 + + # Second re-load is blocked by Throttle. 
+ assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state == ConfigEntryState.NOT_LOADED + freezer.tick(59) + assert await hass.config_entries.async_setup(config_entry.entry_id) + assert get_appliances.call_count == get_appliances_call_count + 1 + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_exception_handling( integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, setup_credentials: None, @@ -154,8 +183,8 @@ async def test_exception_handling( @pytest.mark.parametrize("token_expiration_time", [12345]) +@pytest.mark.usefixtures("bypass_throttle") async def test_token_refresh_success( - bypass_throttle: Generator[None, Any, None], integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, @@ -192,44 +221,8 @@ async def test_token_refresh_success( ) -async def test_setup( - hass: HomeAssistant, - integration_setup: Callable[[], Awaitable[bool]], - config_entry: MockConfigEntry, - setup_credentials: None, -) -> None: - """Test setting up the integration.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - assert await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state == ConfigEntryState.NOT_LOADED - - -async def test_update_throttle( - appliance: Mock, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - platforms: list[Platform], - get_appliances: MagicMock, -) -> None: - """Test to check Throttle functionality.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - assert get_appliances.call_count == 0 - - +@pytest.mark.usefixtures("bypass_throttle") async def 
test_http_error( - bypass_throttle: Generator[None, Any, None], - hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -247,9 +240,9 @@ async def test_http_error( "service_call", SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, ) +@pytest.mark.usefixtures("bypass_throttle") async def test_services( service_call: list[dict[str, Any]], - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, @@ -279,8 +272,8 @@ async def test_services( ) +@pytest.mark.usefixtures("bypass_throttle") async def test_services_exception( - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], diff --git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py new file mode 100644 index 00000000000..8d918dc5815 --- /dev/null +++ b/tests/components/home_connect/test_light.py @@ -0,0 +1,298 @@ +"""Tests for home_connect light entities.""" + +from collections.abc import Awaitable, Callable, Generator +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ( + BSH_AMBIENT_LIGHT_BRIGHTNESS, + BSH_AMBIENT_LIGHT_CUSTOM_COLOR, + BSH_AMBIENT_LIGHT_ENABLED, + COOKING_LIGHTING, + COOKING_LIGHTING_BRIGHTNESS, +) +from homeassistant.components.light import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry, load_json_object_fixture + +TEST_HC_APP = "Hood" + +SETTINGS_STATUS = { + setting.pop("key"): setting + 
for setting in load_json_object_fixture("home_connect/settings.json") + .get(TEST_HC_APP) + .get("data") + .get("settings") +} + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.LIGHT] + + +async def test_light( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test switch entities.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "service_data", "state", "appliance"), + [ + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": True, + }, + }, + SERVICE_TURN_ON, + {}, + STATE_ON, + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": True, + }, + COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {"brightness": 200}, + STATE_ON, + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: {"value": False}, + COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_OFF, + {}, + STATE_OFF, + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": None, + }, + COOKING_LIGHTING_BRIGHTNESS: None, + }, + SERVICE_TURN_ON, + {}, + STATE_UNKNOWN, + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: { + "value": True, + }, + BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {"brightness": 200}, + STATE_ON, + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: {"value": False}, + BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_OFF, + {}, + STATE_OFF, + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: {"value": True}, + 
BSH_AMBIENT_LIGHT_CUSTOM_COLOR: {}, + }, + SERVICE_TURN_ON, + {}, + STATE_ON, + "Hood", + ), + ], + indirect=["appliance"], +) +async def test_light_functionality( + entity_id: str, + status: dict, + service: str, + service_data: dict, + state: str, + appliance: Mock, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test light functionality.""" + appliance.status.update(SETTINGS_STATUS) + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + appliance.status.update(status) + service_data["entity_id"] = entity_id + await hass.services.async_call( + DOMAIN, + service, + service_data, + blocking=True, + ) + assert hass.states.is_state(entity_id, state) + + +@pytest.mark.parametrize( + ( + "entity_id", + "status", + "service", + "service_data", + "mock_attr", + "attr_side_effect", + "problematic_appliance", + ), + [ + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": False, + }, + }, + SERVICE_TURN_ON, + {}, + "set_setting", + [HomeConnectError, HomeConnectError], + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": True, + }, + COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {"brightness": 200}, + "set_setting", + [HomeConnectError, HomeConnectError], + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: {"value": False}, + }, + SERVICE_TURN_OFF, + {}, + "set_setting", + [HomeConnectError, HomeConnectError], + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: { + "value": True, + }, + BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {}, + "set_setting", + [HomeConnectError, HomeConnectError], + "Hood", + ), + ( + "light.hood_ambientlight", + { + 
BSH_AMBIENT_LIGHT_ENABLED: { + "value": True, + }, + BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {"brightness": 200}, + "set_setting", + [HomeConnectError, None, HomeConnectError, HomeConnectError], + "Hood", + ), + ], + indirect=["problematic_appliance"], +) +async def test_switch_exception_handling( + entity_id: str, + status: dict, + service: str, + service_data: dict, + mock_attr: str, + attr_side_effect: list, + problematic_appliance: Mock, + bypass_throttle: Generator[None], + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + config_entry: MockConfigEntry, + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test light exception handling.""" + problematic_appliance.status.update(SETTINGS_STATUS) + problematic_appliance.set_setting.side_effect = attr_side_effect + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + # Assert that an exception is called. 
+ with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + problematic_appliance.status.update(status) + service_data["entity_id"] = entity_id + await hass.services.async_call(DOMAIN, service, service_data, blocking=True) + assert getattr(problematic_appliance, mock_attr).call_count == len(attr_side_effect) diff --git a/tests/components/home_connect/test_sensor.py b/tests/components/home_connect/test_sensor.py index f30f017d6d3..661ac62403f 100644 --- a/tests/components/home_connect/test_sensor.py +++ b/tests/components/home_connect/test_sensor.py @@ -1,7 +1,6 @@ """Tests for home_connect sensor entities.""" -from collections.abc import Awaitable, Callable, Generator -from typing import Any +from collections.abc import Awaitable, Callable from unittest.mock import MagicMock, Mock from freezegun.api import FrozenDateTimeFactory @@ -69,9 +68,8 @@ def platforms() -> list[str]: return [Platform.SENSOR] +@pytest.mark.usefixtures("bypass_throttle") async def test_sensors( - bypass_throttle: Generator[None, Any, None], - hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -131,12 +129,12 @@ ENTITY_ID_STATES = { ) ), ) +@pytest.mark.usefixtures("bypass_throttle") async def test_event_sensors( appliance: Mock, states: tuple, event_run: dict, freezer: FrozenDateTimeFactory, - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -180,10 +178,10 @@ ENTITY_ID_EDGE_CASE_STATES = [ @pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) +@pytest.mark.usefixtures("bypass_throttle") async def test_remaining_prog_time_edge_cases( appliance: Mock, freezer: FrozenDateTimeFactory, - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], diff --git 
a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py new file mode 100644 index 00000000000..c6a7b384036 --- /dev/null +++ b/tests/components/home_connect/test_switch.py @@ -0,0 +1,216 @@ +"""Tests for home_connect sensor entities.""" + +from collections.abc import Awaitable, Callable, Generator +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ( + BSH_ACTIVE_PROGRAM, + BSH_CHILD_LOCK_STATE, + BSH_OPERATION_STATE, + BSH_POWER_OFF, + BSH_POWER_ON, + BSH_POWER_STATE, +) +from homeassistant.components.switch import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry, load_json_object_fixture + +SETTINGS_STATUS = { + setting.pop("key"): setting + for setting in load_json_object_fixture("home_connect/settings.json") + .get("Washer") + .get("data") + .get("settings") +} + +PROGRAM = "LaundryCare.Dryer.Program.Mix" + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.SWITCH] + + +async def test_switches( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test switch entities.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "state"), + [ + ( + "switch.washer_program_mix", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + 
SERVICE_TURN_ON, + STATE_ON, + ), + ( + "switch.washer_program_mix", + {BSH_ACTIVE_PROGRAM: {"value": ""}}, + SERVICE_TURN_OFF, + STATE_OFF, + ), + ( + "switch.washer_power", + {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, + SERVICE_TURN_ON, + STATE_ON, + ), + ( + "switch.washer_power", + {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, + SERVICE_TURN_OFF, + STATE_OFF, + ), + ( + "switch.washer_power", + { + BSH_POWER_STATE: {"value": ""}, + BSH_OPERATION_STATE: { + "value": "BSH.Common.EnumType.OperationState.Inactive" + }, + }, + SERVICE_TURN_OFF, + STATE_OFF, + ), + ( + "switch.washer_childlock", + {BSH_CHILD_LOCK_STATE: {"value": True}}, + SERVICE_TURN_ON, + STATE_ON, + ), + ( + "switch.washer_childlock", + {BSH_CHILD_LOCK_STATE: {"value": False}}, + SERVICE_TURN_OFF, + STATE_OFF, + ), + ], +) +async def test_switch_functionality( + entity_id: str, + status: dict, + service: str, + state: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test switch functionality.""" + appliance.status.update(SETTINGS_STATUS) + appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + appliance.status.update(status) + await hass.services.async_call( + DOMAIN, service, {"entity_id": entity_id}, blocking=True + ) + assert hass.states.is_state(entity_id, state) + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "mock_attr"), + [ + ( + "switch.washer_program_mix", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + SERVICE_TURN_ON, + "start_program", + ), + ( + "switch.washer_program_mix", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + SERVICE_TURN_OFF, + "stop_program", + ), + ( + 
"switch.washer_power", + {BSH_POWER_STATE: {"value": ""}}, + SERVICE_TURN_ON, + "set_setting", + ), + ( + "switch.washer_power", + {BSH_POWER_STATE: {"value": ""}}, + SERVICE_TURN_OFF, + "set_setting", + ), + ( + "switch.washer_childlock", + {BSH_CHILD_LOCK_STATE: {"value": ""}}, + SERVICE_TURN_ON, + "set_setting", + ), + ( + "switch.washer_childlock", + {BSH_CHILD_LOCK_STATE: {"value": ""}}, + SERVICE_TURN_OFF, + "set_setting", + ), + ], +) +async def test_switch_exception_handling( + entity_id: str, + status: dict, + service: str, + mock_attr: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + config_entry: MockConfigEntry, + setup_credentials: None, + problematic_appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test exception handling.""" + problematic_appliance.get_programs_available.side_effect = None + problematic_appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + # Assert that an exception is called. 
+ with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + problematic_appliance.status.update(status) + await hass.services.async_call( + DOMAIN, service, {"entity_id": entity_id}, blocking=True + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/homeassistant/snapshots/test_exposed_entities.ambr b/tests/components/homeassistant/snapshots/test_exposed_entities.ambr index 55b95186b49..9c93655cd4e 100644 --- a/tests/components/homeassistant/snapshots/test_exposed_entities.ambr +++ b/tests/components/homeassistant/snapshots/test_exposed_entities.ambr @@ -13,13 +13,3 @@ dict({ }) # --- -# name: test_listeners - dict({ - 'light.kitchen': dict({ - 'should_expose': True, - }), - 'switch.test_unique1': mappingproxy({ - 'should_expose': True, - }), - }) -# --- diff --git a/tests/components/homeassistant/test_exposed_entities.py b/tests/components/homeassistant/test_exposed_entities.py index b3ff6594509..1f1955c2f82 100644 --- a/tests/components/homeassistant/test_exposed_entities.py +++ b/tests/components/homeassistant/test_exposed_entities.py @@ -103,7 +103,7 @@ async def test_load_preferences(hass: HomeAssistant) -> None: """Make sure that we can load/save data correctly.""" assert await async_setup_component(hass, "homeassistant", {}) - exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] assert exposed_entities._assistants == {} exposed_entities.async_set_expose_new_entities("test1", True) @@ -139,7 +139,7 @@ async def test_expose_entity( entry1 = entity_registry.async_get_or_create("test", "test", "unique1") entry2 = entity_registry.async_get_or_create("test", "test", "unique2") - exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] assert len(exposed_entities.entities) == 0 # Set options @@ -196,7 +196,7 @@ async def test_expose_entity_unknown( assert 
await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() - exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] assert len(exposed_entities.entities) == 0 # Set options @@ -442,7 +442,7 @@ async def test_should_expose( ) # Check with a different assistant - exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] exposed_entities.async_set_expose_new_entities("cloud.no_default_expose", False) assert ( async_should_expose( @@ -545,7 +545,7 @@ async def test_listeners( """Make sure we call entity listeners.""" assert await async_setup_component(hass, "homeassistant", {}) - exposed_entities: ExposedEntities = hass.data[DATA_EXPOSED_ENTITIES] + exposed_entities = hass.data[DATA_EXPOSED_ENTITIES] callbacks = [] exposed_entities.async_listen_entity_updates("test1", lambda: callbacks.append(1)) diff --git a/tests/components/homeassistant/test_repairs.py b/tests/components/homeassistant/test_repairs.py new file mode 100644 index 00000000000..c7a1b3e762e --- /dev/null +++ b/tests/components/homeassistant/test_repairs.py @@ -0,0 +1,160 @@ +"""Test the Homeassistant repairs module.""" + +from http import HTTPStatus + +from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +async def test_integration_not_found_confirm_step( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, +) -> None: + 
"""Test the integration_not_found issue confirm step.""" + assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {}) + await hass.async_block_till_done() + assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) + await hass.async_block_till_done() + assert await async_setup_component(hass, "test1", {}) is False + await hass.async_block_till_done() + entry1 = MockConfigEntry(domain="test1") + entry1.add_to_hass(hass) + entry2 = MockConfigEntry(domain="test1") + entry2.add_to_hass(hass) + issue_id = "integration_not_found.test1" + + await async_process_repairs_platforms(hass) + ws_client = await hass_ws_client(hass) + http_client = await hass_client() + + # Assert the issue is present + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + issue = msg["result"]["issues"][0] + assert issue["issue_id"] == issue_id + assert issue["translation_placeholders"] == {"domain": "test1"} + + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": HOMEASSISTANT_DOMAIN, "issue_id": issue_id} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "init" + assert data["description_placeholders"] == {"domain": "test1"} + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + + # Show menu + resp = await http_client.post(url) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "menu" + + # Apply fix + resp = await http_client.post(url, json={"next_step_id": "confirm"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "create_entry" + + await hass.async_block_till_done() + + assert hass.config_entries.async_get_entry(entry1.entry_id) is None + assert hass.config_entries.async_get_entry(entry2.entry_id) is None + + # Assert the issue is 
resolved + await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 0 + + +async def test_integration_not_found_ignore_step( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the integration_not_found issue ignore step.""" + assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {}) + await hass.async_block_till_done() + assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) + await hass.async_block_till_done() + assert await async_setup_component(hass, "test1", {}) is False + await hass.async_block_till_done() + entry1 = MockConfigEntry(domain="test1") + entry1.add_to_hass(hass) + issue_id = "integration_not_found.test1" + + await async_process_repairs_platforms(hass) + ws_client = await hass_ws_client(hass) + http_client = await hass_client() + + # Assert the issue is present + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + issue = msg["result"]["issues"][0] + assert issue["issue_id"] == issue_id + assert issue["translation_placeholders"] == {"domain": "test1"} + + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": HOMEASSISTANT_DOMAIN, "issue_id": issue_id} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "init" + assert data["description_placeholders"] == {"domain": "test1"} + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + + # Show menu + resp = await http_client.post(url) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "menu" + + # Apply fix + resp = await http_client.post(url, json={"next_step_id": "ignore"}) + + assert resp.status == 
HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "abort" + assert data["reason"] == "issue_ignored" + + await hass.async_block_till_done() + + assert hass.config_entries.async_get_entry(entry1.entry_id) + + # Assert the issue is resolved + await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + assert msg["result"]["issues"][0].get("dismissed_version") is not None diff --git a/tests/components/homeassistant/triggers/test_event.py b/tests/components/homeassistant/triggers/test_event.py index b7bf8e5e7f3..293a9007175 100644 --- a/tests/components/homeassistant/triggers/test_event.py +++ b/tests/components/homeassistant/triggers/test_event.py @@ -7,28 +7,24 @@ from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_O from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_mock_service, mock_component +from tests.common import mock_component @pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - -@pytest.fixture -def context_with_user(): +def context_with_user() -> Context: """Create a context with default user_id.""" return Context(user_id="test_user_id") @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") -async def test_if_fires_on_event(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_if_fires_on_event( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test the firing of events.""" context = Context() @@ -48,8 +44,8 @@ async def test_if_fires_on_event(hass: HomeAssistant, calls: list[ServiceCall]) hass.bus.async_fire("test_event", context=context) 
await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id await hass.services.async_call( automation.DOMAIN, @@ -57,15 +53,16 @@ async def test_if_fires_on_event(hass: HomeAssistant, calls: list[ServiceCall]) {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[0].data["id"] == 0 async def test_if_fires_on_templated_event( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events.""" context = Context() @@ -84,8 +81,8 @@ async def test_if_fires_on_templated_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id await hass.services.async_call( automation.DOMAIN, @@ -93,14 +90,15 @@ async def test_if_fires_on_templated_event( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_multiple_events( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events.""" context = Context() @@ -123,13 +121,13 @@ async def test_if_fires_on_multiple_events( await hass.async_block_till_done() hass.bus.async_fire("test2_event", context=context) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].context.parent_id == context.id - assert 
calls[1].context.parent_id == context.id + assert len(service_calls) == 2 + assert service_calls[0].context.parent_id == context.id + assert service_calls[1].context.parent_id == context.id async def test_if_fires_on_event_extra_data( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events still matches with event data and context.""" assert await async_setup_component( @@ -146,7 +144,7 @@ async def test_if_fires_on_event_extra_data( "test_event", {"extra_key": "extra_data"}, context=context_with_user ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.services.async_call( automation.DOMAIN, @@ -154,14 +152,15 @@ async def test_if_fires_on_event_extra_data( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_event_with_data_and_context( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events with data and context.""" assert await async_setup_component( @@ -189,7 +188,7 @@ async def test_if_fires_on_event_with_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.bus.async_fire( "test_event", @@ -197,18 +196,18 @@ async def test_if_fires_on_event_with_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 # No new call + assert len(service_calls) == 1 # No new call hass.bus.async_fire( "test_event", {"some_attr": "some_value", "another": "value", "second_attr": "second_value"}, ) await 
hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_event_with_templated_data_and_context( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events with templated data and context.""" assert await async_setup_component( @@ -241,7 +240,7 @@ async def test_if_fires_on_event_with_templated_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.bus.async_fire( "test_event", @@ -249,18 +248,18 @@ async def test_if_fires_on_event_with_templated_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 # No new call + assert len(service_calls) == 1 # No new call hass.bus.async_fire( "test_event", {"attr_1": "milk", "another": "value", "attr_2": "beer"}, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_event_with_empty_data_and_context_config( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events with empty data and context config. @@ -289,11 +288,11 @@ async def test_if_fires_on_event_with_empty_data_and_context_config( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_event_with_nested_data( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events with nested data. 
@@ -319,11 +318,11 @@ async def test_if_fires_on_event_with_nested_data( "test_event", {"parent_attr": {"some_attr": "some_value", "another": "value"}} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_event_with_empty_data( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events with empty data. @@ -345,11 +344,11 @@ async def test_if_fires_on_event_with_empty_data( ) hass.bus.async_fire("test_event", {"any_attr": {}}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_sample_zha_event( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events with a sample zha event. @@ -390,7 +389,7 @@ async def test_if_fires_on_sample_zha_event( }, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.bus.async_fire( "zha_event", @@ -404,11 +403,11 @@ async def test_if_fires_on_sample_zha_event( }, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_not_fires_if_event_data_not_matches( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test firing of event if no data match.""" assert await async_setup_component( @@ -428,11 +427,11 @@ async def test_if_not_fires_if_event_data_not_matches( hass.bus.async_fire("test_event", {"some_attr": "some_other_value"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_if_event_context_not_matches( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test firing of event if 
no context match.""" assert await async_setup_component( @@ -452,11 +451,11 @@ async def test_if_not_fires_if_event_context_not_matches( hass.bus.async_fire("test_event", {}, context=context_with_user) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_multiple_user_ids( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of event when the trigger has multiple user ids. @@ -481,11 +480,11 @@ async def test_if_fires_on_multiple_user_ids( hass.bus.async_fire("test_event", {}, context=context_with_user) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_event_data_with_list( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the (non)firing of event when the data schema has lists.""" assert await async_setup_component( @@ -506,17 +505,17 @@ async def test_event_data_with_list( hass.bus.async_fire("test_event", {"some_attr": [1, 2]}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # don't match a single value hass.bus.async_fire("test_event", {"some_attr": 1}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # don't match a containing list hass.bus.async_fire("test_event", {"some_attr": [1, 2, 3]}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -524,7 +523,7 @@ async def test_event_data_with_list( ) async def test_state_reported_event( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, event_type: str | list[str], ) -> None: @@ -547,7 +546,7 @@ async def test_state_reported_event( 
hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 assert ( "Unnamed automation failed to setup triggers and has been disabled: Can't " "listen to state_reported in event trigger for dictionary value @ " @@ -556,7 +555,9 @@ async def test_state_reported_event( async def test_templated_state_reported_event( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + service_calls: list[ServiceCall], + caplog: pytest.LogCaptureFixture, ) -> None: """Test triggering on state reported event.""" context = Context() @@ -578,7 +579,7 @@ async def test_templated_state_reported_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 assert ( "Got error 'Can't listen to state_reported in event trigger' " "when setting up triggers for automation 0" in caplog.text diff --git a/tests/components/homeassistant/triggers/test_numeric_state.py b/tests/components/homeassistant/triggers/test_numeric_state.py index 59cd7e2a2a7..85882274fec 100644 --- a/tests/components/homeassistant/triggers/test_numeric_state.py +++ b/tests/components/homeassistant/triggers/test_numeric_state.py @@ -23,22 +23,11 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import ( - assert_setup_component, - async_fire_time_changed, - async_mock_service, - mock_component, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import assert_setup_component, async_fire_time_changed, mock_component @pytest.fixture(autouse=True) -async def setup_comp(hass): +async def setup_comp(hass: HomeAssistant) -> None: """Initialize 
components.""" mock_component(hass, "group") await async_setup_component( @@ -63,7 +52,7 @@ async def setup_comp(hass): "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_entity_removal( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with removed entity.""" hass.states.async_set("test.entity", 11) @@ -86,14 +75,14 @@ async def test_if_not_fires_on_entity_removal( # Entity disappears hass.states.async_remove("test.entity") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -120,8 +109,8 @@ async def test_if_fires_on_entity_change_below( # 9 is below 10 hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id # Set above 12 so the automation will fire again hass.states.async_set("test.entity", 12) @@ -132,10 +121,12 @@ async def test_if_fires_on_entity_change_below( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 + hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[0].data["id"] == 0 @pytest.mark.parametrize( @@ -144,7 +135,7 @@ async def test_if_fires_on_entity_change_below( async def 
test_if_fires_on_entity_change_below_uuid( hass: HomeAssistant, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], below: int | str, ) -> None: """Test the firing with changed entity specified by registry entry id.""" @@ -177,8 +168,8 @@ async def test_if_fires_on_entity_change_below_uuid( # 9 is below 10 hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id # Set above 12 so the automation will fire again hass.states.async_set("test.entity", 12) @@ -189,17 +180,19 @@ async def test_if_fires_on_entity_change_below_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 + hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[0].data["id"] == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_over_to_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -223,14 +216,14 @@ async def test_if_fires_on_entity_change_over_to_below( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entities_change_over_to_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | 
str ) -> None: """Test the firing with changed entities.""" hass.states.async_set("test.entity_1", 11) @@ -255,17 +248,17 @@ async def test_if_fires_on_entities_change_over_to_below( # 9 is below 10 hass.states.async_set("test.entity_1", 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_entity_change_below_to_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" context = Context() @@ -290,25 +283,25 @@ async def test_if_not_fires_on_entity_change_below_to_below( # 9 is below 10 so this should fire hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id # already below so should not fire again hass.states.async_set("test.entity", 5) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # still below so should not fire again hass.states.async_set("test.entity", 3) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_below_fires_on_entity_change_to_equal( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -332,14 +325,14 @@ 
async def test_if_not_below_fires_on_entity_change_to_equal( # 10 is not below 10 so this should not fire again hass.states.async_set("test.entity", 10) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_initial_entity_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing when starting with a match.""" hass.states.async_set("test.entity", 9) @@ -363,14 +356,14 @@ async def test_if_not_fires_on_initial_entity_below( # Do not fire on first update when initial state was already below hass.states.async_set("test.entity", 8) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( "above", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_initial_entity_above( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing when starting with a match.""" hass.states.async_set("test.entity", 11) @@ -394,14 +387,14 @@ async def test_if_not_fires_on_initial_entity_above( # Do not fire on first update when initial state was already above hass.states.async_set("test.entity", 12) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( "above", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_above( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 9) @@ -424,11 
+417,11 @@ async def test_if_fires_on_entity_change_above( # 11 is above 10 hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_unavailable_at_startup( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing with changed entity at startup.""" assert await async_setup_component( @@ -448,12 +441,12 @@ async def test_if_fires_on_entity_unavailable_at_startup( # 11 is above 10 hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_fires_on_entity_change_below_to_above( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -478,12 +471,12 @@ async def test_if_fires_on_entity_change_below_to_above( # 11 is above 10 and 9 is below hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_above_to_above( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -508,17 +501,17 @@ async def test_if_not_fires_on_entity_change_above_to_above( # 12 is above 10 so this should fire hass.states.async_set("test.entity", 12) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # already above, should not fire again hass.states.async_set("test.entity", 15) await hass.async_block_till_done() - 
assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_not_above_fires_on_entity_change_to_equal( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -543,7 +536,7 @@ async def test_if_not_above_fires_on_entity_change_to_equal( # 10 is not above 10 so this should not fire again hass.states.async_set("test.entity", 10) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( @@ -556,7 +549,10 @@ async def test_if_not_above_fires_on_entity_change_to_equal( ], ) async def test_if_fires_on_entity_change_below_range( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -580,7 +576,7 @@ async def test_if_fires_on_entity_change_below_range( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -593,7 +589,10 @@ async def test_if_fires_on_entity_change_below_range( ], ) async def test_if_fires_on_entity_change_below_above_range( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test the firing with changed entity.""" assert await async_setup_component( @@ -614,7 +613,7 @@ async def test_if_fires_on_entity_change_below_above_range( # 4 is below 5 hass.states.async_set("test.entity", 4) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 
@pytest.mark.parametrize( @@ -627,7 +626,10 @@ async def test_if_fires_on_entity_change_below_above_range( ], ) async def test_if_fires_on_entity_change_over_to_below_range( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -652,7 +654,7 @@ async def test_if_fires_on_entity_change_over_to_below_range( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -665,7 +667,10 @@ async def test_if_fires_on_entity_change_over_to_below_range( ], ) async def test_if_fires_on_entity_change_over_to_below_above_range( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -690,12 +695,12 @@ async def test_if_fires_on_entity_change_over_to_below_above_range( # 4 is below 5 so it should not fire hass.states.async_set("test.entity", 4) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [100, "input_number.value_100"]) async def test_if_not_fires_if_entity_not_match( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test if not fired with non matching entity.""" assert await async_setup_component( @@ -715,11 +720,13 @@ async def test_if_not_fires_if_entity_not_match( hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def 
test_if_not_fires_and_warns_if_below_entity_unknown( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, calls: list[ServiceCall] + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + service_calls: list[ServiceCall], ) -> None: """Test if warns with unknown below entity.""" assert await async_setup_component( @@ -742,7 +749,7 @@ async def test_if_not_fires_and_warns_if_below_entity_unknown( hass.states.async_set("test.entity", 1) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING @@ -750,7 +757,7 @@ async def test_if_not_fires_and_warns_if_below_entity_unknown( @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_fires_on_entity_change_below_with_attribute( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" hass.states.async_set("test.entity", 11, {"test_attribute": 11}) @@ -773,12 +780,12 @@ async def test_if_fires_on_entity_change_below_with_attribute( # 9 is below 10 hass.states.async_set("test.entity", 9, {"test_attribute": 11}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_not_below_with_attribute( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes.""" assert await async_setup_component( @@ -798,12 +805,12 @@ async def test_if_not_fires_on_entity_change_not_below_with_attribute( # 11 is not below 10 hass.states.async_set("test.entity", 11, {"test_attribute": 9}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", 
[10, "input_number.value_10"]) async def test_if_fires_on_attribute_change_with_attribute_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" hass.states.async_set("test.entity", "entity", {"test_attribute": 11}) @@ -827,12 +834,12 @@ async def test_if_fires_on_attribute_change_with_attribute_below( # 9 is below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": 9}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_attribute_change_with_attribute_not_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -853,12 +860,12 @@ async def test_if_not_fires_on_attribute_change_with_attribute_not_below( # 11 is not below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": 11}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_with_attribute_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -879,12 +886,12 @@ async def test_if_not_fires_on_entity_change_with_attribute_below( # 11 is not below 10, entity state value should not be tested hass.states.async_set("test.entity", "9", {"test_attribute": 11}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def 
test_if_not_fires_on_entity_change_with_not_attribute_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -905,12 +912,12 @@ async def test_if_not_fires_on_entity_change_with_not_attribute_below( # 11 is not below 10, entity state value should not be tested hass.states.async_set("test.entity", "entity") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_fires_on_attr_change_with_attribute_below_and_multiple_attr( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" hass.states.async_set( @@ -937,12 +944,12 @@ async def test_fires_on_attr_change_with_attribute_below_and_multiple_attr( "test.entity", "entity", {"test_attribute": 9, "not_test_attribute": 11} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_template_list( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test template list.""" hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 11]}) @@ -965,12 +972,12 @@ async def test_template_list( # 3 is below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 3]}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("below", [10.0, "input_number.value_10"]) async def test_template_string( - hass: HomeAssistant, calls: list[ServiceCall], below: float | str + hass: HomeAssistant, service_calls: 
list[ServiceCall], below: float | str ) -> None: """Test template string.""" assert await async_setup_component( @@ -1004,15 +1011,15 @@ async def test_template_string( await hass.async_block_till_done() hass.states.async_set("test.entity", "test state 2", {"test_attribute": "0.9"}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"numeric_state - test.entity - {below} - None - test state 1 - test state 2" ) async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if not fired changed attributes.""" assert await async_setup_component( @@ -1035,7 +1042,7 @@ async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( "test.entity", "entity", {"test_attribute": 11, "not_test_attribute": 9} ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( @@ -1048,7 +1055,10 @@ async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( ], ) async def test_if_action( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test if action.""" entity_id = "domain.test_entity" @@ -1073,19 +1083,19 @@ async def test_if_action( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entity_id, 8) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entity_id, 9) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 @pytest.mark.parametrize( @@ 
-1098,7 +1108,7 @@ async def test_if_action( ], ) async def test_if_fails_setup_bad_for( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, above: int | str, below: int | str ) -> None: """Test for setup failure for bad for.""" hass.states.async_set("test.entity", 5) @@ -1124,9 +1134,7 @@ async def test_if_fails_setup_bad_for( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_for_without_above_below( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_for_without_above_below(hass: HomeAssistant) -> None: """Test for setup failures for missing above or below.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1158,7 +1166,7 @@ async def test_if_fails_setup_for_without_above_below( async def test_if_not_fires_on_entity_change_with_for( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1187,7 +1195,7 @@ async def test_if_not_fires_on_entity_change_with_for( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( @@ -1200,7 +1208,10 @@ async def test_if_not_fires_on_entity_change_with_for( ], ) async def test_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for not firing on entities change with for after stop.""" hass.states.async_set("test.entity_1", 0) @@ -1232,7 +1243,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + 
timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set("test.entity_1", 15) hass.states.async_set("test.entity_2", 15) @@ -1246,9 +1257,11 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 @pytest.mark.parametrize( @@ -1263,7 +1276,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( async def test_if_fires_on_entity_change_with_for_attribute_change( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1294,11 +1307,11 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( async_fire_time_changed(hass) hass.states.async_set("test.entity", 9, attributes={"mock_attr": "attr_change"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -1311,7 +1324,10 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( ], ) async def test_if_fires_on_entity_change_with_for( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for firing on entity change with for.""" hass.states.async_set("test.entity", 0) @@ -1338,12 +1354,12 @@ async def test_if_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await 
hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_wait_template_with_trigger( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test using wait template with 'trigger.entity_id'.""" hass.states.async_set("test.entity", "0") @@ -1381,8 +1397,8 @@ async def test_wait_template_with_trigger( hass.states.async_set("test.entity", "12") hass.states.async_set("test.entity", "8") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "numeric_state - test.entity - 12" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "numeric_state - test.entity - 12" @pytest.mark.parametrize( @@ -1397,7 +1413,7 @@ async def test_wait_template_with_trigger( async def test_if_fires_on_entities_change_no_overlap( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1432,16 +1448,16 @@ async def test_if_fires_on_entities_change_no_overlap( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1" hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2" @pytest.mark.parametrize( @@ -1456,7 +1472,7 @@ async def test_if_fires_on_entities_change_no_overlap( async def test_if_fires_on_entities_change_overlap( hass: 
HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1500,18 +1516,18 @@ async def test_if_fires_on_entities_change_overlap( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2" @pytest.mark.parametrize( @@ -1524,7 +1540,10 @@ async def test_if_fires_on_entities_change_overlap( ], ) async def test_if_fires_on_change_with_for_template_1( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1549,10 +1568,10 @@ async def test_if_fires_on_change_with_for_template_1( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -1565,7 +1584,10 @@ async def test_if_fires_on_change_with_for_template_1( ], ) async def test_if_fires_on_change_with_for_template_2( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | 
str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1590,10 +1612,10 @@ async def test_if_fires_on_change_with_for_template_2( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -1606,7 +1628,10 @@ async def test_if_fires_on_change_with_for_template_2( ], ) async def test_if_fires_on_change_with_for_template_3( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1631,14 +1656,14 @@ async def test_if_fires_on_change_with_for_template_3( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_not_fires_on_error_with_for_template( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on error with for template.""" hass.states.async_set("test.entity", 0) @@ -1662,17 +1687,17 @@ async def test_if_not_fires_on_error_with_for_template( hass.states.async_set("test.entity", 101) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) hass.states.async_set("test.entity", 
"unavailable") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) hass.states.async_set("test.entity", 101) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( @@ -1685,7 +1710,7 @@ async def test_if_not_fires_on_error_with_for_template( ], ) async def test_invalid_for_template( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, above: int | str, below: int | str ) -> None: """Test for invalid for template.""" hass.states.async_set("test.entity", 0) @@ -1726,7 +1751,7 @@ async def test_invalid_for_template( async def test_if_fires_on_entities_change_overlap_for_template( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1773,22 +1798,22 @@ async def test_if_fires_on_entities_change_overlap_for_template( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" async def test_below_above(hass: 
HomeAssistant) -> None: @@ -1823,7 +1848,7 @@ async def test_schema_unacceptable_entities(hass: HomeAssistant) -> None: @pytest.mark.parametrize("above", [3, "input_number.value_3"]) async def test_attribute_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"test-measurement": 1}) @@ -1847,12 +1872,12 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", "bla", {"test-measurement": 4}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("above", [3, "input_number.value_3"]) async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test for not firing on entity change with for after stop trigger.""" hass.states.async_set("test.entity", "bla", {"test-measurement": 1}) @@ -1880,10 +1905,10 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( hass.states.async_set("test.entity", "bla", {"test-measurement": 4}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -1893,7 +1918,7 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( async def test_variables_priority( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int, below: int, ) -> None: @@ -1941,17 +1966,17 @@ async def 
test_variables_priority( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" @pytest.mark.parametrize("multiplier", [1, 5]) async def test_template_variable( - hass: HomeAssistant, calls: list[ServiceCall], multiplier: int + hass: HomeAssistant, service_calls: list[ServiceCall], multiplier: int ) -> None: """Test template variable.""" hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 11]}) @@ -1976,6 +2001,6 @@ async def test_template_variable( hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 3]}) await hass.async_block_till_done() if multiplier * 3 < 10: - assert len(calls) == 1 + assert len(service_calls) == 1 else: - assert len(calls) == 0 + assert len(service_calls) == 0 diff --git a/tests/components/homeassistant/triggers/test_state.py b/tests/components/homeassistant/triggers/test_state.py index a40ecae7579..83157a158a6 100644 --- a/tests/components/homeassistant/triggers/test_state.py +++ b/tests/components/homeassistant/triggers/test_state.py @@ -19,29 +19,18 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import ( - assert_setup_component, - async_fire_time_changed, - async_mock_service, - mock_component, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import assert_setup_component, async_fire_time_changed, mock_component @pytest.fixture(autouse=True) 
-def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") hass.states.async_set("test.entity", "hello") async def test_if_fires_on_entity_change( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change.""" context = Context() @@ -74,9 +63,12 @@ async def test_if_fires_on_entity_change( hass.states.async_set("test.entity", "world", context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id - assert calls[0].data["some"] == "state - test.entity - hello - world - None - 0" + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id + assert ( + service_calls[0].data["some"] + == "state - test.entity - hello - world - None - 0" + ) await hass.services.async_call( automation.DOMAIN, @@ -84,13 +76,16 @@ async def test_if_fires_on_entity_change( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.states.async_set("test.entity", "planet") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_entity_change_uuid( - hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entity change.""" context = Context() @@ -130,9 +125,11 @@ async def test_if_fires_on_entity_change_uuid( hass.states.async_set("test.beer", "world", context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id - assert calls[0].data["some"] == "state - test.beer - hello - world - None - 0" + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id + assert ( + service_calls[0].data["some"] == 
"state - test.beer - hello - world - None - 0" + ) await hass.services.async_call( automation.DOMAIN, @@ -140,13 +137,14 @@ async def test_if_fires_on_entity_change_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.states.async_set("test.beer", "planet") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_entity_change_with_from_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with filter.""" assert await async_setup_component( @@ -167,11 +165,11 @@ async def test_if_fires_on_entity_change_with_from_filter( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_not_from_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change inverse filter.""" assert await async_setup_component( @@ -193,15 +191,15 @@ async def test_if_fires_on_entity_change_with_not_from_filter( # Do not fire from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not calls + assert not service_calls hass.states.async_set("test.entity", "universum") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_to_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -222,11 +220,11 @@ async def test_if_fires_on_entity_change_with_to_filter( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 1 + assert 
len(service_calls) == 1 async def test_if_fires_on_entity_change_with_not_to_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -248,15 +246,15 @@ async def test_if_fires_on_entity_change_with_not_to_filter( # Do not fire to world hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not calls + assert not service_calls hass.states.async_set("test.entity", "universum") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_from_filter_all( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with filter.""" assert await async_setup_component( @@ -278,11 +276,11 @@ async def test_if_fires_on_entity_change_with_from_filter_all( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "world", {"attribute": 5}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_to_filter_all( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -304,11 +302,11 @@ async def test_if_fires_on_entity_change_with_to_filter_all( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "world", {"attribute": 5}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_attribute_change_with_to_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on attribute change.""" 
assert await async_setup_component( @@ -330,11 +328,11 @@ async def test_if_fires_on_attribute_change_with_to_filter( hass.states.async_set("test.entity", "world", {"test_attribute": 11}) hass.states.async_set("test.entity", "world", {"test_attribute": 12}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if both filters are a non match.""" assert await async_setup_component( @@ -356,11 +354,11 @@ async def test_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_not_from_to( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if not from doesn't match and to match.""" assert await async_setup_component( @@ -383,31 +381,31 @@ async def test_if_fires_on_entity_change_with_not_from_to( # We should not trigger from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not calls + assert not service_calls # We should not trigger to != galaxy hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not calls + assert not service_calls # We should trigger to galaxy hass.states.async_set("test.entity", "galaxy") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # We should not trigger from milky way hass.states.async_set("test.entity", "milky_way") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # We should trigger to universe hass.states.async_set("test.entity", "universe") await hass.async_block_till_done() - assert 
len(calls) == 2 + assert len(service_calls) == 2 async def test_if_fires_on_entity_change_with_from_not_to( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if not from doesn't match and to match.""" assert await async_setup_component( @@ -430,31 +428,31 @@ async def test_if_fires_on_entity_change_with_from_not_to( # We should trigger to world from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Reset back to hello, should not trigger hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # We should not trigger to galaxy hass.states.async_set("test.entity", "galaxy") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # We should trigger form galaxy to milky way hass.states.async_set("test.entity", "milky_way") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 # We should not trigger to universe hass.states.async_set("test.entity", "universe") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_if_not_fires_if_to_filter_not_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing if to filter is not a match.""" assert await async_setup_component( @@ -476,11 +474,11 @@ async def test_if_not_fires_if_to_filter_not_match( hass.states.async_set("test.entity", "moon") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_if_from_filter_not_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing if from filter is not a match.""" 
hass.states.async_set("test.entity", "bye") @@ -504,11 +502,11 @@ async def test_if_not_fires_if_from_filter_not_match( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_if_entity_not_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing if entity is not matching.""" assert await async_setup_component( @@ -525,10 +523,10 @@ async def test_if_not_fires_if_entity_not_match( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 -async def test_if_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_if_action(hass: HomeAssistant, service_calls: list[ServiceCall]) -> None: """Test for to action.""" entity_id = "domain.test_entity" test_state = "new_state" @@ -551,18 +549,16 @@ async def test_if_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entity_id, test_state + "something") hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 -async def test_if_fails_setup_if_to_boolean_value( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_if_to_boolean_value(hass: HomeAssistant) -> None: """Test for setup failure for boolean to.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -582,9 +578,7 @@ async def test_if_fails_setup_if_to_boolean_value( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_if_from_boolean_value( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def 
test_if_fails_setup_if_from_boolean_value(hass: HomeAssistant) -> None: """Test for setup failure for boolean from.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -604,9 +598,7 @@ async def test_if_fails_setup_if_from_boolean_value( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_bad_for( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_bad_for(hass: HomeAssistant) -> None: """Test for setup failure for bad for.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -628,7 +620,7 @@ async def test_if_fails_setup_bad_for( async def test_if_not_fires_on_entity_change_with_for( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for.""" assert await async_setup_component( @@ -654,11 +646,11 @@ async def test_if_not_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for after stop trigger.""" assert await async_setup_component( @@ -686,7 +678,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set("test.entity_1", "world_no") hass.states.async_set("test.entity_2", "world_no") @@ -700,14 +692,17 @@ async def 
test_if_not_fires_on_entities_change_with_for_after_stop( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_entity_change_with_for_attribute_change( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entity change with for and attribute change.""" assert await async_setup_component( @@ -735,15 +730,17 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( "test.entity", "world", attributes={"mock_attr": "attr_change"} ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_for_multiple_force_update( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entity change with for and force update.""" assert await async_setup_component( @@ -770,15 +767,15 @@ async def test_if_fires_on_entity_change_with_for_multiple_force_update( async_fire_time_changed(hass) hass.states.async_set("test.force_entity", "world", None, True) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_for( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, 
service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -802,11 +799,11 @@ async def test_if_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_for_without_to( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -830,22 +827,24 @@ async def test_if_fires_on_entity_change_with_for_without_to( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set("test.entity", "world") await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=4)) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_does_not_fires_on_entity_change_with_for_without_to_2( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -871,11 +870,11 @@ async def test_if_does_not_fires_on_entity_change_with_for_without_to_2( async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_entity_creation_and_removal( - hass: HomeAssistant, calls: list[ServiceCall] + hass: 
HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity creation and removal, with to/from constraints.""" # set automations for multiple combinations to/from @@ -917,32 +916,32 @@ async def test_if_fires_on_entity_creation_and_removal( # automation with match_all triggers on creation hass.states.async_set("test.entity_0", "any", context=context_0) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context_0.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context_0.id # create entities, trigger on test.entity_2 ('to' matches, no 'from') hass.states.async_set("test.entity_1", "hello", context=context_1) hass.states.async_set("test.entity_2", "world", context=context_2) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].context.parent_id == context_2.id + assert len(service_calls) == 2 + assert service_calls[1].context.parent_id == context_2.id # removal of both, trigger on test.entity_1 ('from' matches, no 'to') assert hass.states.async_remove("test.entity_1", context=context_1) assert hass.states.async_remove("test.entity_2", context=context_2) await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].context.parent_id == context_1.id + assert len(service_calls) == 3 + assert service_calls[2].context.parent_id == context_1.id # automation with match_all triggers on removal assert hass.states.async_remove("test.entity_0", context=context_0) await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].context.parent_id == context_0.id + assert len(service_calls) == 4 + assert service_calls[3].context.parent_id == context_0.id async def test_if_fires_on_for_condition( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if condition is on.""" point1 = dt_util.utcnow() @@ -971,17 +970,17 @@ async def 
test_if_fires_on_for_condition( # not enough time has passed hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future mock_utcnow.return_value = point2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_for_condition_attribute_change( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if condition is on with attribute change.""" point1 = dt_util.utcnow() @@ -1011,7 +1010,7 @@ async def test_if_fires_on_for_condition_attribute_change( # not enough time has passed hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Still not enough time has passed, but an attribute is changed mock_utcnow.return_value = point2 @@ -1020,18 +1019,16 @@ async def test_if_fires_on_for_condition_attribute_change( ) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Enough time has now passed mock_utcnow.return_value = point3 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 -async def test_if_fails_setup_for_without_time( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_for_without_time(hass: HomeAssistant) -> None: """Test for setup failure if no time is provided.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1053,9 +1050,7 @@ async def test_if_fails_setup_for_without_time( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_for_without_entity( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def 
test_if_fails_setup_for_without_entity(hass: HomeAssistant) -> None: """Test for setup failure if no entity is provided.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1077,7 +1072,7 @@ async def test_if_fails_setup_for_without_entity( async def test_wait_template_with_trigger( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test using wait template with 'trigger.entity_id'.""" assert await async_setup_component( @@ -1113,12 +1108,14 @@ async def test_wait_template_with_trigger( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "state - test.entity - hello - world" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "state - test.entity - hello - world" async def test_if_fires_on_entities_change_no_overlap( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entities change with no overlap.""" assert await async_setup_component( @@ -1146,20 +1143,22 @@ async def test_if_fires_on_entities_change_no_overlap( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1" hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2" async def 
test_if_fires_on_entities_change_overlap( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entities change with overlap.""" assert await async_setup_component( @@ -1196,22 +1195,22 @@ async def test_if_fires_on_entities_change_overlap( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2" async def test_if_fires_on_change_with_for_template_1( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1232,14 +1231,14 @@ async def test_if_fires_on_change_with_for_template_1( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_change_with_for_template_2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ 
-1260,14 +1259,14 @@ async def test_if_fires_on_change_with_for_template_2( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_change_with_for_template_3( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1288,14 +1287,14 @@ async def test_if_fires_on_change_with_for_template_3( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_change_with_for_template_4( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1317,14 +1316,14 @@ async def test_if_fires_on_change_with_for_template_4( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_change_from_with_for( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with from/for.""" assert await async_setup_component( @@ -1351,11 +1350,11 @@ async def test_if_fires_on_change_from_with_for( await 
hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_not_fires_on_change_from_with_for( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with from/for.""" assert await async_setup_component( @@ -1382,12 +1381,10 @@ async def test_if_not_fires_on_change_from_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 -async def test_invalid_for_template_1( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_invalid_for_template_1(hass: HomeAssistant) -> None: """Test for invalid for template.""" assert await async_setup_component( hass, @@ -1412,7 +1409,9 @@ async def test_invalid_for_template_1( async def test_if_fires_on_entities_change_overlap_for_template( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entities change with overlap and for template.""" assert await async_setup_component( @@ -1452,26 +1451,26 @@ async def test_if_fires_on_entities_change_overlap_for_template( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) 
await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" async def test_attribute_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"name": "hello"}) @@ -1496,11 +1495,11 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attribute_if_fires_on_entity_where_attr_stays_constant( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) @@ -1524,21 +1523,21 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant( # Leave all attributes the same hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Change the untracked attribute hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "new_value"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Change the tracked attribute hass.states.async_set("test.entity", "bla", {"name": "world", "other": "old_value"}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def 
test_attribute_if_fires_on_entity_where_attr_stays_constant_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "other_name"}) @@ -1565,25 +1564,25 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant_filter( "test.entity", "bla", {"name": "best_name", "other": "old_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Change the untracked attribute hass.states.async_set( "test.entity", "bla", {"name": "best_name", "other": "new_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Change the tracked attribute hass.states.async_set( "test.entity", "bla", {"name": "other_name", "other": "old_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attribute_if_fires_on_entity_where_attr_stays_constant_all( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) @@ -1610,25 +1609,25 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant_all( "test.entity", "bla", {"name": "name_1", "other": "old_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Change the untracked attribute hass.states.async_set( "test.entity", "bla", {"name": "name_1", "other": "new_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Change the tracked attribute hass.states.async_set( "test.entity", "bla", {"name": "name_2", "other": "old_value"} ) await hass.async_block_till_done() - assert len(calls) == 2 + assert 
len(service_calls) == 2 async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for after stop trigger.""" hass.states.async_set("test.entity", "bla", {"name": "hello"}) @@ -1658,33 +1657,33 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( # Test that the for-check works hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) hass.states.async_set("test.entity", "bla", {"name": "world", "something": "else"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Now remove state while inside "for" hass.states.async_set("test.entity", "bla", {"name": "hello"}) hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_remove("test.entity") await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attribute_if_fires_on_entity_change_with_both_filters_boolean( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"happening": False}) @@ -1709,11 +1708,13 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters_boolean( 
hass.states.async_set("test.entity", "bla", {"happening": True}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_variables_priority( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test an externally defined trigger variable is overridden.""" assert await async_setup_component( @@ -1754,19 +1755,19 @@ async def test_variables_priority( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" diff --git a/tests/components/homeassistant/triggers/test_time.py b/tests/components/homeassistant/triggers/test_time.py index 961bac6c367..76d80120fdd 100644 --- a/tests/components/homeassistant/triggers/test_time.py +++ b/tests/components/homeassistant/triggers/test_time.py @@ -20,28 +20,19 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import ( - assert_setup_component, - async_fire_time_changed, - async_mock_service, - 
mock_component, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import assert_setup_component, async_fire_time_changed, mock_component @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") async def test_if_fires_using_at( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing at.""" now = dt_util.now() @@ -71,9 +62,9 @@ async def test_if_fires_using_at( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "time - 5" - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "time - 5" + assert service_calls[0].data["id"] == 0 @pytest.mark.parametrize( @@ -82,7 +73,7 @@ async def test_if_fires_using_at( async def test_if_fires_using_at_input_datetime( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], has_date, has_time, ) -> None: @@ -132,9 +123,9 @@ async def test_if_fires_using_at_input_datetime( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[1].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" ) @@ -152,20 +143,23 @@ async def test_if_fires_using_at_input_datetime( }, blocking=True, ) + assert len(service_calls) == 3 await hass.async_block_till_done() async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert 
len(calls) == 2 + assert len(service_calls) == 4 assert ( - calls[1].data["some"] + service_calls[3].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" ) async def test_if_fires_using_multiple_at( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing at.""" @@ -195,18 +189,20 @@ async def test_if_fires_using_multiple_at( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "time - 5" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "time - 5" async_fire_time_changed(hass, trigger_dt + timedelta(hours=1, seconds=1)) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "time - 6" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "time - 6" async def test_if_not_fires_using_wrong_at( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """YAML translates time values to total seconds. 
@@ -242,10 +238,12 @@ async def test_if_not_fires_using_wrong_at( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 -async def test_if_action_before(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_if_action_before( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test for if action before.""" assert await async_setup_component( hass, @@ -267,16 +265,18 @@ async def test_if_action_before(hass: HomeAssistant, calls: list[ServiceCall]) - hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 with patch("homeassistant.helpers.condition.dt_util.now", return_value=after_10): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 -async def test_if_action_after(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_if_action_after( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test for if action after.""" assert await async_setup_component( hass, @@ -298,17 +298,17 @@ async def test_if_action_after(hass: HomeAssistant, calls: list[ServiceCall]) -> hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 with patch("homeassistant.helpers.condition.dt_util.now", return_value=after_10): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_action_one_weekday( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for if action with one weekday.""" assert await async_setup_component( @@ -332,17 +332,17 @@ async def test_if_action_one_weekday( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 with 
patch("homeassistant.helpers.condition.dt_util.now", return_value=tuesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_action_list_weekday( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for action with a list of weekdays.""" assert await async_setup_component( @@ -367,19 +367,19 @@ async def test_if_action_list_weekday( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 with patch("homeassistant.helpers.condition.dt_util.now", return_value=tuesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 with patch("homeassistant.helpers.condition.dt_util.now", return_value=wednesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_untrack_time_change(hass: HomeAssistant) -> None: @@ -416,7 +416,9 @@ async def test_untrack_time_change(hass: HomeAssistant) -> None: async def test_if_fires_using_at_sensor( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing at sensor time.""" now = dt_util.now() @@ -452,9 +454,9 @@ async def test_if_fires_using_at_sensor( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-sensor.next_alarm" ) @@ -470,9 +472,9 @@ async def test_if_fires_using_at_sensor( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert 
len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-sensor.next_alarm" ) @@ -494,7 +496,7 @@ async def test_if_fires_using_at_sensor( await hass.async_block_till_done() # We should not have listened to anything - assert len(calls) == 2 + assert len(service_calls) == 2 # Now without device class hass.states.async_set( @@ -513,7 +515,7 @@ async def test_if_fires_using_at_sensor( await hass.async_block_till_done() # We should not have listened to anything - assert len(calls) == 2 + assert len(service_calls) == 2 @pytest.mark.parametrize( @@ -544,7 +546,7 @@ def test_schema_invalid(conf) -> None: async def test_datetime_in_past_on_load( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test time trigger works if input_datetime is in past.""" await async_setup_component( @@ -566,6 +568,7 @@ async def test_datetime_in_past_on_load( }, blocking=True, ) + assert len(service_calls) == 1 await hass.async_block_till_done() assert await async_setup_component( @@ -587,7 +590,7 @@ async def test_datetime_in_past_on_load( async_fire_time_changed(hass, now) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 await hass.services.async_call( "input_datetime", @@ -598,13 +601,14 @@ async def test_datetime_in_past_on_load( }, blocking=True, ) + assert len(service_calls) == 2 await hass.async_block_till_done() async_fire_time_changed(hass, future + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 3 assert ( - calls[0].data["some"] + service_calls[2].data["some"] == f"time-{future.day}-{future.hour}-input_datetime.my_trigger" ) diff --git a/tests/components/homeassistant/triggers/test_time_pattern.py b/tests/components/homeassistant/triggers/test_time_pattern.py index 327623d373b..7138fd7dd02 100644 --- 
a/tests/components/homeassistant/triggers/test_time_pattern.py +++ b/tests/components/homeassistant/triggers/test_time_pattern.py @@ -13,23 +13,19 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, async_mock_service, mock_component - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import async_fire_time_changed, mock_component @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") async def test_if_fires_when_hour_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if hour is matching.""" now = dt_util.utcnow() @@ -58,7 +54,8 @@ async def test_if_fires_when_hour_matches( async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, hour=0)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 await hass.services.async_call( automation.DOMAIN, @@ -66,15 +63,17 @@ async def test_if_fires_when_hour_matches( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 async_fire_time_changed(hass, now.replace(year=now.year + 1, day=1, hour=0)) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 async def test_if_fires_when_minute_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing 
if minutes are matching.""" now = dt_util.utcnow() @@ -101,11 +100,13 @@ async def test_if_fires_when_minute_matches( async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, minute=0)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_when_second_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if seconds are matching.""" now = dt_util.utcnow() @@ -132,11 +133,13 @@ async def test_if_fires_when_second_matches( async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, second=0)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_when_second_as_string_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if seconds are matching.""" now = dt_util.utcnow() @@ -165,11 +168,13 @@ async def test_if_fires_when_second_as_string_matches( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_when_all_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if everything matches.""" now = dt_util.utcnow() @@ -198,11 +203,13 @@ async def test_if_fires_when_all_matches( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_periodic_seconds( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for 
firing periodically every second.""" now = dt_util.utcnow() @@ -231,11 +238,13 @@ async def test_if_fires_periodic_seconds( ) await hass.async_block_till_done() - assert len(calls) >= 1 + assert len(service_calls) >= 1 async def test_if_fires_periodic_minutes( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing periodically every minute.""" @@ -265,11 +274,13 @@ async def test_if_fires_periodic_minutes( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_periodic_hours( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing periodically every hour.""" now = dt_util.utcnow() @@ -298,11 +309,13 @@ async def test_if_fires_periodic_hours( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_default_values( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing at 2 minutes every hour.""" now = dt_util.utcnow() @@ -326,24 +339,24 @@ async def test_default_values( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async_fire_time_changed( hass, now.replace(year=now.year + 2, day=1, hour=1, minute=2, second=1) ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async_fire_time_changed( hass, now.replace(year=now.year + 2, day=1, hour=2, minute=2, second=0) ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 -async def test_invalid_schemas(hass: HomeAssistant, calls: list[ServiceCall]) 
-> None: +async def test_invalid_schemas() -> None: """Test invalid schemas.""" schemas = ( None, diff --git a/tests/components/homeassistant_alerts/test_init.py b/tests/components/homeassistant_alerts/test_init.py index 444db019c7c..0a38778bbee 100644 --- a/tests/components/homeassistant_alerts/test_init.py +++ b/tests/components/homeassistant_alerts/test_init.py @@ -26,7 +26,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator -def stub_alert(aioclient_mock, alert_id): +def stub_alert(aioclient_mock: AiohttpClientMocker, alert_id) -> None: """Stub an alert.""" aioclient_mock.get( f"https://alerts.home-assistant.io/alerts/{alert_id}.json", @@ -35,7 +35,7 @@ def stub_alert(aioclient_mock, alert_id): @pytest.fixture(autouse=True) -async def setup_repairs(hass): +async def setup_repairs(hass: HomeAssistant) -> None: """Set up the repairs integration.""" assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) @@ -99,9 +99,9 @@ async def test_alerts( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version, - supervisor_info, - expected_alerts, + ha_version: str, + supervisor_info: dict[str, str] | None, + expected_alerts: list[tuple[str, str]], ) -> None: """Test creating issues based on alerts.""" @@ -292,12 +292,12 @@ async def test_alerts_refreshed_on_component_load( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version, - supervisor_info, - initial_components, - late_components, - initial_alerts, - late_alerts, + ha_version: str, + supervisor_info: dict[str, str] | None, + initial_components: list[str], + late_components: list[str], + initial_alerts: list[tuple[str, str]], + late_alerts: list[tuple[str, str]], freezer: FrozenDateTimeFactory, ) -> None: """Test alerts are refreshed when components are loaded.""" @@ -433,9 +433,9 @@ async def test_bad_alerts( hass: HomeAssistant, 
hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version, - fixture, - expected_alerts, + ha_version: str, + fixture: str, + expected_alerts: list[tuple[str, str]], ) -> None: """Test creating issues based on alerts.""" fixture_content = load_fixture(fixture, "homeassistant_alerts") diff --git a/tests/components/homeassistant_hardware/conftest.py b/tests/components/homeassistant_hardware/conftest.py index 72e937396ea..b62ccaf855b 100644 --- a/tests/components/homeassistant_hardware/conftest.py +++ b/tests/components/homeassistant_hardware/conftest.py @@ -1,17 +1,17 @@ """Test fixtures for the Home Assistant Hardware integration.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: """Mock the radio connection and probing of the ZHA config flow.""" - def mock_probe(config: dict[str, Any]) -> None: + def mock_probe(config: dict[str, Any]) -> dict[str, Any]: # The radio probing will return the correct baudrate return {**config, "baudrate": 115200} diff --git a/tests/components/homeassistant_hardware/test_config_flow.py b/tests/components/homeassistant_hardware/test_config_flow.py new file mode 100644 index 00000000000..a1842f4c4e6 --- /dev/null +++ b/tests/components/homeassistant_hardware/test_config_flow.py @@ -0,0 +1,674 @@ +"""Test the Home Assistant hardware firmware config flow.""" + +import asyncio +from collections.abc import Awaitable, Callable, Generator, Iterator +import contextlib +from typing import Any +from unittest.mock import AsyncMock, Mock, call, patch + +import pytest +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + STEP_PICK_FIRMWARE_THREAD, 
+ STEP_PICK_FIRMWARE_ZIGBEE, + BaseFirmwareConfigFlow, + BaseFirmwareOptionsFlow, +) +from homeassistant.components.homeassistant_hardware.util import ( + get_otbr_addon_manager, + get_zigbee_flasher_addon_manager, +) +from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow +from homeassistant.core import HomeAssistant, callback +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, +) + +TEST_DOMAIN = "test_firmware_domain" +TEST_DEVICE = "/dev/SomeDevice123" +TEST_HARDWARE_NAME = "Some Hardware Name" + + +class FakeFirmwareConfigFlow(BaseFirmwareConfigFlow, domain=TEST_DOMAIN): + """Config flow for `test_firmware_domain`.""" + + VERSION = 1 + MINOR_VERSION = 2 + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: + """Return the options flow.""" + return FakeFirmwareOptionsFlowHandler(config_entry) + + async def async_step_hardware( + self, data: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle hardware flow.""" + self._device = TEST_DEVICE + self._hardware_name = TEST_HARDWARE_NAME + + return await self.async_step_confirm() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._device is not None + assert self._hardware_name is not None + assert self._probed_firmware_type is not None + + return self.async_create_entry( + title=self._hardware_name, + data={ + "device": self._device, + "firmware": self._probed_firmware_type.value, + "hardware": self._hardware_name, + }, + ) + + +class FakeFirmwareOptionsFlowHandler(BaseFirmwareOptionsFlow): + """Options flow for `test_firmware_domain`.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(*args, **kwargs) + + self._device = self.config_entry.data["device"] + self._hardware_name = 
self.config_entry.data["hardware"] + + # Regenerate the translation placeholders + self._get_translation_placeholders() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._probed_firmware_type is not None + + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + "firmware": self._probed_firmware_type.value, + }, + options=self.config_entry.options, + ) + + return self.async_create_entry(title="", data={}) + + +@pytest.fixture(autouse=True) +def mock_test_firmware_platform( + hass: HomeAssistant, +) -> Generator[None]: + """Fixture for a test config flow.""" + mock_module = MockModule( + TEST_DOMAIN, async_setup_entry=AsyncMock(return_value=True) + ) + mock_integration(hass, mock_module) + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + + with mock_config_flow(TEST_DOMAIN, FakeFirmwareConfigFlow): + yield + + +def delayed_side_effect() -> Callable[..., Awaitable[None]]: + """Slows down eager tasks by delaying for an event loop tick.""" + + async def side_effect(*args: Any, **kwargs: Any) -> None: + await asyncio.sleep(0) + + return side_effect + + +@contextlib.contextmanager +def mock_addon_info( + hass: HomeAssistant, + *, + is_hassio: bool = True, + app_type: ApplicationType = ApplicationType.EZSP, + otbr_addon_info: AddonInfo = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ), + flasher_addon_info: AddonInfo = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ), +) -> Iterator[tuple[Mock, Mock]]: + """Mock the main addon states for the config flow.""" + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + 
side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_get_addon_info.return_value = flasher_addon_info + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_get_addon_info.return_value = otbr_addon_info + + with ( + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.is_hassio", + return_value=is_hassio, + ), + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_type", + return_value=app_type, + ), + ): + yield mock_otbr_manager, mock_flasher_manager + + +async def test_config_flow_zigbee(hass: HomeAssistant) -> None: + """Test the config flow.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we are now installing the addon + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + assert result["description_placeholders"]["firmware_type"] == "spinel" + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + } + + # Ensure a ZHA 
discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" + + +async def test_config_flow_zigbee_skip_step_if_installed(hass: HomeAssistant) -> None: + """Test the config flow, skip installing the addon if necessary.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + flasher_addon_info=AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ), + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we skip installation, instead we directly run it + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + # Uninstall the addon + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + # Done + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert 
result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + +async def test_config_flow_thread(hass: HomeAssistant) -> None: + """Test the config flow.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + + with mock_addon_info( + hass, + app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await 
hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "spinel", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + } + + +async def test_config_flow_thread_addon_already_installed(hass: HomeAssistant) -> None: + """Test the Thread config flow, addon is already installed.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + with mock_addon_info( + hass, + app_type=ApplicationType.EZSP, + otbr_addon_info=AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_RUNNING, + update_available=False, + version=None, + ), + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_config_flow_zigbee_not_hassio(hass: 
HomeAssistant) -> None: + """Test when the stick is used with a non-hassio setup.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + with mock_addon_info( + hass, + is_hassio=False, + app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + } + + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" + + +async def test_options_flow_zigbee_to_thread(hass: HomeAssistant) -> None: + """Test the options flow, migrating Zigbee to Thread.""" + config_entry = MockConfigEntry( + domain=TEST_DOMAIN, + data={ + "firmware": "ezsp", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME + + with mock_addon_info( + hass, + 
app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "spinel" + + +async def test_options_flow_thread_to_zigbee(hass: HomeAssistant) -> None: + """Test the options flow, migrating Thread to Zigbee.""" + config_entry = MockConfigEntry( + domain=TEST_DOMAIN, + data={ 
+ "firmware": "spinel", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME + + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we are now installing the addon + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + 
assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "ezsp" diff --git a/tests/components/homeassistant_sky_connect/test_config_flow_failures.py b/tests/components/homeassistant_hardware/test_config_flow_failures.py similarity index 73% rename from tests/components/homeassistant_sky_connect/test_config_flow_failures.py rename to tests/components/homeassistant_hardware/test_config_flow_failures.py index b29f8d808ae..4c3ea7d28fa 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow_failures.py +++ b/tests/components/homeassistant_hardware/test_config_flow_failures.py @@ -1,38 +1,43 @@ -"""Test the Home Assistant SkyConnect config flow failure cases.""" +"""Test the Home Assistant hardware firmware config flow failure cases.""" from unittest.mock import AsyncMock import pytest from universal_silabs_flasher.const import ApplicationType -from homeassistant.components import usb from homeassistant.components.hassio.addon_manager import ( AddonError, AddonInfo, AddonState, ) -from homeassistant.components.homeassistant_sky_connect.config_flow import ( +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( STEP_PICK_FIRMWARE_THREAD, STEP_PICK_FIRMWARE_ZIGBEE, ) -from homeassistant.components.homeassistant_sky_connect.const import DOMAIN from homeassistant.core import HomeAssistant from 
homeassistant.data_entry_flow import FlowResultType -from .test_config_flow import USB_DATA_ZBT1, delayed_side_effect, mock_addon_info +from .test_config_flow import ( + TEST_DEVICE, + TEST_DOMAIN, + TEST_HARDWARE_NAME, + delayed_side_effect, + mock_addon_info, + mock_test_firmware_platform, # noqa: F401 +) from tests.common import MockConfigEntry @pytest.mark.parametrize( - ("usb_data", "model", "next_step"), + "next_step", [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1", STEP_PICK_FIRMWARE_ZIGBEE), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1", STEP_PICK_FIRMWARE_THREAD), + STEP_PICK_FIRMWARE_ZIGBEE, + STEP_PICK_FIRMWARE_THREAD, ], ) async def test_config_flow_cannot_probe_firmware( - usb_data: usb.UsbServiceInfo, model: str, next_step: str, hass: HomeAssistant + next_step: str, hass: HomeAssistant ) -> None: """Test failure case when firmware cannot be probed.""" @@ -42,7 +47,7 @@ async def test_config_flow_cannot_probe_firmware( ) as (mock_otbr_manager, mock_flasher_manager): # Start the flow result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) result = await hass.config_entries.flow.async_configure( @@ -54,18 +59,12 @@ async def test_config_flow_cannot_probe_firmware( assert result["reason"] == "unsupported_firmware" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_config_flow_zigbee_not_hassio_wrong_firmware( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test when the stick is used with a non-hassio setup but the firmware is bad.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -85,18 +84,12 @@ async def test_config_flow_zigbee_not_hassio_wrong_firmware( assert result["reason"] == "not_hassio" 
-@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_config_flow_zigbee_flasher_addon_already_running( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test failure case when flasher addon is already running.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -125,18 +118,10 @@ async def test_config_flow_zigbee_flasher_addon_already_running( assert result["reason"] == "addon_already_running" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_flasher_addon_info_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_zigbee_flasher_addon_info_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -166,18 +151,12 @@ async def test_config_flow_zigbee_flasher_addon_info_fails( assert result["reason"] == "addon_info_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_config_flow_zigbee_flasher_addon_install_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -202,18 +181,12 @@ async def test_config_flow_zigbee_flasher_addon_install_fails( assert result["reason"] == 
"addon_install_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_config_flow_zigbee_flasher_addon_set_config_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test failure case when flasher addon cannot be configured.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -242,18 +215,10 @@ async def test_config_flow_zigbee_flasher_addon_set_config_fails( assert result["reason"] == "addon_set_config_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_flasher_run_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_zigbee_flasher_run_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -279,18 +244,10 @@ async def test_config_flow_zigbee_flasher_run_fails( assert result["reason"] == "addon_start_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_flasher_uninstall_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_zigbee_flasher_uninstall_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon uninstall fails.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -319,18 +276,10 @@ async def 
test_config_flow_zigbee_flasher_uninstall_fails( assert result["step_id"] == "confirm_zigbee" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_not_hassio( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_not_hassio(hass: HomeAssistant) -> None: """Test when the stick is used with a non-hassio setup and Thread is selected.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -350,18 +299,10 @@ async def test_config_flow_thread_not_hassio( assert result["reason"] == "not_hassio_thread" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_info_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_addon_info_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -382,18 +323,10 @@ async def test_config_flow_thread_addon_info_fails( assert result["reason"] == "addon_info_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_already_running( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_addon_already_running(hass: HomeAssistant) -> None: """Test failure case when the Thread addon is already running.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, 
context={"source": "hardware"} ) with mock_addon_info( @@ -425,18 +358,10 @@ async def test_config_flow_thread_addon_already_running( assert result["reason"] == "otbr_addon_already_running" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_install_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_addon_install_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -460,18 +385,10 @@ async def test_config_flow_thread_addon_install_fails( assert result["reason"] == "addon_install_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_set_config_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_addon_set_config_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be configured.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -495,18 +412,10 @@ async def test_config_flow_thread_addon_set_config_fails( assert result["reason"] == "addon_set_config_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_flasher_run_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_flasher_run_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon fails to run.""" result = await 
hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -531,18 +440,10 @@ async def test_config_flow_thread_flasher_run_fails( assert result["reason"] == "addon_start_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_flasher_uninstall_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_flasher_uninstall_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon uninstall fails.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -572,27 +473,16 @@ async def test_config_flow_thread_flasher_uninstall_fails( assert result["step_id"] == "confirm_otbr" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_options_flow_zigbee_to_thread_zha_configured( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test the options flow migration failure, ZHA using the stick.""" config_entry = MockConfigEntry( - domain="homeassistant_sky_connect", + domain=TEST_DOMAIN, data={ "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, }, version=1, minor_version=2, @@ -604,7 +494,7 @@ async def test_options_flow_zigbee_to_thread_zha_configured( # Set up ZHA as well zha_config_entry = MockConfigEntry( domain="zha", - data={"device": {"path": usb_data.device}}, + data={"device": {"path": 
TEST_DEVICE}}, ) zha_config_entry.add_to_hass(hass) @@ -620,27 +510,16 @@ async def test_options_flow_zigbee_to_thread_zha_configured( assert result["reason"] == "zha_still_using_stick" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_options_flow_thread_to_zigbee_otbr_configured( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test the options flow migration failure, OTBR still using the stick.""" config_entry = MockConfigEntry( - domain="homeassistant_sky_connect", + domain=TEST_DOMAIN, data={ "firmware": "spinel", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, }, version=1, minor_version=2, @@ -658,7 +537,7 @@ async def test_options_flow_thread_to_zigbee_otbr_configured( otbr_addon_info=AddonInfo( available=True, hostname=None, - options={"device": usb_data.device}, + options={"device": TEST_DEVICE}, state=AddonState.RUNNING, update_available=False, version="1.0.0", diff --git a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py index 1df8fa86cf9..5718133cd24 100644 --- a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py +++ b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.hassio import AddonError, AddonInfo, AddonState, HassIO from homeassistant.components.hassio.handler import HassioAPIError 
diff --git a/tests/components/homeassistant_hardware/test_util.py b/tests/components/homeassistant_hardware/test_util.py new file mode 100644 index 00000000000..4a30a39686f --- /dev/null +++ b/tests/components/homeassistant_hardware/test_util.py @@ -0,0 +1,158 @@ +"""Test hardware utilities.""" + +from unittest.mock import AsyncMock, patch + +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components.hassio import AddonError, AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.util import ( + FirmwareGuess, + get_zha_device_path, + guess_firmware_type, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +ZHA_CONFIG_ENTRY = MockConfigEntry( + domain="zha", + unique_id="some_unique_id", + data={ + "device": { + "path": "socket://1.2.3.4:5678", + "baudrate": 115200, + "flow_control": None, + }, + "radio_type": "ezsp", + }, + version=4, +) + + +def test_get_zha_device_path() -> None: + """Test extracting the ZHA device path from its config entry.""" + assert ( + get_zha_device_path(ZHA_CONFIG_ENTRY) == ZHA_CONFIG_ENTRY.data["device"]["path"] + ) + + +def test_get_zha_device_path_ignored_discovery() -> None: + """Test extracting the ZHA device path from an ignored ZHA discovery.""" + config_entry = MockConfigEntry( + domain="zha", + unique_id="some_unique_id", + data={}, + version=4, + ) + + assert get_zha_device_path(config_entry) is None + + +async def test_guess_firmware_type_unknown(hass: HomeAssistant) -> None: + """Test guessing the firmware type.""" + + assert (await guess_firmware_type(hass, "/dev/missing")) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" + ) + + +async def test_guess_firmware_type(hass: HomeAssistant) -> None: + """Test guessing the firmware.""" + path = ZHA_CONFIG_ENTRY.data["device"]["path"] + + ZHA_CONFIG_ENTRY.add_to_hass(hass) + + 
ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.NOT_LOADED) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="zha" + ) + + # When ZHA is running, we indicate as such when guessing + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.LOADED) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager = AsyncMock() + mock_multipan_addon_manager = AsyncMock() + + with ( + patch( + "homeassistant.components.homeassistant_hardware.util.is_hassio", + return_value=True, + ), + patch( + "homeassistant.components.homeassistant_hardware.util.get_otbr_addon_manager", + return_value=mock_otbr_addon_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.util.get_multiprotocol_addon_manager", + return_value=mock_multipan_addon_manager, + ), + ): + mock_otbr_addon_manager.async_get_addon_info.side_effect = AddonError() + mock_multipan_addon_manager.async_get_addon_info.side_effect = AddonError() + + # Hassio errors are ignored and we still go with ZHA + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.side_effect = None + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": "/some/other/device"}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will prefer ZHA, as it is running (and actually pointing to the device) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.NOT_RUNNING, + 
update_available=False, + version="1.0.0", + ) + + # We will still prefer ZHA, as it is the one actually running + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Finally, ZHA loses out to OTBR + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.SPINEL, source="otbr" + ) + + mock_multipan_addon_manager.async_get_addon_info.side_effect = None + mock_multipan_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Which will lose out to multi-PAN + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.CPC, source="multiprotocol" + ) diff --git a/tests/components/homeassistant_sky_connect/conftest.py b/tests/components/homeassistant_sky_connect/conftest.py index 099582999d5..69b0901aadf 100644 --- a/tests/components/homeassistant_sky_connect/conftest.py +++ b/tests/components/homeassistant_sky_connect/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for the Home Assistant SkyConnect integration.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture(name="mock_usb_serial_by_id", autouse=True) diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index a4b7b4fb81d..0d4c517b07f 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ 
b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -1,30 +1,20 @@ """Test the Home Assistant SkyConnect config flow.""" -import asyncio -from collections.abc import Awaitable, Callable -import contextlib -from typing import Any -from unittest.mock import AsyncMock, Mock, call, patch +from unittest.mock import Mock, patch import pytest -from universal_silabs_flasher.const import ApplicationType from homeassistant.components import usb from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + STEP_PICK_FIRMWARE_ZIGBEE, +) from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( CONF_DISABLE_MULTI_PAN, get_flasher_addon_manager, get_multiprotocol_addon_manager, ) -from homeassistant.components.homeassistant_sky_connect.config_flow import ( - STEP_PICK_FIRMWARE_THREAD, - STEP_PICK_FIRMWARE_ZIGBEE, -) from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.components.homeassistant_sky_connect.util import ( - get_otbr_addon_manager, - get_zigbee_flasher_addon_manager, -) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -49,86 +39,6 @@ USB_DATA_ZBT1 = usb.UsbServiceInfo( ) -def delayed_side_effect() -> Callable[..., Awaitable[None]]: - """Slows down eager tasks by delaying for an event loop tick.""" - - async def side_effect(*args: Any, **kwargs: Any) -> None: - await asyncio.sleep(0) - - return side_effect - - -@contextlib.contextmanager -def mock_addon_info( - hass: HomeAssistant, - *, - is_hassio: bool = True, - app_type: ApplicationType = ApplicationType.EZSP, - otbr_addon_info: AddonInfo = AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_INSTALLED, - update_available=False, - version=None, - ), - flasher_addon_info: AddonInfo = AddonInfo( - available=True, - hostname=None, - 
options={}, - state=AddonState.NOT_INSTALLED, - update_available=False, - version=None, - ), -): - """Mock the main addon states for the config flow.""" - mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) - mock_flasher_manager.addon_name = "Silicon Labs Flasher" - mock_flasher_manager.async_start_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_flasher_manager.async_install_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_flasher_manager.async_get_addon_info.return_value = flasher_addon_info - - mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) - mock_otbr_manager.addon_name = "OpenThread Border Router" - mock_otbr_manager.async_install_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_uninstall_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_start_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_get_addon_info.return_value = otbr_addon_info - - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", - return_value=mock_otbr_manager, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", - return_value=mock_flasher_manager, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", - return_value=is_hassio, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", - return_value=app_type, - ), - ): - yield mock_otbr_manager, mock_flasher_manager - - @pytest.mark.parametrize( ("usb_data", "model"), [ @@ -136,7 +46,7 @@ def mock_addon_info( (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -async def test_config_flow_zigbee( +async def 
test_config_flow( usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test the config flow for SkyConnect.""" @@ -146,453 +56,42 @@ async def test_config_flow_zigbee( assert result["type"] is FlowResultType.MENU assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option: we are now installing the addon - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_zigbee_flasher_addon" - assert result["description_placeholders"]["firmware_type"] == "spinel" - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now configuring the addon and running it - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" - assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now uninstalling the addon - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "uninstall_zigbee_flasher_addon" - assert result["progress_action"] == "uninstall_zigbee_flasher_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - # We are finally done with the addon - assert 
mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] - - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - } - - # Ensure a ZHA discovery flow has been created - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - zha_flow = flows[0] - assert zha_flow["handler"] == "zha" - assert zha_flow["context"]["source"] == "hardware" - assert zha_flow["step_id"] == "confirm" - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_skip_step_if_installed( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test the config flow for SkyConnect, skip installing the addon if necessary.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - flasher_addon_info=AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ), - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu 
option: we skip installation, instead we directly run it - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" - assert result["description_placeholders"]["firmware_type"] == "spinel" - assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - # Uninstall the addon - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - # Done - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test the config flow for SkyConnect.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert 
result["step_id"] == "install_otbr_addon" - assert result["description_placeholders"]["firmware_type"] == "ezsp" - assert result["description_placeholders"]["model"] == model - - await hass.async_block_till_done(wait_background_tasks=True) - - mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ) - - # Progress the flow, it is now configuring the addon and running it - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "spinel", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - } - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def 
test_config_flow_thread_addon_already_installed( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test the Thread config flow for SkyConnect, addon is already installed.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - with mock_addon_info( - hass, - app_type=ApplicationType.EZSP, - otbr_addon_info=AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_RUNNING, - update_available=False, - version=None, - ), - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_not_hassio( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test when the stick is used with a non-hassio setup.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - with mock_addon_info( - hass, - is_hassio=False, 
- app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - } - - # Ensure a ZHA discovery flow has been created - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - zha_flow = flows[0] - assert zha_flow["handler"] == "zha" - assert zha_flow["context"]["source"] == "hardware" - assert zha_flow["step_id"] == "confirm" - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_options_flow_zigbee_to_thread( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test the options flow for SkyConnect, migrating Zigbee to Thread.""" - config_entry = MockConfigEntry( - domain="homeassistant_sky_connect", - data={ - "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - }, - version=1, - minor_version=2, - ) - config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(config_entry.entry_id) - - # First step is confirmation - result = await 
hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - assert result["description_placeholders"]["firmware_type"] == "ezsp" assert result["description_placeholders"]["model"] == model - with mock_addon_info( - hass, - app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - result = await hass.config_entries.options.async_configure( + async def mock_async_step_pick_firmware_zigbee(self, data): + return await self.async_step_confirm_zigbee(user_input={}) + + with patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareConfigFlow.async_step_pick_firmware_zigbee", + autospec=True, + side_effect=mock_async_step_pick_firmware_zigbee, + ): + result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_otbr_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ) - - # Progress the flow, it is now configuring the addon and running it - result = await hass.config_entries.options.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 460800, - "flow_control": True, - 
"autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - # We are now done - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={} - ) assert result["type"] is FlowResultType.CREATE_ENTRY - # The firmware type has been updated - assert config_entry.data["firmware"] == "spinel" + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } + + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" @pytest.mark.parametrize( @@ -602,10 +101,10 @@ async def test_options_flow_zigbee_to_thread( (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -async def test_options_flow_thread_to_zigbee( +async def test_options_flow( usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: - """Test the options flow for SkyConnect, migrating Thread to Zigbee.""" + """Test the options flow for SkyConnect.""" config_entry = MockConfigEntry( domain="homeassistant_sky_connect", data={ @@ -632,62 +131,32 @@ async def test_options_flow_thread_to_zigbee( assert result["description_placeholders"]["firmware_type"] == "spinel" assert result["description_placeholders"]["model"] == model - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick 
the menu option: we are now installing the addon + async def mock_async_step_pick_firmware_zigbee(self, data): + return await self.async_step_confirm_zigbee(user_input={}) + + with patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareOptionsFlow.async_step_pick_firmware_zigbee", + autospec=True, + side_effect=mock_async_step_pick_firmware_zigbee, + ): result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_zigbee_flasher_addon" - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now configuring the addon and running it - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" - assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now uninstalling the addon - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "uninstall_zigbee_flasher_addon" - assert result["progress_action"] == "uninstall_zigbee_flasher_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - # We are finally done with the addon - assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is 
FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - # We are now done - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={} - ) assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"] is True - # The firmware type has been updated - assert config_entry.data["firmware"] == "ezsp" + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } @pytest.mark.parametrize( diff --git a/tests/components/homeassistant_sky_connect/test_hardware.py b/tests/components/homeassistant_sky_connect/test_hardware.py index 888ed27a3c0..f39e648b0f2 100644 --- a/tests/components/homeassistant_sky_connect/test_hardware.py +++ b/tests/components/homeassistant_sky_connect/test_hardware.py @@ -1,7 +1,8 @@ """Test the Home Assistant SkyConnect hardware platform.""" from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/homeassistant_sky_connect/test_init.py b/tests/components/homeassistant_sky_connect/test_init.py index 88b57f2dd64..e1c13771fdc 100644 --- a/tests/components/homeassistant_sky_connect/test_init.py +++ b/tests/components/homeassistant_sky_connect/test_init.py @@ -4,8 +4,8 @@ from unittest.mock import patch from universal_silabs_flasher.const import ApplicationType +from homeassistant.components.homeassistant_hardware.util import FirmwareGuess from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from 
homeassistant.components.homeassistant_sky_connect.util import FirmwareGuess from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry diff --git a/tests/components/homeassistant_sky_connect/test_util.py b/tests/components/homeassistant_sky_connect/test_util.py index b560acc65b7..1d1d70c1b4c 100644 --- a/tests/components/homeassistant_sky_connect/test_util.py +++ b/tests/components/homeassistant_sky_connect/test_util.py @@ -1,24 +1,14 @@ """Test SkyConnect utilities.""" -from unittest.mock import AsyncMock, patch - -from universal_silabs_flasher.const import ApplicationType - -from homeassistant.components.hassio import AddonError, AddonInfo, AddonState from homeassistant.components.homeassistant_sky_connect.const import ( DOMAIN, HardwareVariant, ) from homeassistant.components.homeassistant_sky_connect.util import ( - FirmwareGuess, get_hardware_variant, get_usb_service_info, - get_zha_device_path, - guess_firmware_type, ) from homeassistant.components.usb import UsbServiceInfo -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -52,20 +42,6 @@ CONNECT_ZBT1_CONFIG_ENTRY = MockConfigEntry( version=2, ) -ZHA_CONFIG_ENTRY = MockConfigEntry( - domain="zha", - unique_id="some_unique_id", - data={ - "device": { - "path": "/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_3c0ed67c628beb11b1cd64a0f320645d-if00-port0", - "baudrate": 115200, - "flow_control": None, - }, - "radio_type": "ezsp", - }, - version=4, -) - def test_get_usb_service_info() -> None: """Test `get_usb_service_info` conversion.""" @@ -85,131 +61,3 @@ def test_get_hardware_variant() -> None: assert ( get_hardware_variant(CONNECT_ZBT1_CONFIG_ENTRY) == HardwareVariant.CONNECT_ZBT1 ) - - -def test_get_zha_device_path() -> None: - """Test extracting the ZHA device path from its config entry.""" - assert ( - get_zha_device_path(ZHA_CONFIG_ENTRY) == 
ZHA_CONFIG_ENTRY.data["device"]["path"] - ) - - -def test_get_zha_device_path_ignored_discovery() -> None: - """Test extracting the ZHA device path from an ignored ZHA discovery.""" - config_entry = MockConfigEntry( - domain="zha", - unique_id="some_unique_id", - data={}, - version=4, - ) - - assert get_zha_device_path(config_entry) is None - - -async def test_guess_firmware_type_unknown(hass: HomeAssistant) -> None: - """Test guessing the firmware type.""" - - assert (await guess_firmware_type(hass, "/dev/missing")) == FirmwareGuess( - is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" - ) - - -async def test_guess_firmware_type(hass: HomeAssistant) -> None: - """Test guessing the firmware.""" - path = ZHA_CONFIG_ENTRY.data["device"]["path"] - - ZHA_CONFIG_ENTRY.add_to_hass(hass) - - ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.NOT_LOADED) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=False, firmware_type=ApplicationType.EZSP, source="zha" - ) - - # When ZHA is running, we indicate as such when guessing - ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.LOADED) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager = AsyncMock() - mock_multipan_addon_manager = AsyncMock() - - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.util.is_hassio", - return_value=True, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.util.get_otbr_addon_manager", - return_value=mock_otbr_addon_manager, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.util.get_multiprotocol_addon_manager", - return_value=mock_multipan_addon_manager, - ), - ): - mock_otbr_addon_manager.async_get_addon_info.side_effect = AddonError() - mock_multipan_addon_manager.async_get_addon_info.side_effect = AddonError() - - # Hassio errors are ignored and we still go with ZHA - assert 
(await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager.async_get_addon_info.side_effect = None - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": "/some/other/device"}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # We will prefer ZHA, as it is running (and actually pointing to the device) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.0.0", - ) - - # We will still prefer ZHA, as it is the one actually running - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # Finally, ZHA loses out to OTBR - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.SPINEL, source="otbr" - ) - - mock_multipan_addon_manager.async_get_addon_info.side_effect = None - mock_multipan_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # Which will lose out to multi-PAN - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.CPC, source="multiprotocol" - ) diff --git 
a/tests/components/homeassistant_yellow/conftest.py b/tests/components/homeassistant_yellow/conftest.py index 38398eb719f..0077fb27058 100644 --- a/tests/components/homeassistant_yellow/conftest.py +++ b/tests/components/homeassistant_yellow/conftest.py @@ -1,17 +1,17 @@ """Test fixtures for the Home Assistant Yellow integration.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: """Mock the radio connection and probing of the ZHA config flow.""" - def mock_probe(config: dict[str, Any]) -> None: + def mock_probe(config: dict[str, Any]) -> dict[str, Any]: # The radio probing will return the correct baudrate return {**config, "baudrate": 115200} diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index 4ae04180a64..95d7df89c9d 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -1,9 +1,9 @@ """Test the Home Assistant Yellow config flow.""" +from collections.abc import Generator from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN from homeassistant.components.homeassistant_yellow.const import DOMAIN diff --git a/tests/components/homekit/conftest.py b/tests/components/homekit/conftest.py index 26333b0b807..6bdad5d2b4c 100644 --- a/tests/components/homekit/conftest.py +++ b/tests/components/homekit/conftest.py @@ -4,7 +4,6 @@ from asyncio import AbstractEventLoop from collections.abc import Generator from contextlib import suppress import os -from typing import Any from unittest.mock import MagicMock, patch import pytest @@ -13,13 +12,13 @@ from homeassistant.components.device_tracker.legacy import 
YAML_DEVICES from homeassistant.components.homekit.accessories import HomeDriver from homeassistant.components.homekit.const import BRIDGE_NAME, EVENT_HOMEKIT_CHANGED from homeassistant.components.homekit.iidmanager import AccessoryIIDStorage -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_capture_events @pytest.fixture -def iid_storage(hass): +def iid_storage(hass: HomeAssistant) -> Generator[AccessoryIIDStorage]: """Mock the iid storage.""" with patch.object(AccessoryIIDStorage, "_async_schedule_save"): yield AccessoryIIDStorage(hass, "") @@ -28,7 +27,7 @@ def iid_storage(hass): @pytest.fixture def run_driver( hass: HomeAssistant, event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage -) -> Generator[HomeDriver, Any, None]: +) -> Generator[HomeDriver]: """Return a custom AccessoryDriver instance for HomeKit accessory init. This mock does not mock async_stop, so the driver will not be stopped @@ -57,7 +56,7 @@ def run_driver( @pytest.fixture def hk_driver( hass: HomeAssistant, event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage -) -> Generator[HomeDriver, Any, None]: +) -> Generator[HomeDriver]: """Return a custom AccessoryDriver instance for HomeKit accessory init.""" with ( patch("pyhap.accessory_driver.AsyncZeroconf"), @@ -89,7 +88,7 @@ def mock_hap( event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage, mock_zeroconf: MagicMock, -) -> Generator[HomeDriver, Any, None]: +) -> Generator[HomeDriver]: """Return a custom AccessoryDriver instance for HomeKit accessory init.""" with ( patch("pyhap.accessory_driver.AsyncZeroconf"), @@ -122,13 +121,13 @@ def mock_hap( @pytest.fixture -def events(hass): +def events(hass: HomeAssistant) -> list[Event]: """Yield caught homekit_changed events.""" return async_capture_events(hass, EVENT_HOMEKIT_CHANGED) @pytest.fixture -def demo_cleanup(hass): +def demo_cleanup(hass: HomeAssistant) -> Generator[None]: """Clean up 
device tracker demo file.""" yield with suppress(FileNotFoundError): diff --git a/tests/components/homekit/test_accessories.py b/tests/components/homekit/test_accessories.py index 32cd6622492..c37cac84b8a 100644 --- a/tests/components/homekit/test_accessories.py +++ b/tests/components/homekit/test_accessories.py @@ -47,7 +47,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, __version__ as hass_version, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service @@ -667,7 +667,9 @@ async def test_battery_appears_after_startup( assert acc._char_battery is None -async def test_call_service(hass: HomeAssistant, hk_driver, events) -> None: +async def test_call_service( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test call_service method.""" entity_id = "homekit.accessory" hass.states.async_set(entity_id, None) diff --git a/tests/components/homekit/test_diagnostics.py b/tests/components/homekit/test_diagnostics.py index 728624da0d0..ce3c954c447 100644 --- a/tests/components/homekit/test_diagnostics.py +++ b/tests/components/homekit/test_diagnostics.py @@ -12,7 +12,7 @@ from homeassistant.components.homekit.const import ( ) from homeassistant.const import CONF_NAME, CONF_PORT, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from .util import async_init_integration @@ -321,9 +321,7 @@ async def test_config_entry_with_trigger_accessory( hass: HomeAssistant, hass_client: ClientSessionGenerator, hk_driver, - events, demo_cleanup, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: """Test generating diagnostics for a bridge config entry with a trigger accessory.""" diff --git 
a/tests/components/homekit/test_get_accessories.py b/tests/components/homekit/test_get_accessories.py index 02a39ed9258..c4b1cbe98d8 100644 --- a/tests/components/homekit/test_get_accessories.py +++ b/tests/components/homekit/test_get_accessories.py @@ -335,10 +335,10 @@ def test_type_sensors(type_name, entity_id, state, attrs) -> None: ("SelectSwitch", "select.test", "option1", {}, {}), ("Switch", "switch.test", "on", {}, {}), ("Switch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SWITCH}), - ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_FAUCET}), - ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_VALVE}), - ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_SHOWER}), - ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_SPRINKLER}), + ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_FAUCET}), + ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_VALVE}), + ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SHOWER}), + ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SPRINKLER}), ], ) def test_type_switches(type_name, entity_id, state, attrs, config) -> None: @@ -350,6 +350,21 @@ def test_type_switches(type_name, entity_id, state, attrs, config) -> None: assert mock_type.called +@pytest.mark.parametrize( + ("type_name", "entity_id", "state", "attrs"), + [ + ("Valve", "valve.test", "on", {}), + ], +) +def test_type_valve(type_name, entity_id, state, attrs) -> None: + """Test if valve types are associated correctly.""" + mock_type = Mock() + with patch.dict(TYPES, {type_name: mock_type}): + entity_state = State(entity_id, state, attrs) + get_accessory(None, None, entity_state, 2, {}) + assert mock_type.called + + @pytest.mark.parametrize( ("type_name", "entity_id", "state", "attrs"), [ diff --git a/tests/components/homekit/test_homekit.py b/tests/components/homekit/test_homekit.py index 33bfc6e66d3..93458724c5e 100644 --- a/tests/components/homekit/test_homekit.py +++ b/tests/components/homekit/test_homekit.py @@ -14,6 +14,7 @@ 
import pytest from homeassistant import config as hass_config from homeassistant.components import homekit as homekit_base, zeroconf from homeassistant.components.binary_sensor import BinarySensorDeviceClass +from homeassistant.components.event import EventDeviceClass from homeassistant.components.homekit import ( MAX_DEVICES, STATUS_READY, @@ -58,7 +59,8 @@ from homeassistant.const import ( STATE_ON, EntityCategory, ) -from homeassistant.core import HomeAssistant, HomeAssistantError, State +from homeassistant.core import HomeAssistant, State +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import ( device_registry as dr, entity_registry as er, @@ -1841,7 +1843,11 @@ async def test_homekit_uses_system_zeroconf(hass: HomeAssistant, hk_driver) -> N entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - entry_data: HomeKitEntryData = hass.data[DOMAIN][entry.entry_id] + # New tests should not access runtime data. + # Do not use this pattern for new tests. 
+ entry_data: HomeKitEntryData = hass.config_entries.async_get_entry( + entry.entry_id + ).runtime_data assert entry_data.homekit.driver.advertiser == system_async_zc assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() @@ -1938,12 +1944,21 @@ async def test_homekit_ignored_missing_devices( ) +@pytest.mark.parametrize( + ("domain", "device_class"), + [ + ("binary_sensor", BinarySensorDeviceClass.MOTION), + ("event", EventDeviceClass.MOTION), + ], +) @pytest.mark.usefixtures("mock_async_zeroconf") async def test_homekit_finds_linked_motion_sensors( hass: HomeAssistant, hk_driver, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + domain: str, + device_class: EventDeviceClass | BinarySensorDeviceClass, ) -> None: """Test HomeKit start method.""" entry = await async_init_integration(hass) @@ -1963,21 +1978,21 @@ async def test_homekit_finds_linked_motion_sensors( connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - binary_motion_sensor = entity_registry.async_get_or_create( - "binary_sensor", + entry = entity_registry.async_get_or_create( + domain, "camera", "motion_sensor", device_id=device_entry.id, - original_device_class=BinarySensorDeviceClass.MOTION, + original_device_class=device_class, ) camera = entity_registry.async_get_or_create( "camera", "camera", "demo", device_id=device_entry.id ) hass.states.async_set( - binary_motion_sensor.entity_id, + entry.entity_id, STATE_ON, - {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION}, + {ATTR_DEVICE_CLASS: device_class}, ) hass.states.async_set(camera.entity_id, STATE_ON) @@ -2000,7 +2015,83 @@ async def test_homekit_finds_linked_motion_sensors( "model": "Camera Server", "platform": "test", "sw_version": "0.16.0", - "linked_motion_sensor": "binary_sensor.camera_motion_sensor", + "linked_motion_sensor": entry.entity_id, + }, + ) + + +@pytest.mark.parametrize( + ("domain", "device_class"), + [ + ("binary_sensor", 
BinarySensorDeviceClass.OCCUPANCY), + ("event", EventDeviceClass.DOORBELL), + ], +) +@pytest.mark.usefixtures("mock_async_zeroconf") +async def test_homekit_finds_linked_doorbell_sensors( + hass: HomeAssistant, + hk_driver, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + domain: str, + device_class: EventDeviceClass | BinarySensorDeviceClass, +) -> None: + """Test homekit can find linked doorbell sensors.""" + entry = await async_init_integration(hass) + + homekit = _mock_homekit(hass, entry, HOMEKIT_MODE_BRIDGE) + + homekit.driver = hk_driver + homekit.bridge = HomeBridge(hass, hk_driver, "mock_bridge") + + config_entry = MockConfigEntry(domain="test", data={}) + config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + sw_version="0.16.0", + model="Camera Server", + manufacturer="Ubq", + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + entry = entity_registry.async_get_or_create( + domain, + "camera", + "doorbell_sensor", + device_id=device_entry.id, + original_device_class=device_class, + ) + camera = entity_registry.async_get_or_create( + "camera", "camera", "demo", device_id=device_entry.id + ) + + hass.states.async_set( + entry.entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: device_class}, + ) + hass.states.async_set(camera.entity_id, STATE_ON) + + with ( + patch.object(homekit.bridge, "add_accessory"), + patch(f"{PATH_HOMEKIT}.async_show_setup_message"), + patch(f"{PATH_HOMEKIT}.get_accessory") as mock_get_acc, + patch("pyhap.accessory_driver.AccessoryDriver.async_start"), + ): + await homekit.async_start() + await hass.async_block_till_done() + + mock_get_acc.assert_called_with( + hass, + ANY, + ANY, + ANY, + { + "manufacturer": "Ubq", + "model": "Camera Server", + "platform": "test", + "sw_version": "0.16.0", + "linked_doorbell_sensor": entry.entity_id, }, ) diff --git a/tests/components/homekit/test_type_cameras.py 
b/tests/components/homekit/test_type_cameras.py index 184ce1b6521..a32656e9f2b 100644 --- a/tests/components/homekit/test_type_cameras.py +++ b/tests/components/homekit/test_type_cameras.py @@ -9,6 +9,7 @@ import pytest from homeassistant.components import camera, ffmpeg from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.components.camera.img_util import TurboJPEGSingleton +from homeassistant.components.event import EventDeviceClass from homeassistant.components.homekit.accessories import HomeBridge from homeassistant.components.homekit.const import ( AUDIO_CODEC_COPY, @@ -30,10 +31,17 @@ from homeassistant.components.homekit.const import ( ) from homeassistant.components.homekit.type_cameras import Camera from homeassistant.components.homekit.type_switches import Switch -from homeassistant.const import ATTR_DEVICE_CLASS, STATE_OFF, STATE_ON +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, + STATE_UNKNOWN, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from tests.components.camera.common import mock_turbo_jpeg @@ -130,9 +138,7 @@ def _get_failing_mock_ffmpeg(): return ffmpeg -async def test_camera_stream_source_configured( - hass: HomeAssistant, run_driver, events -) -> None: +async def test_camera_stream_source_configured(hass: HomeAssistant, run_driver) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -252,7 +258,7 @@ async def test_camera_stream_source_configured( async def test_camera_stream_source_configured_with_failing_ffmpeg( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera that can stream with a configured source with ffmpeg 
failing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -308,9 +314,7 @@ async def test_camera_stream_source_configured_with_failing_ffmpeg( await _async_stop_all_streams(hass, acc) -async def test_camera_stream_source_found( - hass: HomeAssistant, run_driver, events -) -> None: +async def test_camera_stream_source_found(hass: HomeAssistant, run_driver) -> None: """Test a camera that can stream and we get the source from the entity.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -396,9 +400,7 @@ async def test_camera_stream_source_found( ) -async def test_camera_stream_source_fails( - hass: HomeAssistant, run_driver, events -) -> None: +async def test_camera_stream_source_fails(hass: HomeAssistant, run_driver) -> None: """Test a camera that can stream and we cannot get the source from the entity.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -439,7 +441,7 @@ async def test_camera_stream_source_fails( await _async_stop_all_streams(hass, acc) -async def test_camera_with_no_stream(hass: HomeAssistant, run_driver, events) -> None: +async def test_camera_with_no_stream(hass: HomeAssistant, run_driver) -> None: """Test a camera that cannot stream.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component(hass, camera.DOMAIN, {camera.DOMAIN: {}}) @@ -472,7 +474,7 @@ async def test_camera_with_no_stream(hass: HomeAssistant, run_driver, events) -> async def test_camera_stream_source_configured_and_copy_codec( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -547,7 +549,7 @@ async def test_camera_stream_source_configured_and_copy_codec( async def test_camera_stream_source_configured_and_override_profile_names( - hass: 
HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera that can stream with a configured source over overridden profile names.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -623,7 +625,7 @@ async def test_camera_stream_source_configured_and_override_profile_names( async def test_camera_streaming_fails_after_starting_ffmpeg( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -700,7 +702,7 @@ async def test_camera_streaming_fails_after_starting_ffmpeg( async def test_camera_with_linked_motion_sensor( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera with a linked motion sensor can update.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -793,8 +795,151 @@ async def test_camera_with_linked_motion_sensor( assert char.value is True +async def test_camera_with_linked_motion_event(hass: HomeAssistant, run_driver) -> None: + """Test a camera with a linked motion event entity can update.""" + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + await async_setup_component( + hass, camera.DOMAIN, {camera.DOMAIN: {"platform": "demo"}} + ) + await hass.async_block_till_done() + motion_entity_id = "event.motion" + + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + entity_id = "camera.demo_camera" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Camera( + hass, + run_driver, + "Camera", + entity_id, + 2, + { + CONF_STREAM_SOURCE: "/dev/null", + CONF_SUPPORT_AUDIO: True, + CONF_VIDEO_CODEC: VIDEO_CODEC_H264_OMX, + CONF_AUDIO_CODEC: AUDIO_CODEC_COPY, + CONF_LINKED_MOTION_SENSOR: 
motion_entity_id, + }, + ) + bridge = HomeBridge("hass", run_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 17 # Camera + + service = acc.get_service(SERV_MOTION_SENSOR) + assert service + char = service.get_characteristic(CHAR_MOTION_DETECTED) + assert char + + assert char.value is False + broker = MagicMock() + char.broker = broker + + hass.states.async_set( + motion_entity_id, STATE_UNKNOWN, {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION} + ) + await hass.async_block_till_done() + assert len(broker.mock_calls) == 0 + broker.reset_mock() + assert char.value is False + + char.set_value(True) + fire_time = dt_util.utcnow().isoformat() + hass.states.async_set( + motion_entity_id, fire_time, {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION} + ) + await hass.async_block_till_done() + assert len(broker.mock_calls) == 4 + broker.reset_mock() + assert char.value is False + + hass.states.async_set( + motion_entity_id, + fire_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + force_update=True, + ) + await hass.async_block_till_done() + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + hass.states.async_set( + motion_entity_id, + fire_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, + ) + await hass.async_block_till_done() + assert len(broker.mock_calls) == 0 + broker.reset_mock() + # Ensure we do not throw when the linked + # motion sensor is removed + hass.states.async_remove(motion_entity_id) + await hass.async_block_till_done() + acc.run() + await hass.async_block_till_done() + assert char.value is False + + # Ensure re-adding does not fire an event + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a second update does + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + 
dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + # Now go unavailable + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # Going from unavailable to a state should not fire an event + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a another update does + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + async def test_camera_with_a_missing_linked_motion_sensor( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera with a configured linked motion sensor that is missing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -826,7 +971,7 @@ async def test_camera_with_a_missing_linked_motion_sensor( async def test_camera_with_linked_doorbell_sensor( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera with a linked doorbell sensor can update.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -941,8 +1086,154 @@ async def test_camera_with_linked_doorbell_sensor( assert char2.value is None +async def test_camera_with_linked_doorbell_event( + hass: HomeAssistant, run_driver +) -> None: + """Test a camera with a linked doorbell event can update.""" + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + await async_setup_component( + hass, camera.DOMAIN, {camera.DOMAIN: {"platform": "demo"}} + ) + await 
hass.async_block_till_done() + doorbell_entity_id = "event.doorbell" + + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + entity_id = "camera.demo_camera" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Camera( + hass, + run_driver, + "Camera", + entity_id, + 2, + { + CONF_STREAM_SOURCE: "/dev/null", + CONF_SUPPORT_AUDIO: True, + CONF_VIDEO_CODEC: VIDEO_CODEC_H264_OMX, + CONF_AUDIO_CODEC: AUDIO_CODEC_COPY, + CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, + }, + ) + bridge = HomeBridge("hass", run_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 17 # Camera + + service = acc.get_service(SERV_DOORBELL) + assert service + char = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char + + assert char.value is None + + service2 = acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) + assert service2 + char2 = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char2 + broker = MagicMock() + char2.broker = broker + assert char2.value is None + + hass.states.async_set( + doorbell_entity_id, + STATE_UNKNOWN, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + + char.set_value(True) + char2.set_value(True) + broker.reset_mock() + + original_time = dt_util.utcnow().isoformat() + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 2 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + force_update=True, + ) + 
await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL, "other": "attr"}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + # Ensure we do not throw when the linked + # doorbell sensor is removed + hass.states.async_remove(doorbell_entity_id) + await hass.async_block_till_done() + acc.run() + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + + await hass.async_block_till_done() + hass.states.async_set( + doorbell_entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + # Ensure re-adding does not fire an event + assert not broker.mock_calls + broker.reset_mock() + + # going from unavailable to a state should not fire an event + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a second update does + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + async def test_camera_with_a_missing_linked_doorbell_sensor( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera with a configured linked doorbell sensor that is missing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) diff --git a/tests/components/homekit/test_type_covers.py b/tests/components/homekit/test_type_covers.py index 6efd9118092..b3125c6581c 100644 --- a/tests/components/homekit/test_type_covers.py +++ 
b/tests/components/homekit/test_type_covers.py @@ -40,13 +40,15 @@ from homeassistant.const import ( STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_garage_door_open_close(hass: HomeAssistant, hk_driver, events) -> None: +async def test_garage_door_open_close( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.garage_door" @@ -132,9 +134,7 @@ async def test_garage_door_open_close(hass: HomeAssistant, hk_driver, events) -> assert events[-1].data[ATTR_VALUE] is None -async def test_door_instantiate_set_position( - hass: HomeAssistant, hk_driver, events -) -> None: +async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> None: """Test if Door accessory is instantiated correctly and can set position.""" entity_id = "cover.door" @@ -185,7 +185,7 @@ async def test_door_instantiate_set_position( async def test_windowcovering_set_cover_position( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.window" @@ -295,9 +295,7 @@ async def test_windowcovering_set_cover_position( assert events[-1].data[ATTR_VALUE] == 75 -async def test_window_instantiate_set_position( - hass: HomeAssistant, hk_driver, events -) -> None: +async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) -> None: """Test if Window accessory is instantiated correctly and can set position.""" entity_id = "cover.window" @@ -348,7 +346,7 @@ async def test_window_instantiate_set_position( async def test_windowcovering_cover_set_tilt( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> 
None: """Test if accessory and HA update slat tilt accordingly.""" entity_id = "cover.window" @@ -418,7 +416,7 @@ async def test_windowcovering_cover_set_tilt( assert events[-1].data[ATTR_VALUE] == 75 -async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver, events) -> None: +async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver) -> None: """Test we lock the window covering closed when its tilt only.""" entity_id = "cover.window" @@ -442,7 +440,7 @@ async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver, events) async def test_windowcovering_open_close( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.window" @@ -525,7 +523,7 @@ async def test_windowcovering_open_close( async def test_windowcovering_open_close_stop( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.window" @@ -574,7 +572,7 @@ async def test_windowcovering_open_close_stop( async def test_windowcovering_open_close_with_position_and_stop( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.stop_window" @@ -608,7 +606,7 @@ async def test_windowcovering_open_close_with_position_and_stop( async def test_windowcovering_basic_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -646,7 +644,7 @@ async def test_windowcovering_basic_restore( async def test_windowcovering_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + 
hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event entity_registry.""" hass.set_state(CoreState.not_running) @@ -684,7 +682,7 @@ async def test_windowcovering_restore( async def test_garage_door_with_linked_obstruction_sensor( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test if accessory and HA are updated accordingly with a linked obstruction sensor.""" linked_obstruction_sensor_entity_id = "binary_sensor.obstruction" diff --git a/tests/components/homekit/test_type_fans.py b/tests/components/homekit/test_type_fans.py index d971b8c06d2..1808767c614 100644 --- a/tests/components/homekit/test_type_fans.py +++ b/tests/components/homekit/test_type_fans.py @@ -24,13 +24,13 @@ from homeassistant.const import ( STATE_ON, STATE_UNKNOWN, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_fan_basic(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_basic(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test fan with char state.""" entity_id = "fan.demo" @@ -108,7 +108,9 @@ async def test_fan_basic(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] is None -async def test_fan_direction(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_direction( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test fan with direction.""" entity_id = "fan.demo" @@ -186,7 +188,9 @@ async def test_fan_direction(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] == DIRECTION_REVERSE -async def test_fan_oscillate(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_oscillate( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> 
None: """Test fan with oscillate.""" entity_id = "fan.demo" @@ -259,7 +263,7 @@ async def test_fan_oscillate(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] is True -async def test_fan_speed(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test fan with speed.""" entity_id = "fan.demo" @@ -361,7 +365,9 @@ async def test_fan_speed(hass: HomeAssistant, hk_driver, events) -> None: assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id -async def test_fan_set_all_one_shot(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_set_all_one_shot( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test fan with speed.""" entity_id = "fan.demo" @@ -555,7 +561,7 @@ async def test_fan_set_all_one_shot(hass: HomeAssistant, hk_driver, events) -> N async def test_fan_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -597,7 +603,7 @@ async def test_fan_restore( async def test_fan_multiple_preset_modes( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test fan with multiple preset modes.""" entity_id = "fan.demo" @@ -678,7 +684,9 @@ async def test_fan_multiple_preset_modes( assert len(events) == 2 -async def test_fan_single_preset_mode(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_single_preset_mode( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test fan with a single preset mode.""" entity_id = "fan.demo" diff --git a/tests/components/homekit/test_type_humidifiers.py b/tests/components/homekit/test_type_humidifiers.py index fdd01e05a91..fbb72333c9b 100644 --- 
a/tests/components/homekit/test_type_humidifiers.py +++ b/tests/components/homekit/test_type_humidifiers.py @@ -42,12 +42,12 @@ from homeassistant.const import ( STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service -async def test_humidifier(hass: HomeAssistant, hk_driver, events) -> None: +async def test_humidifier(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test if humidifier accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -132,7 +132,9 @@ async def test_humidifier(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] == "RelativeHumidityHumidifierThreshold to 39.0%" -async def test_dehumidifier(hass: HomeAssistant, hk_driver, events) -> None: +async def test_dehumidifier( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if dehumidifier accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -220,7 +222,9 @@ async def test_dehumidifier(hass: HomeAssistant, hk_driver, events) -> None: ) -async def test_hygrostat_power_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_hygrostat_power_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -301,7 +305,7 @@ async def test_hygrostat_power_state(hass: HomeAssistant, hk_driver, events) -> async def test_hygrostat_get_humidity_range( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if humidity range is evaluated correctly.""" entity_id = "humidifier.test" @@ -452,7 +456,10 @@ async def test_humidifier_with_a_missing_linked_humidity_sensor( async def test_humidifier_as_dehumidifier( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + 
hk_driver, + events: list[Event], + caplog: pytest.LogCaptureFixture, ) -> None: """Test an invalid char_target_humidifier_dehumidifier from HomeKit.""" entity_id = "humidifier.test" @@ -495,7 +502,10 @@ async def test_humidifier_as_dehumidifier( async def test_dehumidifier_as_humidifier( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hk_driver, + events: list[Event], + caplog: pytest.LogCaptureFixture, ) -> None: """Test an invalid char_target_humidifier_dehumidifier from HomeKit.""" entity_id = "humidifier.test" diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index 8d2978fb0bd..02532a91e6d 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -39,7 +39,7 @@ from homeassistant.const import ( STATE_ON, STATE_UNKNOWN, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util @@ -53,7 +53,7 @@ async def _wait_for_light_coalesce(hass): await hass.async_block_till_done() -async def test_light_basic(hass: HomeAssistant, hk_driver, events) -> None: +async def test_light_basic(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test light with char state.""" entity_id = "light.demo" @@ -127,7 +127,7 @@ async def test_light_basic(hass: HomeAssistant, hk_driver, events) -> None: [[ColorMode.BRIGHTNESS], [ColorMode.HS], [ColorMode.COLOR_TEMP]], ) async def test_light_brightness( - hass: HomeAssistant, hk_driver, events, supported_color_modes + hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes ) -> None: """Test light with brightness.""" entity_id = "light.demo" @@ -274,7 +274,9 @@ async def test_light_brightness( assert acc.char_brightness.value == 1 -async def test_light_color_temperature(hass: HomeAssistant, 
hk_driver, events) -> None: +async def test_light_color_temperature( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test light with color temperature.""" entity_id = "light.demo" @@ -323,7 +325,7 @@ async def test_light_color_temperature(hass: HomeAssistant, hk_driver, events) - [["color_temp", "hs"], ["color_temp", "rgb"], ["color_temp", "xy"]], ) async def test_light_color_temperature_and_rgb_color( - hass: HomeAssistant, hk_driver, events, supported_color_modes + hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes ) -> None: """Test light with color temperature and rgb color not exposing temperature.""" entity_id = "light.demo" @@ -524,7 +526,7 @@ async def test_light_color_temperature_and_rgb_color( "supported_color_modes", [[ColorMode.HS], [ColorMode.RGB], [ColorMode.XY]] ) async def test_light_rgb_color( - hass: HomeAssistant, hk_driver, events, supported_color_modes + hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes ) -> None: """Test light with rgb_color.""" entity_id = "light.demo" @@ -578,7 +580,7 @@ async def test_light_rgb_color( async def test_light_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -642,7 +644,7 @@ async def test_light_restore( async def test_light_rgb_with_color_temp( hass: HomeAssistant, hk_driver, - events, + events: list[Event], supported_color_modes, state_props, turn_on_props_with_brightness, @@ -762,7 +764,7 @@ async def test_light_rgb_with_color_temp( async def test_light_rgbwx_with_color_temp_and_brightness( hass: HomeAssistant, hk_driver, - events, + events: list[Event], supported_color_modes, state_props, turn_on_props_with_brightness, @@ -824,7 +826,7 @@ async def test_light_rgbwx_with_color_temp_and_brightness( async def 
test_light_rgb_or_w_lights( hass: HomeAssistant, hk_driver, - events, + events: list[Event], ) -> None: """Test lights with RGB or W lights.""" entity_id = "light.demo" @@ -957,7 +959,7 @@ async def test_light_rgb_or_w_lights( async def test_light_rgb_with_white_switch_to_temp( hass: HomeAssistant, hk_driver, - events, + events: list[Event], supported_color_modes, state_props, ) -> None: @@ -1034,11 +1036,7 @@ async def test_light_rgb_with_white_switch_to_temp( assert acc.char_brightness.value == 100 -async def test_light_rgb_with_hs_color_none( - hass: HomeAssistant, - hk_driver, - events, -) -> None: +async def test_light_rgb_with_hs_color_none(hass: HomeAssistant, hk_driver) -> None: """Test lights hs color set to None.""" entity_id = "light.demo" @@ -1071,7 +1069,7 @@ async def test_light_rgb_with_hs_color_none( async def test_light_rgbww_with_color_temp_conversion( hass: HomeAssistant, hk_driver, - events, + events: list[Event], ) -> None: """Test lights with RGBWW convert color temp as expected.""" entity_id = "light.demo" @@ -1192,7 +1190,7 @@ async def test_light_rgbww_with_color_temp_conversion( async def test_light_rgbw_with_color_temp_conversion( hass: HomeAssistant, hk_driver, - events, + events: list[Event], ) -> None: """Test lights with RGBW convert color temp as expected.""" entity_id = "light.demo" @@ -1280,7 +1278,7 @@ async def test_light_rgbw_with_color_temp_conversion( async def test_light_set_brightness_and_color( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test light with all chars in one go.""" entity_id = "light.demo" @@ -1365,7 +1363,7 @@ async def test_light_set_brightness_and_color( ) -async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver, events) -> None: +async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver) -> None: """Test mireds are forced to ints.""" entity_id = "light.demo" @@ -1386,7 +1384,7 @@ async def test_light_min_max_mireds(hass: 
HomeAssistant, hk_driver, events) -> N async def test_light_set_brightness_and_color_temp( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test light with all chars in one go.""" entity_id = "light.demo" diff --git a/tests/components/homekit/test_type_locks.py b/tests/components/homekit/test_type_locks.py index 4d83fe41f48..31f03b1964f 100644 --- a/tests/components/homekit/test_type_locks.py +++ b/tests/components/homekit/test_type_locks.py @@ -18,12 +18,12 @@ from homeassistant.const import ( STATE_UNKNOWN, STATE_UNLOCKED, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service -async def test_lock_unlock(hass: HomeAssistant, hk_driver, events) -> None: +async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test if accessory and HA are updated accordingly.""" code = "1234" config = {ATTR_CODE: code} @@ -121,7 +121,9 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events) -> None: @pytest.mark.parametrize("config", [{}, {ATTR_CODE: None}]) -async def test_no_code(hass: HomeAssistant, hk_driver, config, events) -> None: +async def test_no_code( + hass: HomeAssistant, hk_driver, config, events: list[Event] +) -> None: """Test accessory if lock doesn't require a code.""" entity_id = "lock.kitchen_door" diff --git a/tests/components/homekit/test_type_media_players.py b/tests/components/homekit/test_type_media_players.py index fb7233e5262..14c21f0a5f5 100644 --- a/tests/components/homekit/test_type_media_players.py +++ b/tests/components/homekit/test_type_media_players.py @@ -40,13 +40,15 @@ from homeassistant.const import ( STATE_PLAYING, STATE_STANDBY, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import 
async_mock_service -async def test_media_player_set_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_media_player_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" config = { CONF_FEATURE_LIST: { @@ -177,7 +179,10 @@ async def test_media_player_set_state(hass: HomeAssistant, hk_driver, events) -> async def test_media_player_television( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hk_driver, + events: list[Event], + caplog: pytest.LogCaptureFixture, ) -> None: """Test if television accessory and HA are updated accordingly.""" entity_id = "media_player.television" @@ -366,7 +371,7 @@ async def test_media_player_television( async def test_media_player_television_basic( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver, caplog: pytest.LogCaptureFixture ) -> None: """Test if basic television accessory and HA are updated accordingly.""" entity_id = "media_player.television" @@ -409,7 +414,7 @@ async def test_media_player_television_basic( async def test_media_player_television_supports_source_select_no_sources( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver ) -> None: """Test if basic tv that supports source select but is missing a source list.""" entity_id = "media_player.television" @@ -429,7 +434,7 @@ async def test_media_player_television_supports_source_select_no_sources( async def test_tv_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -482,7 +487,7 @@ async def test_tv_restore( async def test_media_player_television_max_sources( - hass: HomeAssistant, hk_driver, events, 
caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver ) -> None: """Test if television accessory that reaches the maximum number of sources.""" entity_id = "media_player.television" @@ -541,7 +546,7 @@ async def test_media_player_television_max_sources( async def test_media_player_television_duplicate_sources( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver ) -> None: """Test if television accessory with duplicate sources.""" entity_id = "media_player.television" @@ -586,7 +591,7 @@ async def test_media_player_television_duplicate_sources( async def test_media_player_television_unsafe_chars( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if television accessory with unsafe characters.""" entity_id = "media_player.television" diff --git a/tests/components/homekit/test_type_remote.py b/tests/components/homekit/test_type_remote.py index bd4ead58a7b..dedf3ae34db 100644 --- a/tests/components/homekit/test_type_remote.py +++ b/tests/components/homekit/test_type_remote.py @@ -26,13 +26,13 @@ from homeassistant.const import ( STATE_ON, STATE_STANDBY, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service async def test_activity_remote( - hass: HomeAssistant, hk_driver: HomeDriver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver: HomeDriver, events: list[Event] ) -> None: """Test if remote accessory and HA are updated accordingly.""" entity_id = "remote.harmony" @@ -156,7 +156,10 @@ async def test_activity_remote( async def test_activity_remote_bad_names( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hk_driver, + events: list[Event], + caplog: pytest.LogCaptureFixture, ) -> None: """Test if remote accessory with invalid names works as 
expected.""" entity_id = "remote.harmony" diff --git a/tests/components/homekit/test_type_security_systems.py b/tests/components/homekit/test_type_security_systems.py index 18434a345ce..27580949ec2 100644 --- a/tests/components/homekit/test_type_security_systems.py +++ b/tests/components/homekit/test_type_security_systems.py @@ -21,12 +21,14 @@ from homeassistant.const import ( STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service -async def test_switch_set_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_switch_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" code = "1234" config = {ATTR_CODE: code} @@ -118,7 +120,9 @@ async def test_switch_set_state(hass: HomeAssistant, hk_driver, events) -> None: @pytest.mark.parametrize("config", [{}, {ATTR_CODE: None}]) -async def test_no_alarm_code(hass: HomeAssistant, hk_driver, config, events) -> None: +async def test_no_alarm_code( + hass: HomeAssistant, hk_driver, config, events: list[Event] +) -> None: """Test accessory if security_system doesn't require an alarm_code.""" entity_id = "alarm_control_panel.test" @@ -139,7 +143,7 @@ async def test_no_alarm_code(hass: HomeAssistant, hk_driver, config, events) -> assert events[-1].data[ATTR_VALUE] is None -async def test_arming(hass: HomeAssistant, hk_driver, events) -> None: +async def test_arming(hass: HomeAssistant, hk_driver) -> None: """Test to make sure arming sets the right state.""" entity_id = "alarm_control_panel.test" @@ -190,7 +194,7 @@ async def test_arming(hass: HomeAssistant, hk_driver, events) -> None: assert acc.char_current_state.value == 4 -async def test_supported_states(hass: HomeAssistant, hk_driver, events) -> None: +async def test_supported_states(hass: HomeAssistant, hk_driver) -> None: """Test different supported 
states.""" code = "1234" config = {ATTR_CODE: code} diff --git a/tests/components/homekit/test_type_sensors.py b/tests/components/homekit/test_type_sensors.py index fc68b7c8ecf..3e8e05fdcfd 100644 --- a/tests/components/homekit/test_type_sensors.py +++ b/tests/components/homekit/test_type_sensors.py @@ -213,6 +213,16 @@ async def test_pm25(hass: HomeAssistant, hk_driver) -> None: assert acc.char_density.value == 0 assert acc.char_quality.value == 0 + hass.states.async_set(entity_id, "8") + await hass.async_block_till_done() + assert acc.char_density.value == 8 + assert acc.char_quality.value == 1 + + hass.states.async_set(entity_id, "12") + await hass.async_block_till_done() + assert acc.char_density.value == 12 + assert acc.char_quality.value == 2 + hass.states.async_set(entity_id, "23") await hass.async_block_till_done() assert acc.char_density.value == 23 @@ -601,7 +611,7 @@ async def test_binary_device_classes(hass: HomeAssistant, hk_driver) -> None: async def test_sensor_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) diff --git a/tests/components/homekit/test_type_switches.py b/tests/components/homekit/test_type_switches.py index 27937babc57..9b708f18b8a 100644 --- a/tests/components/homekit/test_type_switches.py +++ b/tests/components/homekit/test_type_switches.py @@ -17,6 +17,7 @@ from homeassistant.components.homekit.type_switches import ( Switch, Vacuum, Valve, + ValveSwitch, ) from homeassistant.components.select import ATTR_OPTIONS from homeassistant.components.vacuum import ( @@ -33,17 +34,23 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_TYPE, + SERVICE_CLOSE_VALVE, + SERVICE_OPEN_VALVE, SERVICE_SELECT_OPTION, + STATE_CLOSED, STATE_OFF, STATE_ON, + STATE_OPEN, ) -from homeassistant.core import 
HomeAssistant, split_entity_id +from homeassistant.core import Event, HomeAssistant, split_entity_id import homeassistant.util.dt as dt_util from tests.common import async_fire_time_changed, async_mock_service -async def test_outlet_set_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_outlet_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if Outlet accessory and HA are updated accordingly.""" entity_id = "switch.outlet_test" @@ -96,7 +103,7 @@ async def test_outlet_set_state(hass: HomeAssistant, hk_driver, events) -> None: ], ) async def test_switch_set_state( - hass: HomeAssistant, hk_driver, entity_id, attrs, events + hass: HomeAssistant, hk_driver, entity_id, attrs, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" domain = split_entity_id(entity_id)[0] @@ -140,32 +147,36 @@ async def test_switch_set_state( assert events[-1].data[ATTR_VALUE] is None -async def test_valve_set_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_valve_switch_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if Valve accessory and HA are updated accordingly.""" entity_id = "switch.valve_test" hass.states.async_set(entity_id, None) await hass.async_block_till_done() - acc = Valve(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_FAUCET}) + acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_FAUCET}) acc.run() await hass.async_block_till_done() assert acc.category == 29 # Faucet assert acc.char_valve_type.value == 3 # Water faucet - acc = Valve(hass, hk_driver, "Valve", entity_id, 3, {CONF_TYPE: TYPE_SHOWER}) + acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 3, {CONF_TYPE: TYPE_SHOWER}) acc.run() await hass.async_block_till_done() assert acc.category == 30 # Shower assert acc.char_valve_type.value == 2 # Shower head - acc = Valve(hass, hk_driver, "Valve", entity_id, 4, {CONF_TYPE: TYPE_SPRINKLER}) + 
acc = ValveSwitch( + hass, hk_driver, "Valve", entity_id, 4, {CONF_TYPE: TYPE_SPRINKLER} + ) acc.run() await hass.async_block_till_done() assert acc.category == 28 # Sprinkler assert acc.char_valve_type.value == 1 # Irrigation - acc = Valve(hass, hk_driver, "Valve", entity_id, 5, {CONF_TYPE: TYPE_VALVE}) + acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 5, {CONF_TYPE: TYPE_VALVE}) acc.run() await hass.async_block_till_done() @@ -187,8 +198,59 @@ async def test_valve_set_state(hass: HomeAssistant, hk_driver, events) -> None: assert acc.char_in_use.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, "switch", "turn_on") - call_turn_off = async_mock_service(hass, "switch", "turn_off") + call_turn_on = async_mock_service(hass, "switch", SERVICE_TURN_ON) + call_turn_off = async_mock_service(hass, "switch", SERVICE_TURN_OFF) + + acc.char_active.client_update_value(1) + await hass.async_block_till_done() + assert acc.char_in_use.value == 1 + assert call_turn_on + assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 1 + assert events[-1].data[ATTR_VALUE] is None + + acc.char_active.client_update_value(0) + await hass.async_block_till_done() + assert acc.char_in_use.value == 0 + assert call_turn_off + assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 2 + assert events[-1].data[ATTR_VALUE] is None + + +async def test_valve_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: + """Test if Valve accessory and HA are updated accordingly.""" + entity_id = "valve.valve_test" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + + acc = Valve(hass, hk_driver, "Valve", entity_id, 5, {CONF_TYPE: TYPE_VALVE}) + acc.run() + await hass.async_block_till_done() + + assert acc.aid == 5 + assert acc.category == 29 # Faucet + + assert acc.char_active.value == 0 + assert acc.char_in_use.value == 0 + assert acc.char_valve_type.value == 0 # Generic Valve + 
+ hass.states.async_set(entity_id, STATE_OPEN) + await hass.async_block_till_done() + assert acc.char_active.value == 1 + assert acc.char_in_use.value == 1 + + hass.states.async_set(entity_id, STATE_CLOSED) + await hass.async_block_till_done() + assert acc.char_active.value == 0 + assert acc.char_in_use.value == 0 + + # Set from HomeKit + call_turn_on = async_mock_service(hass, "valve", SERVICE_OPEN_VALVE) + call_turn_off = async_mock_service(hass, "valve", SERVICE_CLOSE_VALVE) acc.char_active.client_update_value(1) await hass.async_block_till_done() @@ -208,7 +270,7 @@ async def test_valve_set_state(hass: HomeAssistant, hk_driver, events) -> None: async def test_vacuum_set_state_with_returnhome_and_start_support( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if Vacuum accessory and HA are updated accordingly.""" entity_id = "vacuum.roomba" @@ -277,7 +339,7 @@ async def test_vacuum_set_state_with_returnhome_and_start_support( async def test_vacuum_set_state_without_returnhome_and_start_support( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if Vacuum accessory and HA are updated accordingly.""" entity_id = "vacuum.roomba" @@ -322,7 +384,9 @@ async def test_vacuum_set_state_without_returnhome_and_start_support( assert events[-1].data[ATTR_VALUE] is None -async def test_reset_switch(hass: HomeAssistant, hk_driver, events) -> None: +async def test_reset_switch( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if switch accessory is reset correctly.""" domain = "scene" entity_id = "scene.test" @@ -366,7 +430,9 @@ async def test_reset_switch(hass: HomeAssistant, hk_driver, events) -> None: assert len(events) == 1 -async def test_script_switch(hass: HomeAssistant, hk_driver, events) -> None: +async def test_script_switch( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if script switch 
accessory is reset correctly.""" domain = "script" entity_id = "script.test" @@ -415,7 +481,7 @@ async def test_script_switch(hass: HomeAssistant, hk_driver, events) -> None: ["input_select", "select"], ) async def test_input_select_switch( - hass: HomeAssistant, hk_driver, events, domain + hass: HomeAssistant, hk_driver, events: list[Event], domain ) -> None: """Test if select switch accessory is handled correctly.""" entity_id = f"{domain}.test" @@ -470,7 +536,9 @@ async def test_input_select_switch( "domain", ["button", "input_button"], ) -async def test_button_switch(hass: HomeAssistant, hk_driver, events, domain) -> None: +async def test_button_switch( + hass: HomeAssistant, hk_driver, events: list[Event], domain +) -> None: """Test switch accessory from a (input) button entity.""" entity_id = f"{domain}.test" diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index ca2a02cb440..8454610566b 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -74,13 +74,13 @@ from homeassistant.const import ( STATE_UNKNOWN, UnitOfTemperature, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_thermostat(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs = { @@ -161,6 +161,40 @@ async def test_thermostat(hass: HomeAssistant, hk_driver, events) -> None: assert acc.char_current_temp.value == 23.0 assert acc.char_display_units.value == 0 + hass.states.async_set( + entity_id, + HVACMode.HEAT, + { + **base_attrs, + ATTR_TEMPERATURE: 22.2, + ATTR_CURRENT_TEMPERATURE: 17.8, + 
ATTR_HVAC_ACTION: HVACAction.PREHEATING, + }, + ) + await hass.async_block_till_done() + assert acc.char_target_temp.value == 22.2 + assert acc.char_current_heat_cool.value == 1 + assert acc.char_target_heat_cool.value == 1 + assert acc.char_current_temp.value == 17.8 + assert acc.char_display_units.value == 0 + + hass.states.async_set( + entity_id, + HVACMode.HEAT, + { + **base_attrs, + ATTR_TEMPERATURE: 22.2, + ATTR_CURRENT_TEMPERATURE: 17.8, + ATTR_HVAC_ACTION: HVACAction.DEFROSTING, + }, + ) + await hass.async_block_till_done() + assert acc.char_target_temp.value == 22.2 + assert acc.char_current_heat_cool.value == 1 + assert acc.char_target_heat_cool.value == 1 + assert acc.char_current_temp.value == 17.8 + assert acc.char_display_units.value == 0 + hass.states.async_set( entity_id, HVACMode.FAN_ONLY, @@ -375,7 +409,9 @@ async def test_thermostat(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] == "TargetHeatingCoolingState to 3" -async def test_thermostat_auto(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat_auto( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs = { @@ -509,7 +545,7 @@ async def test_thermostat_auto(hass: HomeAssistant, hk_driver, events) -> None: async def test_thermostat_mode_and_temp_change( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory where the mode and temp change in the same call.""" entity_id = "climate.test" @@ -616,7 +652,9 @@ async def test_thermostat_mode_and_temp_change( ) -async def test_thermostat_humidity(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat_humidity( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly with humidity.""" entity_id = "climate.test" base_attrs = 
{ATTR_SUPPORTED_FEATURES: 4} @@ -680,7 +718,7 @@ async def test_thermostat_humidity(hass: HomeAssistant, hk_driver, events) -> No async def test_thermostat_humidity_with_target_humidity( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test if accessory and HA are updated accordingly with humidity without target hudmidity. @@ -704,7 +742,9 @@ async def test_thermostat_humidity_with_target_humidity( assert acc.char_current_humidity.value == 65 -async def test_thermostat_power_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat_power_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs = { @@ -812,7 +852,9 @@ async def test_thermostat_power_state(hass: HomeAssistant, hk_driver, events) -> assert acc.char_target_heat_cool.value == 2 -async def test_thermostat_fahrenheit(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat_fahrenheit( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" @@ -969,7 +1011,7 @@ async def test_thermostat_temperature_step_whole( async def test_thermostat_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -1500,7 +1542,7 @@ async def test_thermostat_hvac_modes_without_off( async def test_thermostat_without_target_temp_only_range( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test a thermostat that only supports a range.""" entity_id = "climate.test" @@ -1662,7 +1704,9 @@ async def test_thermostat_without_target_temp_only_range( assert events[-1].data[ATTR_VALUE] == 
"HeatingThresholdTemperature to 27.0°C" -async def test_water_heater(hass: HomeAssistant, hk_driver, events) -> None: +async def test_water_heater( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "water_heater.test" @@ -1736,7 +1780,9 @@ async def test_water_heater(hass: HomeAssistant, hk_driver, events) -> None: assert acc.char_target_heat_cool.value == 1 -async def test_water_heater_fahrenheit(hass: HomeAssistant, hk_driver, events) -> None: +async def test_water_heater_fahrenheit( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are update accordingly.""" entity_id = "water_heater.test" @@ -1799,7 +1845,7 @@ async def test_water_heater_get_temperature_range( async def test_water_heater_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -1849,7 +1895,7 @@ async def test_water_heater_restore( async def test_thermostat_with_no_modes_when_we_first_see( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test if a thermostat that is not ready when we first see it.""" entity_id = "climate.test" @@ -1903,7 +1949,7 @@ async def test_thermostat_with_no_modes_when_we_first_see( async def test_thermostat_with_no_off_after_recheck( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test if a thermostat that is not ready when we first see it that actually does not have off.""" entity_id = "climate.test" @@ -1956,9 +2002,7 @@ async def test_thermostat_with_no_off_after_recheck( assert mock_reload.called -async def test_thermostat_with_temp_clamps( - hass: HomeAssistant, hk_driver, events -) -> None: +async def test_thermostat_with_temp_clamps(hass: 
HomeAssistant, hk_driver) -> None: """Test that temperatures are clamped to valid values to prevent homekit crash.""" entity_id = "climate.test" base_attrs = { @@ -2013,7 +2057,7 @@ async def test_thermostat_with_temp_clamps( async def test_thermostat_with_fan_modes_with_auto( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan modes with an auto fan mode.""" entity_id = "climate.test" @@ -2219,7 +2263,7 @@ async def test_thermostat_with_fan_modes_with_auto( async def test_thermostat_with_fan_modes_with_off( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan modes that can turn off.""" entity_id = "climate.test" @@ -2328,7 +2372,7 @@ async def test_thermostat_with_fan_modes_with_off( async def test_thermostat_with_fan_modes_set_to_none( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan modes set to None.""" entity_id = "climate.test" @@ -2372,7 +2416,7 @@ async def test_thermostat_with_fan_modes_set_to_none( async def test_thermostat_with_fan_modes_set_to_none_not_supported( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan modes set to None and supported feature missing.""" entity_id = "climate.test" @@ -2415,7 +2459,7 @@ async def test_thermostat_with_fan_modes_set_to_none_not_supported( async def test_thermostat_with_supported_features_target_temp_but_fan_mode_set( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan mode and supported feature missing.""" entity_id = "climate.test" @@ -2452,9 +2496,7 @@ async def test_thermostat_with_supported_features_target_temp_but_fan_mode_set( assert not acc.fan_chars -async def test_thermostat_handles_unknown_state( - hass: HomeAssistant, hk_driver, events -) -> None: +async def 
test_thermostat_handles_unknown_state(hass: HomeAssistant, hk_driver) -> None: """Test a thermostat can handle unknown state.""" entity_id = "climate.test" attrs = { diff --git a/tests/components/homekit/test_type_triggers.py b/tests/components/homekit/test_type_triggers.py index 7471e0bff1c..f7415ef5599 100644 --- a/tests/components/homekit/test_type_triggers.py +++ b/tests/components/homekit/test_type_triggers.py @@ -7,7 +7,7 @@ from homeassistant.components.homekit.const import CHAR_PROGRAMMABLE_SWITCH_EVEN from homeassistant.components.homekit.type_triggers import DeviceTriggerAccessory from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, async_get_device_automations @@ -16,9 +16,7 @@ from tests.common import MockConfigEntry, async_get_device_automations async def test_programmable_switch_button_fires_on_trigger( hass: HomeAssistant, hk_driver, - events, demo_cleanup, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: """Test that DeviceTriggerAccessory fires the programmable switch event on trigger.""" diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index 24999242dc1..7f7e3ee0ce0 100644 --- a/tests/components/homekit/test_util.py +++ b/tests/components/homekit/test_util.py @@ -7,13 +7,38 @@ import voluptuous as vol from homeassistant.components.homekit.const import ( BRIDGE_NAME, + CONF_AUDIO_CODEC, + CONF_AUDIO_MAP, + CONF_AUDIO_PACKET_SIZE, CONF_FEATURE, CONF_FEATURE_LIST, CONF_LINKED_BATTERY_SENSOR, + CONF_LINKED_DOORBELL_SENSOR, + CONF_LINKED_MOTION_SENSOR, CONF_LOW_BATTERY_THRESHOLD, + CONF_MAX_FPS, + CONF_MAX_HEIGHT, + CONF_MAX_WIDTH, + CONF_STREAM_COUNT, + CONF_SUPPORT_AUDIO, CONF_THRESHOLD_CO, 
CONF_THRESHOLD_CO2, + CONF_VIDEO_CODEC, + CONF_VIDEO_MAP, + CONF_VIDEO_PACKET_SIZE, + DEFAULT_AUDIO_CODEC, + DEFAULT_AUDIO_MAP, + DEFAULT_AUDIO_PACKET_SIZE, DEFAULT_CONFIG_FLOW_PORT, + DEFAULT_LOW_BATTERY_THRESHOLD, + DEFAULT_MAX_FPS, + DEFAULT_MAX_HEIGHT, + DEFAULT_MAX_WIDTH, + DEFAULT_STREAM_COUNT, + DEFAULT_SUPPORT_AUDIO, + DEFAULT_VIDEO_CODEC, + DEFAULT_VIDEO_MAP, + DEFAULT_VIDEO_PACKET_SIZE, DOMAIN, FEATURE_ON_OFF, FEATURE_PLAY_PAUSE, @@ -178,6 +203,31 @@ def test_validate_entity_config() -> None: assert vec({"sensor.co2": {CONF_THRESHOLD_CO2: 500}}) == { "sensor.co2": {CONF_THRESHOLD_CO2: 500, CONF_LOW_BATTERY_THRESHOLD: 20} } + assert vec( + { + "camera.demo": { + CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", + CONF_LINKED_MOTION_SENSOR: "event.motion", + } + } + ) == { + "camera.demo": { + CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", + CONF_LINKED_MOTION_SENSOR: "event.motion", + CONF_AUDIO_CODEC: DEFAULT_AUDIO_CODEC, + CONF_SUPPORT_AUDIO: DEFAULT_SUPPORT_AUDIO, + CONF_MAX_WIDTH: DEFAULT_MAX_WIDTH, + CONF_MAX_HEIGHT: DEFAULT_MAX_HEIGHT, + CONF_MAX_FPS: DEFAULT_MAX_FPS, + CONF_AUDIO_MAP: DEFAULT_AUDIO_MAP, + CONF_VIDEO_MAP: DEFAULT_VIDEO_MAP, + CONF_STREAM_COUNT: DEFAULT_STREAM_COUNT, + CONF_VIDEO_CODEC: DEFAULT_VIDEO_CODEC, + CONF_AUDIO_PACKET_SIZE: DEFAULT_AUDIO_PACKET_SIZE, + CONF_VIDEO_PACKET_SIZE: DEFAULT_VIDEO_PACKET_SIZE, + CONF_LOW_BATTERY_THRESHOLD: DEFAULT_LOW_BATTERY_THRESHOLD, + } + } def test_validate_media_player_features() -> None: @@ -230,14 +280,15 @@ def test_temperature_to_states() -> None: def test_density_to_air_quality() -> None: """Test map PM2.5 density to HomeKit AirQuality level.""" assert density_to_air_quality(0) == 1 - assert density_to_air_quality(12) == 1 - assert density_to_air_quality(12.1) == 2 + assert density_to_air_quality(9) == 1 + assert density_to_air_quality(9.1) == 2 + assert density_to_air_quality(12) == 2 assert density_to_air_quality(35.4) == 2 assert density_to_air_quality(35.5) == 3 assert 
density_to_air_quality(55.4) == 3 assert density_to_air_quality(55.5) == 4 - assert density_to_air_quality(150.4) == 4 - assert density_to_air_quality(150.5) == 5 + assert density_to_air_quality(125.4) == 4 + assert density_to_air_quality(125.5) == 5 assert density_to_air_quality(200) == 5 @@ -256,7 +307,12 @@ async def test_async_show_setup_msg(hass: HomeAssistant, hk_driver) -> None: hass, entry.entry_id, "bridge_name", pincode, "X-HM://0" ) await hass.async_block_till_done() - entry_data: HomeKitEntryData = hass.data[DOMAIN][entry.entry_id] + + # New tests should not access runtime data. + # Do not use this pattern for new tests. + entry_data: HomeKitEntryData = hass.config_entries.async_get_entry( + entry.entry_id + ).runtime_data assert entry_data.pairing_qr_secret assert entry_data.pairing_qr diff --git a/tests/components/homekit_controller/common.py b/tests/components/homekit_controller/common.py index 1360b463e4a..9aba3ef3225 100644 --- a/tests/components/homekit_controller/common.py +++ b/tests/components/homekit_controller/common.py @@ -11,12 +11,7 @@ from unittest import mock from aiohomekit.controller.abstract import AbstractDescription, AbstractPairing from aiohomekit.hkjson import loads as hkloads -from aiohomekit.model import ( - Accessories, - AccessoriesState, - Accessory, - mixin as model_mixin, -) +from aiohomekit.model import Accessories, AccessoriesState, Accessory from aiohomekit.testing import FakeController, FakePairing from homeassistant.components.device_automation import DeviceAutomationType @@ -282,7 +277,7 @@ async def device_config_changed(hass: HomeAssistant, accessories: Accessories): async def setup_test_component( - hass, setup_accessory, capitalize=False, suffix=None, connection=None + hass, aid, setup_accessory, capitalize=False, suffix=None, connection=None ): """Load a fake homekit accessory based on a homekit accessory model. 
@@ -291,7 +286,7 @@ async def setup_test_component( If suffix is set, entityId will include the suffix """ accessory = Accessory.create_with_info( - "TestDevice", "example.com", "Test", "0001", "0.1" + aid, "TestDevice", "example.com", "Test", "0001", "0.1" ) setup_accessory(accessory) @@ -397,8 +392,3 @@ async def assert_devices_and_entities_created( # Root device must not have a via, otherwise its not the device assert root_device.via_device_id is None - - -def get_next_aid(): - """Get next aid.""" - return model_mixin.id_counter + 1 diff --git a/tests/components/homekit_controller/conftest.py b/tests/components/homekit_controller/conftest.py index 427c5285436..eea3f4b67f2 100644 --- a/tests/components/homekit_controller/conftest.py +++ b/tests/components/homekit_controller/conftest.py @@ -1,5 +1,6 @@ """HomeKit controller session fixtures.""" +from collections.abc import Callable, Generator import datetime from unittest.mock import MagicMock, patch @@ -7,7 +8,6 @@ from aiohomekit.testing import FakeController from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator import homeassistant.util.dt as dt_util @@ -44,3 +44,16 @@ def hk_mock_async_zeroconf(mock_async_zeroconf: MagicMock) -> None: @pytest.fixture(autouse=True) def auto_mock_bluetooth(mock_bluetooth: None) -> None: """Auto mock bluetooth.""" + + +@pytest.fixture +def get_next_aid() -> Generator[Callable[[], int]]: + """Generate a function that returns increasing accessory ids.""" + id_counter = 0 + + def _get_id(): + nonlocal id_counter + id_counter += 1 + return id_counter + + return _get_id diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index c52bf2c3b27..078ef792a55 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -24,8 +24,10 @@ ]), 
'manufacturer': 'Sleekpoint Innovations', 'model': 'AP2', + 'model_id': None, 'name': 'Airversa AP2 1808', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1234', 'suggested_area': None, 'sw_version': '0.8.16', @@ -101,7 +103,7 @@ 'original_name': 'Airversa AP2 1808 AirPurifier', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_32832', 'unit_of_measurement': None, @@ -113,7 +115,7 @@ 'percentage_step': 20.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.airversa_ap2_1808_airpurifier', 'state': 'off', @@ -620,8 +622,10 @@ ]), 'manufacturer': 'Anker', 'model': 'T8010', + 'model_id': None, 'name': 'eufy HomeBase2-0AAA', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'A0000A000000000A', 'suggested_area': None, 'sw_version': '2.1.6', @@ -693,8 +697,10 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', + 'model_id': None, 'name': 'eufyCam2-0000', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'A0000A000000000D', 'suggested_area': None, 'sw_version': '1.6.7', @@ -934,8 +940,10 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', + 'model_id': None, 'name': 'eufyCam2-000A', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'A0000A000000000B', 'suggested_area': None, 'sw_version': '1.6.7', @@ -1175,8 +1183,10 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', + 'model_id': None, 'name': 'eufyCam2-000A', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'A0000A000000000C', 'suggested_area': None, 'sw_version': '1.6.7', @@ -1420,8 +1430,10 @@ ]), 'manufacturer': 'Aqara', 'model': 'HE1-G01', + 'model_id': None, 'name': 'Aqara-Hub-E1-00A0', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '00aa00000a0', 'suggested_area': 
None, 'sw_version': '3.3.0', @@ -1626,8 +1638,10 @@ ]), 'manufacturer': 'Aqara', 'model': 'AS006', + 'model_id': None, 'name': 'Contact Sensor', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '158d0007c59c6a', 'suggested_area': None, 'sw_version': '0', @@ -1790,8 +1804,10 @@ ]), 'manufacturer': 'Aqara', 'model': 'ZHWA11LM', + 'model_id': None, 'name': 'Aqara Hub-1563', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '0000000123456789', 'suggested_area': None, 'sw_version': '1.4.7', @@ -2065,8 +2081,10 @@ ]), 'manufacturer': 'Aqara', 'model': 'AR004', + 'model_id': None, 'name': 'Programmable Switch', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '111a1111a1a111', 'suggested_area': None, 'sw_version': '9', @@ -2188,8 +2206,10 @@ ]), 'manufacturer': 'Netgear, Inc', 'model': 'ABC1000', + 'model_id': None, 'name': 'ArloBabyA0', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '00A0000000000', 'suggested_area': None, 'sw_version': '1.10.931', @@ -2672,8 +2692,10 @@ ]), 'manufacturer': 'ConnectSense', 'model': 'CS-IWO', + 'model_id': None, 'name': 'InWall Outlet-0394DE', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1020301376', 'suggested_area': None, 'sw_version': '1.0.0', @@ -3101,8 +3123,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Basement', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AB3C', 'suggested_area': None, 'sw_version': '1.0.0', @@ -3260,8 +3284,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', + 'model_id': None, 'name': 'HomeW', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '123456789012', 'suggested_area': None, 'sw_version': '4.2.394', @@ -3714,8 +3740,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, + 
'primary_config_entry': 'TestData', 'serial_number': 'AB1C', 'suggested_area': None, 'sw_version': '1.0.0', @@ -3873,8 +3901,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Porch', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AB2C', 'suggested_area': None, 'sw_version': '1.0.0', @@ -4036,8 +4066,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', + 'model_id': None, 'name': 'HomeW', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '123456789012', 'suggested_area': None, 'sw_version': '4.2.394', @@ -4494,8 +4526,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Basement', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AB3C', 'suggested_area': None, 'sw_version': '1.0.0', @@ -4608,8 +4642,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', + 'model_id': None, 'name': 'HomeW', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '123456789012', 'suggested_area': None, 'sw_version': '4.2.394', @@ -4889,8 +4925,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AB1C', 'suggested_area': None, 'sw_version': '1.0.0', @@ -5048,8 +5086,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Porch', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AB2C', 'suggested_area': None, 'sw_version': '1.0.0', @@ -5211,8 +5251,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ECB501', + 'model_id': None, 'name': 'My ecobee', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '123456789016', 'suggested_area': None, 'sw_version': '4.7.340214', @@ -5678,8 +5720,10 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee Switch+', + 
'model_id': None, 'name': 'Master Fan', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '111111111111', 'suggested_area': None, 'sw_version': '4.5.130201', @@ -5967,8 +6011,10 @@ ]), 'manufacturer': 'Elgato', 'model': 'Eve Degree 00AAA0000', + 'model_id': None, 'name': 'Eve Degree AA11', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AA00A0A00000', 'suggested_area': None, 'sw_version': '1.2.8', @@ -6323,8 +6369,10 @@ ]), 'manufacturer': 'Elgato', 'model': 'Eve Energy 20EAO8601', + 'model_id': None, 'name': 'Eve Energy 50FF', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AA00A0A00000', 'suggested_area': None, 'sw_version': '1.2.9', @@ -6661,8 +6709,10 @@ ]), 'manufacturer': 'José A. Jiménez Campos', 'model': 'RavenSystem HAA', + 'model_id': None, 'name': 'HAA-C718B3', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'C718B3-1', 'suggested_area': None, 'sw_version': '5.0.18', @@ -6819,7 +6869,7 @@ 'original_name': 'HAA-C718B3', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_8', 'unit_of_measurement': None, @@ -6831,7 +6881,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.haa_c718b3', 'state': 'on', @@ -6866,8 +6916,10 @@ ]), 'manufacturer': 'José A. 
Jiménez Campos', 'model': 'RavenSystem HAA', + 'model_id': None, 'name': 'HAA-C718B3', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'C718B3-2', 'suggested_area': None, 'sw_version': '5.0.18', @@ -6958,324 +7010,6 @@ }), ]) # --- -# name: test_snapshots[haa_fan] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'José A. Jiménez Campos', - 'model': 'RavenSystem HAA', - 'name': 'HAA-C718B3', - 'name_by_user': None, - 'serial_number': 'C718B3-1', - 'suggested_area': None, - 'sw_version': '5.0.18', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'HAA-C718B3 Identify', - }), - 'entity_id': 'button.haa_c718b3_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 
'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_setup', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3 Setup', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'setup', - 'unique_id': '00:00:00:00:00:00_1_1010_1012', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3 Setup', - }), - 'entity_id': 'button.haa_c718b3_setup', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_update', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Update', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1010_1011', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'update', - 'friendly_name': 'HAA-C718B3 Update', - }), - 'entity_id': 'button.haa_c718b3_update', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': None, - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.haa_c718b3', - 'has_entity_name': False, - 
'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3', - 'percentage': 66, - 'percentage_step': 33.333333333333336, - 'preset_mode': None, - 'preset_modes': None, - 'supported_features': , - }), - 'entity_id': 'fan.haa_c718b3', - 'state': 'on', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:2', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'José A. 
Jiménez Campos', - 'model': 'RavenSystem HAA', - 'name': 'HAA-C718B3', - 'name_by_user': None, - 'serial_number': 'C718B3-2', - 'suggested_area': None, - 'sw_version': '5.0.18', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'HAA-C718B3 Identify', - }), - 'entity_id': 'button.haa_c718b3_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.haa_c718b3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3', - }), - 'entity_id': 'switch.haa_c718b3', - 'state': 'off', - }), - }), - ]), - }), - 
]) -# --- # name: test_snapshots[home_assistant_bridge_basic_cover] list([ dict({ @@ -7301,8 +7035,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Family Room North', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'cover.family_door_north', 'suggested_area': None, 'sw_version': '3.6.2', @@ -7462,8 +7198,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -7535,8 +7273,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Kitchen Window', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'cover.kitchen_window', 'suggested_area': None, 'sw_version': '3.6.2', @@ -7700,8 +7440,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Ceiling Fan', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'fan.ceiling_fan', 'suggested_area': None, 'sw_version': '0.104.0.dev0', @@ -7777,7 +7519,7 @@ 'original_name': 'Ceiling Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_766313939_8', 'unit_of_measurement': None, @@ -7789,7 +7531,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.ceiling_fan', 'state': 'off', @@ -7820,8 +7562,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '0.104.0.dev0', @@ -7893,8 +7637,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 
'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'fan.living_room_fan', 'suggested_area': None, 'sw_version': '0.104.0.dev0', @@ -7970,7 +7716,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -7983,7 +7729,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -8018,8 +7764,10 @@ ]), 'manufacturer': 'Lookin', 'model': 'Climate Control', + 'model_id': None, 'name': '89 Living Room', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'climate.89_living_room', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -8157,7 +7905,7 @@ 'original_name': '89 Living Room', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1233851541_175', 'unit_of_measurement': None, @@ -8170,7 +7918,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.89_living_room', 'state': 'on', @@ -8340,8 +8088,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -8417,8 +8167,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -8490,8 +8242,10 @@ 
]), 'manufacturer': 'FirstAlert', 'model': '1039102', + 'model_id': None, 'name': 'Laundry Smoke ED78', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'light.laundry_smoke_ed78', 'suggested_area': None, 'sw_version': '1.4.84', @@ -8663,8 +8417,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Family Room North', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'cover.family_door_north', 'suggested_area': None, 'sw_version': '3.6.2', @@ -8824,8 +8580,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -8897,8 +8655,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Kitchen Window', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'cover.kitchen_window', 'suggested_area': None, 'sw_version': '3.6.2', @@ -9062,8 +8822,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Ceiling Fan', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'fan.ceiling_fan', 'suggested_area': None, 'sw_version': '0.104.0.dev0', @@ -9139,7 +8901,7 @@ 'original_name': 'Ceiling Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_766313939_8', 'unit_of_measurement': None, @@ -9151,7 +8913,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.ceiling_fan', 'state': 'off', @@ -9182,8 +8944,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, + 'primary_config_entry': 
'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '0.104.0.dev0', @@ -9255,8 +9019,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'fan.living_room_fan', 'suggested_area': None, 'sw_version': '0.104.0.dev0', @@ -9332,7 +9098,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -9346,7 +9112,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -9381,8 +9147,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '0.104.0.dev0', @@ -9454,8 +9222,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'fan.living_room_fan', 'suggested_area': None, 'sw_version': '0.104.0.dev0', @@ -9531,7 +9301,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -9545,7 +9315,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -9580,8 +9350,10 @@ ]), 'manufacturer': 'Lookin', 'model': 'Climate Control', + 'model_id': None, 'name': '89 Living 
Room', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'climate.89_living_room', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -9728,7 +9500,7 @@ 'original_name': '89 Living Room', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1233851541_175', 'unit_of_measurement': None, @@ -9741,7 +9513,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.89_living_room', 'state': 'on', @@ -9911,8 +9683,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -9988,8 +9762,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -10061,8 +9837,10 @@ ]), 'manufacturer': 'switchbot', 'model': 'WoHumi', + 'model_id': None, 'name': 'Humidifier 182A', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'humidifier.humidifier_182a', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -10241,8 +10019,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -10314,8 +10094,10 @@ ]), 'manufacturer': 'switchbot', 'model': 'WoHumi', + 'model_id': None, 'name': 'Humidifier 182A', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'humidifier.humidifier_182a', 'suggested_area': None, 
'sw_version': '2024.2.0', @@ -10494,8 +10276,10 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'homekit.bridge', 'suggested_area': None, 'sw_version': '2024.2.0', @@ -10567,8 +10351,10 @@ ]), 'manufacturer': 'FirstAlert', 'model': '1039102', + 'model_id': None, 'name': 'Laundry Smoke ED78', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'light.laundry_smoke_ed78', 'suggested_area': None, 'sw_version': '1.4.84', @@ -10755,8 +10541,10 @@ ]), 'manufacturer': 'Garzola Marco', 'model': 'Daikin-fwec3a-esp32-homekit-bridge', + 'model_id': None, 'name': 'Air Conditioner', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '00000001', 'suggested_area': None, 'sw_version': '1.0.0', @@ -10953,8 +10741,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', + 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462395276914', 'suggested_area': None, 'sw_version': '1.46.13', @@ -11089,8 +10879,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', + 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462395276939', 'suggested_area': None, 'sw_version': '1.46.13', @@ -11225,8 +11017,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', + 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462403113447', 'suggested_area': None, 'sw_version': '1.46.13', @@ -11361,8 +11155,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', + 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462403233419', 'suggested_area': None, 'sw_version': '1.46.13', @@ -11497,8 +11293,10 @@ ]), 
'manufacturer': 'Philips', 'model': 'LTW013', + 'model_id': None, 'name': 'Hue ambiance spot', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462412411853', 'suggested_area': None, 'sw_version': '1.46.13', @@ -11643,8 +11441,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW013', + 'model_id': None, 'name': 'Hue ambiance spot', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462412413293', 'suggested_area': None, 'sw_version': '1.46.13', @@ -11789,8 +11589,10 @@ ]), 'manufacturer': 'Philips', 'model': 'RWL021', + 'model_id': None, 'name': 'Hue dimmer switch', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462389072572', 'suggested_area': None, 'sw_version': '45.1.17846', @@ -12104,8 +11906,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462378982941', 'suggested_area': None, 'sw_version': '1.46.13', @@ -12227,8 +12031,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462378983942', 'suggested_area': None, 'sw_version': '1.46.13', @@ -12350,8 +12156,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462379122122', 'suggested_area': None, 'sw_version': '1.46.13', @@ -12473,8 +12281,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462379123707', 'suggested_area': None, 'sw_version': '1.46.13', @@ -12596,8 +12406,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, + 
'primary_config_entry': 'TestData', 'serial_number': '6623462383114163', 'suggested_area': None, 'sw_version': '1.46.13', @@ -12719,8 +12531,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462383114193', 'suggested_area': None, 'sw_version': '1.46.13', @@ -12842,8 +12656,10 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '6623462385996792', 'suggested_area': None, 'sw_version': '1.46.13', @@ -12965,8 +12781,10 @@ ]), 'manufacturer': 'Philips Lighting', 'model': 'BSB002', + 'model_id': None, 'name': 'Philips hue - 482544', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '123456', 'suggested_area': None, 'sw_version': '1.32.1932126170', @@ -13042,8 +12860,10 @@ ]), 'manufacturer': 'Koogeek', 'model': 'LS1', + 'model_id': None, 'name': 'Koogeek-LS1-20833F', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AAAA011111111111', 'suggested_area': None, 'sw_version': '2.2.15', @@ -13184,8 +13004,10 @@ ]), 'manufacturer': 'Koogeek', 'model': 'P1EU', + 'model_id': None, 'name': 'Koogeek-P1-A00AA0', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'EUCP03190xxxxx48', 'suggested_area': None, 'sw_version': '2.3.7', @@ -13347,8 +13169,10 @@ ]), 'manufacturer': 'Koogeek', 'model': 'KH02CN', + 'model_id': None, 'name': 'Koogeek-SW2-187A91', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'CNNT061751001372', 'suggested_area': None, 'sw_version': '1.0.3', @@ -13549,8 +13373,10 @@ ]), 'manufacturer': 'Lennox', 'model': 'E30 2B', + 'model_id': None, 'name': 'Lennox', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'XXXXXXXX', 'suggested_area': None, 'sw_version': '3.40.XX', @@ -13829,8 
+13655,10 @@ ]), 'manufacturer': 'LG Electronics', 'model': 'OLED55B9PUA', + 'model_id': None, 'name': 'LG webOS TV AF80', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '999AAAAAA999', 'suggested_area': None, 'sw_version': '04.71.04', @@ -14008,8 +13836,10 @@ ]), 'manufacturer': 'Lutron Electronics Co., Inc', 'model': 'PD-FSQN-XX', + 'model_id': None, 'name': 'Caséta® Wireless Fan Speed Control', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '39024290', 'suggested_area': None, 'sw_version': '001.005', @@ -14085,7 +13915,7 @@ 'original_name': 'Caséta® Wireless Fan Speed Control', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_21474836482_2', 'unit_of_measurement': None, @@ -14097,7 +13927,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.caseta_r_wireless_fan_speed_control', 'state': 'off', @@ -14128,8 +13958,10 @@ ]), 'manufacturer': 'Lutron Electronics Co., Inc', 'model': 'L-BDG2-WH', + 'model_id': None, 'name': 'Smart Bridge 2', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '12344331', 'suggested_area': None, 'sw_version': '08.08', @@ -14205,8 +14037,10 @@ ]), 'manufacturer': 'Meross', 'model': 'MSS425F', + 'model_id': None, 'name': 'MSS425F-15cc', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'HH41234', 'suggested_area': None, 'sw_version': '4.2.3', @@ -14482,8 +14316,10 @@ ]), 'manufacturer': 'Meross', 'model': 'MSS565', + 'model_id': None, 'name': 'MSS565-28da', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'BB1121', 'suggested_area': None, 'sw_version': '4.1.9', @@ -14609,8 +14445,10 @@ ]), 'manufacturer': 'Empowered Homes Inc.', 'model': 'v1', + 'model_id': None, 'name': 'Mysa-85dda9', 
'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AAAAAAA000', 'suggested_area': None, 'sw_version': '2.8.1', @@ -14937,8 +14775,10 @@ ]), 'manufacturer': 'Nanoleaf', 'model': 'NL55', + 'model_id': None, 'name': 'Nanoleaf Strip 3B32', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AAAA011111111111', 'suggested_area': None, 'sw_version': '1.4.40', @@ -15207,8 +15047,10 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Netatmo Doorbell', + 'model_id': None, 'name': 'Netatmo-Doorbell-g738658', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'g738658', 'suggested_area': None, 'sw_version': '80.0.0', @@ -15499,8 +15341,10 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Smart CO Alarm', + 'model_id': None, 'name': 'Smart CO Alarm', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1234', 'suggested_area': None, 'sw_version': '1.0.3', @@ -15658,8 +15502,10 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Healthy Home Coach', + 'model_id': None, 'name': 'Healthy Home Coach', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AAAAAAAAAAAAA', 'suggested_area': None, 'sw_version': '59', @@ -15959,8 +15805,10 @@ ]), 'manufacturer': 'Green Electronics LLC', 'model': 'SPK5 Pro', + 'model_id': None, 'name': 'RainMachine-00ce4a', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '00aa0000aa0a', 'suggested_area': None, 'sw_version': '1.0.4', @@ -16380,8 +16228,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Master Bath South', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1.0.0', 'suggested_area': None, 'sw_version': '3.0.8', @@ -16541,8 +16391,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE SmartBridge', + 'model_id': None, 'name': 'RYSE SmartBridge', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '0101.3521.0436', 
'suggested_area': None, 'sw_version': '1.3.0', @@ -16614,8 +16466,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'RYSE SmartShade', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '', 'suggested_area': None, 'sw_version': '', @@ -16779,8 +16633,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'BR Left', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1.0.0', 'suggested_area': None, 'sw_version': '3.0.8', @@ -16940,8 +16796,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'LR Left', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1.0.0', 'suggested_area': None, 'sw_version': '3.0.8', @@ -17101,8 +16959,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'LR Right', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1.0.0', 'suggested_area': None, 'sw_version': '3.0.8', @@ -17262,8 +17122,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE SmartBridge', + 'model_id': None, 'name': 'RYSE SmartBridge', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '0401.3521.0679', 'suggested_area': None, 'sw_version': '1.3.0', @@ -17335,8 +17197,10 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'RZSS', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1.0.0', 'suggested_area': None, 'sw_version': '3.0.8', @@ -17500,8 +17364,10 @@ ]), 'manufacturer': 'Schlage ', 'model': 'BE479CAM619', + 'model_id': None, 'name': 'SENSE ', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AAAAAAA000', 'suggested_area': None, 'sw_version': '004.027.000', @@ -17618,8 +17484,10 @@ ]), 'manufacturer': 'Hunter Fan', 'model': 'SIMPLEconnect', + 'model_id': None, 'name': 'SIMPLEconnect Fan-06F674', 
'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1234567890abcd', 'suggested_area': None, 'sw_version': '', @@ -17695,7 +17563,7 @@ 'original_name': 'SIMPLEconnect Fan-06F674 Hunter Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_8', 'unit_of_measurement': None, @@ -17708,7 +17576,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.simpleconnect_fan_06f674_hunter_fan', 'state': 'off', @@ -17793,8 +17661,10 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Gateway', + 'model_id': None, 'name': 'VELUX Gateway', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'a1a11a1', 'suggested_area': None, 'sw_version': '70', @@ -17866,8 +17736,10 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Sensor', + 'model_id': None, 'name': 'VELUX Sensor', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'a11b111', 'suggested_area': None, 'sw_version': '16', @@ -18074,8 +17946,10 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Window', + 'model_id': None, 'name': 'VELUX Window', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': '1111111a114a111a', 'suggested_area': None, 'sw_version': '48', @@ -18194,8 +18068,10 @@ ]), 'manufacturer': 'VOCOlinc', 'model': 'Flowerbud', + 'model_id': None, 'name': 'VOCOlinc-Flowerbud-0d324b', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'AM01121849000327', 'suggested_area': None, 'sw_version': '3.121.2', @@ -18498,8 +18374,10 @@ ]), 'manufacturer': 'VOCOlinc', 'model': 'VP3', + 'model_id': None, 'name': 'VOCOlinc-VP3-123456', 'name_by_user': None, + 'primary_config_entry': 'TestData', 'serial_number': 'EU0121203xxxxx07', 'suggested_area': None, 'sw_version': '1.101.2', diff --git 
a/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py b/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py index aea53e74d46..d6dc0f70015 100644 --- a/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py +++ b/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py @@ -29,14 +29,22 @@ async def test_fan_add_feature_at_runtime( fan_state = hass.states.get("fan.living_room_fan") assert ( fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION + is FanEntityFeature.SET_SPEED + | FanEntityFeature.DIRECTION + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) # Now change the config to add oscillation accessories = await setup_accessories_from_file( @@ -50,9 +58,16 @@ async def test_fan_add_feature_at_runtime( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) async def test_fan_remove_feature_at_runtime( @@ -75,13 +90,20 @@ async def test_fan_remove_feature_at_runtime( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) 
fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) # Now change the config to add oscillation accessories = await setup_accessories_from_file( @@ -92,10 +114,18 @@ async def test_fan_remove_feature_at_runtime( fan_state = hass.states.get("fan.living_room_fan") assert ( fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION + is FanEntityFeature.SET_SPEED + | FanEntityFeature.DIRECTION + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) async def test_bridge_with_two_fans_one_removed( @@ -119,13 +149,20 @@ async def test_bridge_with_two_fans_one_removed( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) # Now change the config to remove one of the fans accessories = await setup_accessories_from_file( @@ -141,6 +178,8 @@ async def test_bridge_with_two_fans_one_removed( is FanEntityFeature.SET_SPEED | 
FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) # The second fan should have been removed assert not hass.states.get("fan.ceiling_fan") diff --git a/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py b/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py index 9c6e5a6687a..a16cd052c87 100644 --- a/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py +++ b/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py @@ -5,7 +5,7 @@ from unittest import mock from aiohomekit.exceptions import AccessoryDisconnectedError, EncryptionError from aiohomekit.model import CharacteristicsTypes, ServicesTypes -from aiohomekit.testing import FakePairing +from aiohomekit.testing import FakeController, FakePairing import pytest from homeassistant.components.homekit_controller.connection import ( @@ -48,7 +48,14 @@ async def test_recover_from_failure(hass: HomeAssistant, failure_cls) -> None: # Test that entity remains in the same state if there is a network error next_update = dt_util.utcnow() + timedelta(seconds=60) - with mock.patch.object(FakePairing, "get_characteristics") as get_char: + with ( + mock.patch.object(FakePairing, "get_characteristics") as get_char, + mock.patch.object( + FakeController, + "async_reachable", + return_value=False, + ), + ): get_char.side_effect = failure_cls("Disconnected") # Test that a poll triggers unavailable diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index a8852aac4f7..d08478641b3 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -1,12 +1,14 @@ """Basic checks for HomeKitalarm_control_panel.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from 
aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_security_system_service(accessory): @@ -27,9 +29,13 @@ def create_security_system_service(accessory): targ_state.value = 50 -async def test_switch_change_alarm_state(hass: HomeAssistant) -> None: +async def test_switch_change_alarm_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit alarm on and off again.""" - helper = await setup_test_component(hass, create_security_system_service) + helper = await setup_test_component( + hass, get_next_aid(), create_security_system_service + ) await hass.services.async_call( "alarm_control_panel", @@ -84,9 +90,13 @@ async def test_switch_change_alarm_state(hass: HomeAssistant) -> None: ) -async def test_switch_read_alarm_state(hass: HomeAssistant) -> None: +async def test_switch_read_alarm_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit alarm accessory.""" - helper = await setup_test_component(hass, create_security_system_service) + helper = await setup_test_component( + hass, get_next_aid(), create_security_system_service + ) await helper.async_update( ServicesTypes.SECURITY_SYSTEM, @@ -126,7 +136,9 @@ async def test_switch_read_alarm_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a alarm_control_panel unique id.""" aid = get_next_aid() @@ -135,7 +147,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_security_system_service) + await 
setup_test_component(hass, aid, create_security_system_service) assert ( entity_registry.async_get(alarm_control_panel_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_binary_sensor.py b/tests/components/homekit_controller/test_binary_sensor.py index 3d4486bb38d..63b35fbe1b8 100644 --- a/tests/components/homekit_controller/test_binary_sensor.py +++ b/tests/components/homekit_controller/test_binary_sensor.py @@ -1,5 +1,7 @@ """Basic checks for HomeKit motion sensors and contact sensors.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -7,7 +9,7 @@ from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_motion_sensor_service(accessory): @@ -18,9 +20,13 @@ def create_motion_sensor_service(accessory): cur_state.value = 0 -async def test_motion_sensor_read_state(hass: HomeAssistant) -> None: +async def test_motion_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit motion sensor accessory.""" - helper = await setup_test_component(hass, create_motion_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_motion_sensor_service + ) await helper.async_update( ServicesTypes.MOTION_SENSOR, {CharacteristicsTypes.MOTION_DETECTED: False} @@ -45,9 +51,13 @@ def create_contact_sensor_service(accessory): cur_state.value = 0 -async def test_contact_sensor_read_state(hass: HomeAssistant) -> None: +async def test_contact_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await 
setup_test_component(hass, create_contact_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_contact_sensor_service + ) await helper.async_update( ServicesTypes.CONTACT_SENSOR, {CharacteristicsTypes.CONTACT_STATE: 0} @@ -72,9 +82,13 @@ def create_smoke_sensor_service(accessory): cur_state.value = 0 -async def test_smoke_sensor_read_state(hass: HomeAssistant) -> None: +async def test_smoke_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await setup_test_component(hass, create_smoke_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_smoke_sensor_service + ) await helper.async_update( ServicesTypes.SMOKE_SENSOR, {CharacteristicsTypes.SMOKE_DETECTED: 0} @@ -99,9 +113,13 @@ def create_carbon_monoxide_sensor_service(accessory): cur_state.value = 0 -async def test_carbon_monoxide_sensor_read_state(hass: HomeAssistant) -> None: +async def test_carbon_monoxide_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await setup_test_component(hass, create_carbon_monoxide_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_carbon_monoxide_sensor_service + ) await helper.async_update( ServicesTypes.CARBON_MONOXIDE_SENSOR, @@ -128,9 +146,13 @@ def create_occupancy_sensor_service(accessory): cur_state.value = 0 -async def test_occupancy_sensor_read_state(hass: HomeAssistant) -> None: +async def test_occupancy_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit occupancy sensor accessory.""" - helper = await setup_test_component(hass, create_occupancy_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_occupancy_sensor_service + ) await 
helper.async_update( ServicesTypes.OCCUPANCY_SENSOR, {CharacteristicsTypes.OCCUPANCY_DETECTED: False} @@ -155,9 +177,13 @@ def create_leak_sensor_service(accessory): cur_state.value = 0 -async def test_leak_sensor_read_state(hass: HomeAssistant) -> None: +async def test_leak_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit leak sensor accessory.""" - helper = await setup_test_component(hass, create_leak_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_leak_sensor_service + ) await helper.async_update( ServicesTypes.LEAK_SENSOR, {CharacteristicsTypes.LEAK_DETECTED: 0} @@ -175,7 +201,9 @@ async def test_leak_sensor_read_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a binary_sensor unique id.""" aid = get_next_aid() @@ -184,7 +212,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_leak_sensor_service) + await setup_test_component(hass, aid, create_leak_sensor_service) assert ( entity_registry.async_get(binary_sensor_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_button.py b/tests/components/homekit_controller/test_button.py index 9f935569333..058194a7ebd 100644 --- a/tests/components/homekit_controller/test_button.py +++ b/tests/components/homekit_controller/test_button.py @@ -1,12 +1,14 @@ """Basic checks for HomeKit button.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, 
get_next_aid, setup_test_component +from .common import Helper, setup_test_component def create_switch_with_setup_button(accessory): @@ -39,9 +41,13 @@ def create_switch_with_ecobee_clear_hold_button(accessory): return service -async def test_press_button(hass: HomeAssistant) -> None: +async def test_press_button( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a switch service that has a button characteristic is correctly handled.""" - helper = await setup_test_component(hass, create_switch_with_setup_button) + helper = await setup_test_component( + hass, get_next_aid(), create_switch_with_setup_button + ) # Helper will be for the primary entity, which is the outlet. Make a helper for the button. button = Helper( @@ -66,10 +72,12 @@ async def test_press_button(hass: HomeAssistant) -> None: ) -async def test_ecobee_clear_hold_press_button(hass: HomeAssistant) -> None: +async def test_ecobee_clear_hold_press_button( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test ecobee clear hold button characteristic is correctly handled.""" helper = await setup_test_component( - hass, create_switch_with_ecobee_clear_hold_button + hass, get_next_aid(), create_switch_with_ecobee_clear_hold_button ) # Helper will be for the primary entity, which is the outlet. Make a helper for the button. 
@@ -96,7 +104,9 @@ async def test_ecobee_clear_hold_press_button(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a button unique id.""" aid = get_next_aid() @@ -105,7 +115,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-0001-aid:{aid}-sid:1-cid:2", ) - await setup_test_component(hass, create_switch_with_ecobee_clear_hold_button) + await setup_test_component(hass, aid, create_switch_with_ecobee_clear_hold_button) assert ( entity_registry.async_get(button_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_1_2" diff --git a/tests/components/homekit_controller/test_camera.py b/tests/components/homekit_controller/test_camera.py index de64ee95d74..6e20c1feb3c 100644 --- a/tests/components/homekit_controller/test_camera.py +++ b/tests/components/homekit_controller/test_camera.py @@ -1,6 +1,7 @@ """Basic checks for HomeKit cameras.""" import base64 +from collections.abc import Callable from aiohomekit.model.services import ServicesTypes from aiohomekit.testing import FAKE_CAMERA_IMAGE @@ -9,7 +10,7 @@ from homeassistant.components import camera from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_camera(accessory): @@ -18,7 +19,9 @@ def create_camera(accessory): async def test_migrate_unique_ids( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test migrating entity unique ids.""" aid = get_next_aid() @@ -27,23 +30,23 @@ async def test_migrate_unique_ids( "homekit_controller", f"homekit-0001-aid:{aid}", ) - await setup_test_component(hass, create_camera) + await 
setup_test_component(hass, aid, create_camera) assert ( entity_registry.async_get(camera.entity_id).unique_id == f"00:00:00:00:00:00_{aid}" ) -async def test_read_state(hass: HomeAssistant) -> None: +async def test_read_state(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test reading the state of a HomeKit camera.""" - helper = await setup_test_component(hass, create_camera) + helper = await setup_test_component(hass, get_next_aid(), create_camera) state = await helper.poll_and_get_state() assert state.state == "idle" -async def test_get_image(hass: HomeAssistant) -> None: +async def test_get_image(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test getting a JPEG from a camera.""" - helper = await setup_test_component(hass, create_camera) + helper = await setup_test_component(hass, get_next_aid(), create_camera) image = await camera.async_get_image(hass, helper.entity_id) assert image.content == base64.b64decode(FAKE_CAMERA_IMAGE) diff --git a/tests/components/homekit_controller/test_climate.py b/tests/components/homekit_controller/test_climate.py index 5470c669700..183e020eb25 100644 --- a/tests/components/homekit_controller/test_climate.py +++ b/tests/components/homekit_controller/test_climate.py @@ -1,5 +1,7 @@ """Basic checks for HomeKitclimate.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, @@ -21,7 +23,7 @@ from homeassistant.components.climate import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component # Test thermostat devices @@ -73,9 +75,13 @@ def create_thermostat_service_min_max(accessory): char.maxValue = 1 -async def test_climate_respect_supported_op_modes_1(hass: HomeAssistant) -> None: +async def test_climate_respect_supported_op_modes_1( + hass: HomeAssistant, get_next_aid: 
Callable[[], int] +) -> None: """Test that climate respects minValue/maxValue hints.""" - helper = await setup_test_component(hass, create_thermostat_service_min_max) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_service_min_max + ) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["off", "heat"] @@ -88,16 +94,22 @@ def create_thermostat_service_valid_vals(accessory): char.valid_values = [0, 1, 2] -async def test_climate_respect_supported_op_modes_2(hass: HomeAssistant) -> None: +async def test_climate_respect_supported_op_modes_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that climate respects validValue hints.""" - helper = await setup_test_component(hass, create_thermostat_service_valid_vals) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_service_valid_vals + ) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["off", "heat", "cool"] -async def test_climate_change_thermostat_state(hass: HomeAssistant) -> None: +async def test_climate_change_thermostat_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -178,9 +190,11 @@ async def test_climate_change_thermostat_state(hass: HomeAssistant) -> None: ) -async def test_climate_check_min_max_values_per_mode(hass: HomeAssistant) -> None: +async def test_climate_check_min_max_values_per_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we we get the appropriate min/max values for each mode.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), 
create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -213,9 +227,11 @@ async def test_climate_check_min_max_values_per_mode(hass: HomeAssistant) -> Non assert climate_state.attributes["max_temp"] == 40 -async def test_climate_change_thermostat_temperature(hass: HomeAssistant) -> None: +async def test_climate_change_thermostat_temperature( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -244,9 +260,11 @@ async def test_climate_change_thermostat_temperature(hass: HomeAssistant) -> Non ) -async def test_climate_change_thermostat_temperature_range(hass: HomeAssistant) -> None: +async def test_climate_change_thermostat_temperature_range( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set separate heat and cool setpoints in heat_cool mode.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -278,10 +296,10 @@ async def test_climate_change_thermostat_temperature_range(hass: HomeAssistant) async def test_climate_change_thermostat_temperature_range_iphone( - hass: HomeAssistant, + hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: """Test that we can set all three set points at once (iPhone heat_cool mode support).""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -313,10 +331,10 @@ async def test_climate_change_thermostat_temperature_range_iphone( async def test_climate_cannot_set_thermostat_temp_range_in_wrong_mode( - 
hass: HomeAssistant, + hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: """Test that we cannot set range values when not in heat_cool mode.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -372,10 +390,12 @@ def create_thermostat_single_set_point_auto(accessory): async def test_climate_check_min_max_values_per_mode_sspa_device( - hass: HomeAssistant, + hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: """Test appropriate min/max values for each mode on sspa devices.""" - helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_single_set_point_auto + ) await hass.services.async_call( DOMAIN, @@ -408,9 +428,13 @@ async def test_climate_check_min_max_values_per_mode_sspa_device( assert climate_state.attributes["max_temp"] == 35 -async def test_climate_set_thermostat_temp_on_sspa_device(hass: HomeAssistant) -> None: +async def test_climate_set_thermostat_temp_on_sspa_device( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test setting temperature in different modes on device with single set point in auto.""" - helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_single_set_point_auto + ) await hass.services.async_call( DOMAIN, @@ -462,9 +486,13 @@ async def test_climate_set_thermostat_temp_on_sspa_device(hass: HomeAssistant) - ) -async def test_climate_set_mode_via_temp(hass: HomeAssistant) -> None: +async def test_climate_set_mode_via_temp( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test setting temperature and mode at same tims.""" - helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) + helper 
= await setup_test_component( + hass, get_next_aid(), create_thermostat_single_set_point_auto + ) await hass.services.async_call( DOMAIN, @@ -503,9 +531,11 @@ async def test_climate_set_mode_via_temp(hass: HomeAssistant) -> None: ) -async def test_climate_change_thermostat_humidity(hass: HomeAssistant) -> None: +async def test_climate_change_thermostat_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -534,9 +564,11 @@ async def test_climate_change_thermostat_humidity(hass: HomeAssistant) -> None: ) -async def test_climate_read_thermostat_state(hass: HomeAssistant) -> None: +async def test_climate_read_thermostat_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) # Simulate that heating is on await helper.async_update( @@ -591,9 +623,11 @@ async def test_climate_read_thermostat_state(hass: HomeAssistant) -> None: assert state.state == HVACMode.HEAT_COOL -async def test_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> None: +async def test_hvac_mode_vs_hvac_action( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Check that we haven't conflated hvac_mode and hvac_action.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) # Simulate that current temperature is above target temp # Heating might be on, but hvac_action currently 'off' @@ -628,9 +662,11 @@ async def test_hvac_mode_vs_hvac_action(hass: 
HomeAssistant) -> None: assert state.attributes["hvac_action"] == "heating" -async def test_hvac_mode_vs_hvac_action_current_mode_wrong(hass: HomeAssistant) -> None: +async def test_hvac_mode_vs_hvac_action_current_mode_wrong( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Check that we cope with buggy HEATING_COOLING_CURRENT.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await helper.async_update( ServicesTypes.THERMOSTAT, @@ -692,9 +728,13 @@ def create_heater_cooler_service_min_max(accessory): char.maxValue = 2 -async def test_heater_cooler_respect_supported_op_modes_1(hass: HomeAssistant) -> None: +async def test_heater_cooler_respect_supported_op_modes_1( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that climate respects minValue/maxValue hints.""" - helper = await setup_test_component(hass, create_heater_cooler_service_min_max) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service_min_max + ) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["heat", "cool", "off"] @@ -707,16 +747,24 @@ def create_theater_cooler_service_valid_vals(accessory): char.valid_values = [1, 2] -async def test_heater_cooler_respect_supported_op_modes_2(hass: HomeAssistant) -> None: +async def test_heater_cooler_respect_supported_op_modes_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that climate respects validValue hints.""" - helper = await setup_test_component(hass, create_theater_cooler_service_valid_vals) + helper = await setup_test_component( + hass, get_next_aid(), create_theater_cooler_service_valid_vals + ) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["heat", "cool", "off"] -async def test_heater_cooler_change_thermostat_state(hass: HomeAssistant) -> None: +async def 
test_heater_cooler_change_thermostat_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can change the operational mode.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -771,12 +819,16 @@ async def test_heater_cooler_change_thermostat_state(hass: HomeAssistant) -> Non ) -async def test_can_turn_on_after_off(hass: HomeAssistant) -> None: +async def test_can_turn_on_after_off( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we always force device from inactive to active when setting mode. This is a regression test for #81863. """ - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -806,9 +858,13 @@ async def test_can_turn_on_after_off(hass: HomeAssistant) -> None: ) -async def test_heater_cooler_change_thermostat_temperature(hass: HomeAssistant) -> None: +async def test_heater_cooler_change_thermostat_temperature( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can change the target temperature.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -849,9 +905,13 @@ async def test_heater_cooler_change_thermostat_temperature(hass: HomeAssistant) ) -async def test_heater_cooler_change_fan_speed(hass: HomeAssistant) -> None: +async def test_heater_cooler_change_fan_speed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can change the target fan speed.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await 
setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -897,9 +957,13 @@ async def test_heater_cooler_change_fan_speed(hass: HomeAssistant) -> None: ) -async def test_heater_cooler_read_fan_speed(hass: HomeAssistant) -> None: +async def test_heater_cooler_read_fan_speed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) # Simulate that fan speed is off await helper.async_update( @@ -946,9 +1010,13 @@ async def test_heater_cooler_read_fan_speed(hass: HomeAssistant) -> None: assert state.attributes["fan_mode"] == "high" -async def test_heater_cooler_read_thermostat_state(hass: HomeAssistant) -> None: +async def test_heater_cooler_read_thermostat_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) # Simulate that heating is on await helper.async_update( @@ -1000,9 +1068,13 @@ async def test_heater_cooler_read_thermostat_state(hass: HomeAssistant) -> None: assert state.state == HVACMode.HEAT_COOL -async def test_heater_cooler_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> None: +async def test_heater_cooler_hvac_mode_vs_hvac_action( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Check that we haven't conflated hvac_mode and hvac_action.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) # Simulate that current temperature is above target temp # 
Heating might be on, but hvac_action currently 'off' @@ -1039,9 +1111,13 @@ async def test_heater_cooler_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> No assert state.attributes["hvac_action"] == "heating" -async def test_heater_cooler_change_swing_mode(hass: HomeAssistant) -> None: +async def test_heater_cooler_change_swing_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can change the swing mode.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -1070,9 +1146,13 @@ async def test_heater_cooler_change_swing_mode(hass: HomeAssistant) -> None: ) -async def test_heater_cooler_turn_off(hass: HomeAssistant) -> None: +async def test_heater_cooler_turn_off( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that both hvac_action and hvac_mode return "off" when turned off.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) # Simulate that the device is turned off but CURRENT_HEATER_COOLER_STATE still returns HEATING/COOLING await helper.async_update( @@ -1090,7 +1170,9 @@ async def test_heater_cooler_turn_off(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a switch unique id.""" aid = get_next_aid() @@ -1099,7 +1181,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_heater_cooler_service) + await setup_test_component(hass, aid, create_heater_cooler_service) assert ( entity_registry.async_get(climate_entry.entity_id).unique_id == 
f"00:00:00:00:00:00_{aid}_8" diff --git a/tests/components/homekit_controller/test_config_flow.py b/tests/components/homekit_controller/test_config_flow.py index a336758f4ac..420c9d45803 100644 --- a/tests/components/homekit_controller/test_config_flow.py +++ b/tests/components/homekit_controller/test_config_flow.py @@ -211,13 +211,13 @@ def setup_mock_accessory(controller): bridge = Accessories() accessory = Accessory.create_with_info( + 1, name="Koogeek-LS1-20833F", manufacturer="Koogeek", model="LS1", serial_number="12345", firmware_revision="1.1", ) - accessory.aid = 1 service = accessory.add_service(ServicesTypes.LIGHTBULB) on_char = service.add_char(CharacteristicsTypes.ON) diff --git a/tests/components/homekit_controller/test_connection.py b/tests/components/homekit_controller/test_connection.py index 0f2cdb7c9db..8d3cc02fab9 100644 --- a/tests/components/homekit_controller/test_connection.py +++ b/tests/components/homekit_controller/test_connection.py @@ -1,8 +1,13 @@ """Tests for HKDevice.""" +from collections.abc import Callable import dataclasses +from unittest import mock from aiohomekit.controller import TransportType +from aiohomekit.model.characteristics import CharacteristicsTypes +from aiohomekit.model.services import ServicesTypes +from aiohomekit.testing import FakeController import pytest from homeassistant.components.homekit_controller.const import ( @@ -12,11 +17,17 @@ from homeassistant.components.homekit_controller.const import ( IDENTIFIER_LEGACY_SERIAL_NUMBER, ) from homeassistant.components.thread import async_add_dataset, dataset_store +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from .common import setup_accessories_from_file, setup_platform, setup_test_accessories +from .common import ( + setup_accessories_from_file, + setup_platform, + 
setup_test_accessories, + setup_test_component, +) from tests.common import MockConfigEntry @@ -118,7 +129,7 @@ async def test_migrate_device_id_no_serial_skip_if_other_owner( bridge = device_registry.async_get(bridge.id) assert bridge.identifiers == variant.before - assert bridge.config_entries == [entry.entry_id] + assert bridge.config_entries == {entry.entry_id} @pytest.mark.parametrize("variant", DEVICE_MIGRATION_TESTS) @@ -331,3 +342,59 @@ async def test_thread_provision_migration_failed(hass: HomeAssistant) -> None: ) assert config_entry.data["Connection"] == "BLE" + + +async def test_poll_firmware_version_only_all_watchable_accessory_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that we only poll firmware if available and all chars are watchable accessory mode.""" + + def _create_accessory(accessory): + service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") + + on_char = service.add_char(CharacteristicsTypes.ON) + on_char.value = 0 + + brightness = service.add_char(CharacteristicsTypes.BRIGHTNESS) + brightness.value = 0 + + return service + + helper = await setup_test_component(hass, get_next_aid(), _create_accessory) + + with mock.patch.object( + helper.pairing, + "get_characteristics", + wraps=helper.pairing.get_characteristics, + ) as mock_get_characteristics: + # Initial state is that the light is off + state = await helper.poll_and_get_state() + assert state.state == STATE_OFF + assert mock_get_characteristics.call_count == 2 + # Verify only firmware version is polled + assert mock_get_characteristics.call_args_list[0][0][0] == {(1, 7)} + assert mock_get_characteristics.call_args_list[1][0][0] == {(1, 7)} + + # Test device goes offline + helper.pairing.available = False + with mock.patch.object( + FakeController, + "async_reachable", + return_value=False, + ): + state = await helper.poll_and_get_state() + assert state.state == STATE_UNAVAILABLE + # Tries twice before declaring unavailable + 
assert mock_get_characteristics.call_count == 4 + + # Test device comes back online + helper.pairing.available = True + state = await helper.poll_and_get_state() + assert state.state == STATE_OFF + assert mock_get_characteristics.call_count == 6 + + # Next poll should not happen because its a single + # accessory, available, and all chars are watchable + state = await helper.poll_and_get_state() + assert state.state == STATE_OFF + assert mock_get_characteristics.call_count == 8 diff --git a/tests/components/homekit_controller/test_cover.py b/tests/components/homekit_controller/test_cover.py index 2157eb51212..c819eac1f5a 100644 --- a/tests/components/homekit_controller/test_cover.py +++ b/tests/components/homekit_controller/test_cover.py @@ -1,5 +1,7 @@ """Basic checks for HomeKitalarm_control_panel.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -7,7 +9,7 @@ from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_window_covering_service(accessory): @@ -113,9 +115,13 @@ def create_window_covering_service_with_none_tilt(accessory): tilt_target.maxValue = 0 -async def test_change_window_cover_state(hass: HomeAssistant) -> None: +async def test_change_window_cover_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit alarm on and off again.""" - helper = await setup_test_component(hass, create_window_covering_service) + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service + ) await hass.services.async_call( "cover", "open_cover", {"entity_id": helper.entity_id}, blocking=True @@ -138,9 +144,13 @@ async def test_change_window_cover_state(hass: HomeAssistant) -> 
None: ) -async def test_read_window_cover_state(hass: HomeAssistant) -> None: +async def test_read_window_cover_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit alarm accessory.""" - helper = await setup_test_component(hass, create_window_covering_service) + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service + ) await helper.async_update( ServicesTypes.WINDOW_COVERING, @@ -171,10 +181,12 @@ async def test_read_window_cover_state(hass: HomeAssistant) -> None: assert state.attributes["obstruction-detected"] is True -async def test_read_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_horizontal( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that horizontal tilt is handled correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_h_tilt + hass, get_next_aid(), create_window_covering_service_with_h_tilt ) await helper.async_update( @@ -186,10 +198,12 @@ async def test_read_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_horizontal_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that horizontal tilt is handled correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_h_tilt_2 + hass, get_next_aid(), create_window_covering_service_with_h_tilt_2 ) await helper.async_update( @@ -201,10 +215,12 @@ async def test_read_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None: assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_vertical(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_vertical( + hass: HomeAssistant, get_next_aid: 
Callable[[], int] +) -> None: """Test that vertical tilt is handled correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt + hass, get_next_aid(), create_window_covering_service_with_v_tilt ) await helper.async_update( @@ -216,10 +232,12 @@ async def test_read_window_cover_tilt_vertical(hass: HomeAssistant) -> None: assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_vertical_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is handled correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt_2 + hass, get_next_aid(), create_window_covering_service_with_v_tilt_2 ) await helper.async_update( @@ -231,10 +249,12 @@ async def test_read_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_missing_tilt(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_missing_tilt( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that missing tilt is handled.""" helper = await setup_test_component( - hass, create_window_covering_service_with_none_tilt + hass, get_next_aid(), create_window_covering_service_with_none_tilt ) await helper.async_update( @@ -246,10 +266,12 @@ async def test_read_window_cover_tilt_missing_tilt(hass: HomeAssistant) -> None: assert state.state != STATE_UNAVAILABLE -async def test_write_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: +async def test_write_window_cover_tilt_horizontal( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that horizontal tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_h_tilt + hass, get_next_aid(), create_window_covering_service_with_h_tilt 
) await hass.services.async_call( @@ -267,10 +289,12 @@ async def test_write_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: ) -async def test_write_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None: +async def test_write_window_cover_tilt_horizontal_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that horizontal tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_h_tilt_2 + hass, get_next_aid(), create_window_covering_service_with_h_tilt_2 ) await hass.services.async_call( @@ -288,10 +312,12 @@ async def test_write_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None ) -async def test_write_window_cover_tilt_vertical(hass: HomeAssistant) -> None: +async def test_write_window_cover_tilt_vertical( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt + hass, get_next_aid(), create_window_covering_service_with_v_tilt ) await hass.services.async_call( @@ -309,10 +335,12 @@ async def test_write_window_cover_tilt_vertical(hass: HomeAssistant) -> None: ) -async def test_write_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: +async def test_write_window_cover_tilt_vertical_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt_2 + hass, get_next_aid(), create_window_covering_service_with_v_tilt_2 ) await hass.services.async_call( @@ -330,10 +358,12 @@ async def test_write_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: ) -async def test_window_cover_stop(hass: HomeAssistant) -> None: +async def test_window_cover_stop( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is written correctly.""" 
helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt + hass, get_next_aid(), create_window_covering_service_with_v_tilt ) await hass.services.async_call( @@ -366,9 +396,13 @@ def create_garage_door_opener_service(accessory): return service -async def test_change_door_state(hass: HomeAssistant) -> None: +async def test_change_door_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn open and close a HomeKit garage door.""" - helper = await setup_test_component(hass, create_garage_door_opener_service) + helper = await setup_test_component( + hass, get_next_aid(), create_garage_door_opener_service + ) await hass.services.async_call( "cover", "open_cover", {"entity_id": helper.entity_id}, blocking=True @@ -391,9 +425,13 @@ async def test_change_door_state(hass: HomeAssistant) -> None: ) -async def test_read_door_state(hass: HomeAssistant) -> None: +async def test_read_door_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit garage door.""" - helper = await setup_test_component(hass, create_garage_door_opener_service) + helper = await setup_test_component( + hass, get_next_aid(), create_garage_door_opener_service + ) await helper.async_update( ServicesTypes.GARAGE_DOOR_OPENER, @@ -432,7 +470,9 @@ async def test_read_door_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a cover unique id.""" aid = get_next_aid() @@ -441,7 +481,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_garage_door_opener_service) + await setup_test_component(hass, aid, create_garage_door_opener_service) assert ( 
entity_registry.async_get(cover_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_device_trigger.py b/tests/components/homekit_controller/test_device_trigger.py index 43572f56d50..ecf34868b6c 100644 --- a/tests/components/homekit_controller/test_device_trigger.py +++ b/tests/components/homekit_controller/test_device_trigger.py @@ -1,5 +1,7 @@ """Test homekit_controller stateless triggers.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes import pytest @@ -15,7 +17,7 @@ from homeassistant.setup import async_setup_component from .common import setup_test_component -from tests.common import async_get_device_automations, async_mock_service +from tests.common import async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -23,12 +25,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - def create_remote(accessory): """Define characteristics for a button (that is inn a group).""" service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL) @@ -88,9 +84,10 @@ async def test_enumerate_remote( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that remote is correctly enumerated.""" - await setup_test_component(hass, create_remote) + await setup_test_component(hass, get_next_aid(), create_remote) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -139,9 +136,10 @@ async def test_enumerate_button( hass: HomeAssistant, device_registry: dr.DeviceRegistry, 
entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that a button is correctly enumerated.""" - await setup_test_component(hass, create_button) + await setup_test_component(hass, get_next_aid(), create_button) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -189,9 +187,10 @@ async def test_enumerate_doorbell( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that a button is correctly enumerated.""" - await setup_test_component(hass, create_doorbell) + await setup_test_component(hass, get_next_aid(), create_doorbell) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -239,10 +238,11 @@ async def test_handle_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + get_next_aid: Callable[[], int], + service_calls: list[ServiceCall], ) -> None: """Test that events are handled.""" - helper = await setup_test_component(hass, create_remote) + helper = await setup_test_component(hass, get_next_aid(), create_remote) entry = entity_registry.async_get("sensor.testdevice_battery") @@ -303,8 +303,8 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "device - button1 - single_press - 0" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "device - button1 - single_press - 0" # Make sure automation doesn't trigger for long press helper.pairing.testing.update_named_service( @@ -312,7 +312,7 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure automation doesn't trigger for double press 
helper.pairing.testing.update_named_service( @@ -320,7 +320,7 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure second automation fires for long press helper.pairing.testing.update_named_service( @@ -328,8 +328,8 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "device - button2 - long_press - 0" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "device - button2 - long_press - 0" # Turn the automations off await hass.services.async_call( @@ -338,6 +338,7 @@ async def test_handle_events( {"entity_id": "automation.long_press"}, blocking=True, ) + assert len(service_calls) == 3 await hass.services.async_call( "automation", @@ -345,6 +346,7 @@ async def test_handle_events( {"entity_id": "automation.single_press"}, blocking=True, ) + assert len(service_calls) == 4 # Make sure event no longer fires helper.pairing.testing.update_named_service( @@ -352,17 +354,18 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 4 async def test_handle_events_late_setup( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + get_next_aid: Callable[[], int], + service_calls: list[ServiceCall], ) -> None: """Test that events are handled when setup happens after startup.""" - helper = await setup_test_component(hass, create_remote) + helper = await setup_test_component(hass, get_next_aid(), create_remote) entry = entity_registry.async_get("sensor.testdevice_battery") @@ -432,8 +435,8 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "device - button1 - single_press - 0" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "device - button1 - 
single_press - 0" # Make sure automation doesn't trigger for a polled None helper.pairing.testing.update_named_service( @@ -441,7 +444,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure automation doesn't trigger for long press helper.pairing.testing.update_named_service( @@ -449,7 +452,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure automation doesn't trigger for double press helper.pairing.testing.update_named_service( @@ -457,7 +460,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure second automation fires for long press helper.pairing.testing.update_named_service( @@ -465,8 +468,8 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "device - button2 - long_press - 0" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "device - button2 - long_press - 0" # Turn the automations off await hass.services.async_call( @@ -475,6 +478,7 @@ async def test_handle_events_late_setup( {"entity_id": "automation.long_press"}, blocking=True, ) + assert len(service_calls) == 3 await hass.services.async_call( "automation", @@ -482,6 +486,7 @@ async def test_handle_events_late_setup( {"entity_id": "automation.single_press"}, blocking=True, ) + assert len(service_calls) == 4 # Make sure event no longer fires helper.pairing.testing.update_named_service( @@ -489,4 +494,4 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 4 diff --git a/tests/components/homekit_controller/test_event.py b/tests/components/homekit_controller/test_event.py index e139b49982a..99dcf38fafc 100644 --- 
a/tests/components/homekit_controller/test_event.py +++ b/tests/components/homekit_controller/test_event.py @@ -1,5 +1,7 @@ """Test homekit_controller stateless triggers.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -65,9 +67,13 @@ def create_doorbell(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -async def test_remote(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_remote( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], +) -> None: """Test that remote is supported.""" - helper = await setup_test_component(hass, create_remote) + helper = await setup_test_component(hass, get_next_aid(), create_remote) entities = [ ("event.testdevice_button_1", "Button 1"), @@ -108,9 +114,13 @@ async def test_remote(hass: HomeAssistant, entity_registry: er.EntityRegistry) - assert state.attributes["event_type"] == "long_press" -async def test_button(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_button( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], +) -> None: """Test that a button is correctly enumerated.""" - helper = await setup_test_component(hass, create_button) + helper = await setup_test_component(hass, get_next_aid(), create_button) entity_id = "event.testdevice_button_1" button = entity_registry.async_get(entity_id) @@ -145,10 +155,12 @@ async def test_button(hass: HomeAssistant, entity_registry: er.EntityRegistry) - async def test_doorbell( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that doorbell service is handled.""" - helper = await setup_test_component(hass, create_doorbell) + helper = await setup_test_component(hass, get_next_aid(), 
create_doorbell) entity_id = "event.testdevice_doorbell" doorbell = entity_registry.async_get(entity_id) diff --git a/tests/components/homekit_controller/test_fan.py b/tests/components/homekit_controller/test_fan.py index 428d3ab7d50..8de447144af 100644 --- a/tests/components/homekit_controller/test_fan.py +++ b/tests/components/homekit_controller/test_fan.py @@ -1,12 +1,14 @@ """Basic checks for HomeKit fans.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_fan_service(accessory): @@ -90,9 +92,11 @@ def create_fanv2_service_without_rotation_speed(accessory): swing_mode.value = 0 -async def test_fan_read_state(hass: HomeAssistant) -> None: +async def test_fan_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) state = await helper.async_update( ServicesTypes.FAN, {CharacteristicsTypes.ON: False} @@ -105,9 +109,9 @@ async def test_fan_read_state(hass: HomeAssistant) -> None: assert state.state == "on" -async def test_turn_on(hass: HomeAssistant) -> None: +async def test_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can turn a fan on.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await hass.services.async_call( "fan", @@ -152,10 +156,12 @@ async def test_turn_on(hass: HomeAssistant) -> None: ) -async def test_turn_on_off_without_rotation_speed(hass: HomeAssistant) -> None: +async 
def test_turn_on_off_without_rotation_speed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a fan on.""" helper = await setup_test_component( - hass, create_fanv2_service_without_rotation_speed + hass, get_next_aid(), create_fanv2_service_without_rotation_speed ) await hass.services.async_call( @@ -185,9 +191,9 @@ async def test_turn_on_off_without_rotation_speed(hass: HomeAssistant) -> None: ) -async def test_turn_off(hass: HomeAssistant) -> None: +async def test_turn_off(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can turn a fan off.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -205,9 +211,9 @@ async def test_turn_off(hass: HomeAssistant) -> None: ) -async def test_set_speed(hass: HomeAssistant) -> None: +async def test_set_speed(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we set fan speed.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -264,9 +270,11 @@ async def test_set_speed(hass: HomeAssistant) -> None: ) -async def test_set_percentage(hass: HomeAssistant) -> None: +async def test_set_percentage( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -297,9 +305,9 @@ async def test_set_percentage(hass: HomeAssistant) -> None: ) -async def test_speed_read(hass: HomeAssistant) -> None: +async def test_speed_read(hass: 
HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) state = await helper.async_update( ServicesTypes.FAN, @@ -337,9 +345,11 @@ async def test_speed_read(hass: HomeAssistant) -> None: assert state.attributes["percentage"] == 0 -async def test_set_direction(hass: HomeAssistant) -> None: +async def test_set_direction( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set fan spin direction.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await hass.services.async_call( "fan", @@ -368,9 +378,11 @@ async def test_set_direction(hass: HomeAssistant) -> None: ) -async def test_direction_read(hass: HomeAssistant) -> None: +async def test_direction_read( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) state = await helper.async_update( ServicesTypes.FAN, {CharacteristicsTypes.ROTATION_DIRECTION: 0} @@ -383,9 +395,11 @@ async def test_direction_read(hass: HomeAssistant) -> None: assert state.attributes["direction"] == "reverse" -async def test_fanv2_read_state(hass: HomeAssistant) -> None: +async def test_fanv2_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: False} @@ -398,9 +412,9 @@ async def 
test_fanv2_read_state(hass: HomeAssistant) -> None: assert state.state == "on" -async def test_v2_turn_on(hass: HomeAssistant) -> None: +async def test_v2_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can turn a fan on.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await hass.services.async_call( "fan", @@ -473,9 +487,11 @@ async def test_v2_turn_on(hass: HomeAssistant) -> None: ) -async def test_v2_turn_off(hass: HomeAssistant) -> None: +async def test_v2_turn_off( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a fan off.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -493,9 +509,11 @@ async def test_v2_turn_off(hass: HomeAssistant) -> None: ) -async def test_v2_set_speed(hass: HomeAssistant) -> None: +async def test_v2_set_speed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we set fan speed.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -552,9 +570,11 @@ async def test_v2_set_speed(hass: HomeAssistant) -> None: ) -async def test_v2_set_percentage(hass: HomeAssistant) -> None: +async def test_v2_set_percentage( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -585,9 +605,13 @@ 
async def test_v2_set_percentage(hass: HomeAssistant) -> None: ) -async def test_v2_set_percentage_with_min_step(hass: HomeAssistant) -> None: +async def test_v2_set_percentage_with_min_step( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component(hass, create_fanv2_service_with_min_step) + helper = await setup_test_component( + hass, get_next_aid(), create_fanv2_service_with_min_step + ) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -618,9 +642,11 @@ async def test_v2_set_percentage_with_min_step(hass: HomeAssistant) -> None: ) -async def test_v2_speed_read(hass: HomeAssistant) -> None: +async def test_v2_speed_read( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, @@ -657,9 +683,11 @@ async def test_v2_speed_read(hass: HomeAssistant) -> None: assert state.attributes["percentage"] == 0 -async def test_v2_set_direction(hass: HomeAssistant) -> None: +async def test_v2_set_direction( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set fan spin direction.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await hass.services.async_call( "fan", @@ -688,9 +716,11 @@ async def test_v2_set_direction(hass: HomeAssistant) -> None: ) -async def test_v2_direction_read(hass: HomeAssistant) -> None: +async def test_v2_direction_read( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await 
setup_test_component(hass, get_next_aid(), create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.ROTATION_DIRECTION: 0} @@ -703,9 +733,11 @@ async def test_v2_direction_read(hass: HomeAssistant) -> None: assert state.attributes["direction"] == "reverse" -async def test_v2_oscillate(hass: HomeAssistant) -> None: +async def test_v2_oscillate( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can control a fans oscillation.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await hass.services.async_call( "fan", @@ -734,9 +766,11 @@ async def test_v2_oscillate(hass: HomeAssistant) -> None: ) -async def test_v2_oscillate_read(hass: HomeAssistant) -> None: +async def test_v2_oscillate_read( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.SWING_MODE: 0} @@ -750,11 +784,11 @@ async def test_v2_oscillate_read(hass: HomeAssistant) -> None: async def test_v2_set_percentage_non_standard_rotation_range( - hass: HomeAssistant, + hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: """Test that we set fan speed with a non-standard rotation range.""" helper = await setup_test_component( - hass, create_fanv2_service_non_standard_rotation_range + hass, get_next_aid(), create_fanv2_service_non_standard_rotation_range ) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -813,7 +847,9 @@ async def test_v2_set_percentage_non_standard_rotation_range( async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + 
entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a fan unique id.""" aid = get_next_aid() @@ -822,7 +858,9 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_fanv2_service_non_standard_rotation_range) + await setup_test_component( + hass, aid, create_fanv2_service_non_standard_rotation_range + ) assert ( entity_registry.async_get(fan_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_humidifier.py b/tests/components/homekit_controller/test_humidifier.py index 60c74be3949..a031086e93d 100644 --- a/tests/components/homekit_controller/test_humidifier.py +++ b/tests/components/homekit_controller/test_humidifier.py @@ -1,5 +1,7 @@ """Basic checks for HomeKit Humidifier/Dehumidifier.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -7,7 +9,7 @@ from homeassistant.components.humidifier import DOMAIN, MODE_AUTO, MODE_NORMAL from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_humidifier_service(accessory): @@ -64,9 +66,11 @@ def create_dehumidifier_service(accessory): return service -async def test_humidifier_active_state(hass: HomeAssistant) -> None: +async def test_humidifier_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit humidifier on and off again.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True @@ -87,9 +91,13 @@ async def 
test_humidifier_active_state(hass: HomeAssistant) -> None: ) -async def test_dehumidifier_active_state(hass: HomeAssistant) -> None: +async def test_dehumidifier_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit dehumidifier on and off again.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) await hass.services.async_call( DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True @@ -110,9 +118,11 @@ async def test_dehumidifier_active_state(hass: HomeAssistant) -> None: ) -async def test_humidifier_read_humidity(hass: HomeAssistant) -> None: +async def test_humidifier_read_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -149,9 +159,13 @@ async def test_humidifier_read_humidity(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_dehumidifier_read_humidity(hass: HomeAssistant) -> None: +async def test_dehumidifier_read_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -186,9 +200,11 @@ async def test_dehumidifier_read_humidity(hass: HomeAssistant) -> None: assert state.attributes["humidity"] == 40 -async def test_humidifier_set_humidity(hass: HomeAssistant) -> None: +async def 
test_humidifier_set_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( DOMAIN, @@ -202,9 +218,13 @@ async def test_humidifier_set_humidity(hass: HomeAssistant) -> None: ) -async def test_dehumidifier_set_humidity(hass: HomeAssistant) -> None: +async def test_dehumidifier_set_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) await hass.services.async_call( DOMAIN, @@ -218,9 +238,11 @@ async def test_dehumidifier_set_humidity(hass: HomeAssistant) -> None: ) -async def test_humidifier_set_mode(hass: HomeAssistant) -> None: +async def test_humidifier_set_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the mode of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( DOMAIN, @@ -251,9 +273,13 @@ async def test_humidifier_set_mode(hass: HomeAssistant) -> None: ) -async def test_dehumidifier_set_mode(hass: HomeAssistant) -> None: +async def test_dehumidifier_set_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the mode of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) await hass.services.async_call( DOMAIN, 
@@ -284,9 +310,11 @@ async def test_dehumidifier_set_mode(hass: HomeAssistant) -> None: ) -async def test_humidifier_read_only_mode(hass: HomeAssistant) -> None: +async def test_humidifier_read_only_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) state = await helper.poll_and_get_state() assert state.attributes["mode"] == "normal" @@ -324,9 +352,13 @@ async def test_humidifier_read_only_mode(hass: HomeAssistant) -> None: assert state.attributes["mode"] == "normal" -async def test_dehumidifier_read_only_mode(hass: HomeAssistant) -> None: +async def test_dehumidifier_read_only_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) state = await helper.poll_and_get_state() assert state.attributes["mode"] == "normal" @@ -364,9 +396,11 @@ async def test_dehumidifier_read_only_mode(hass: HomeAssistant) -> None: assert state.attributes["mode"] == "normal" -async def test_humidifier_target_humidity_modes(hass: HomeAssistant) -> None: +async def test_humidifier_target_humidity_modes( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -409,9 +443,13 @@ async def test_humidifier_target_humidity_modes(hass: HomeAssistant) -> None: assert 
state.attributes["humidity"] == 37 -async def test_dehumidifier_target_humidity_modes(hass: HomeAssistant) -> None: +async def test_dehumidifier_target_humidity_modes( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -457,7 +495,9 @@ async def test_dehumidifier_target_humidity_modes(hass: HomeAssistant) -> None: async def test_migrate_entity_ids( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that we can migrate humidifier entity ids.""" aid = get_next_aid() @@ -467,7 +507,7 @@ async def test_migrate_entity_ids( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_humidifier_service) + await setup_test_component(hass, aid, create_humidifier_service) assert ( entity_registry.async_get(humidifier_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_8" diff --git a/tests/components/homekit_controller/test_init.py b/tests/components/homekit_controller/test_init.py index 542d87d0b0e..c443e56b3a4 100644 --- a/tests/components/homekit_controller/test_init.py +++ b/tests/components/homekit_controller/test_init.py @@ -1,5 +1,6 @@ """Tests for homekit_controller init.""" +from collections.abc import Callable from datetime import timedelta import pathlib from unittest.mock import patch @@ -46,9 +47,11 @@ def create_motion_sensor_service(accessory): cur_state.value = 0 -async def test_unload_on_stop(hass: HomeAssistant) -> None: +async def test_unload_on_stop( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test async_unload is called on stop.""" 
- await setup_test_component(hass, create_motion_sensor_service) + await setup_test_component(hass, get_next_aid(), create_motion_sensor_service) with patch( "homeassistant.components.homekit_controller.HKDevice.async_unload" ) as async_unlock_mock: @@ -58,9 +61,13 @@ async def test_unload_on_stop(hass: HomeAssistant) -> None: assert async_unlock_mock.called -async def test_async_remove_entry(hass: HomeAssistant) -> None: +async def test_async_remove_entry( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test unpairing a component.""" - helper = await setup_test_component(hass, create_motion_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_motion_sensor_service + ) controller = helper.pairing.controller hkid = "00:00:00:00:00:00" @@ -88,10 +95,13 @@ async def test_device_remove_devices( device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, hass_ws_client: WebSocketGenerator, + get_next_aid: Callable[[], int], ) -> None: """Test we can only remove a device that no longer exists.""" assert await async_setup_component(hass, "config", {}) - helper: Helper = await setup_test_component(hass, create_alive_service) + helper: Helper = await setup_test_component( + hass, get_next_aid(), create_alive_service + ) config_entry = helper.config_entry entry_id = config_entry.entry_id @@ -110,10 +120,13 @@ async def test_device_remove_devices( assert response["success"] -async def test_offline_device_raises(hass: HomeAssistant, controller) -> None: +async def test_offline_device_raises( + hass: HomeAssistant, get_next_aid: Callable[[], int], controller +) -> None: """Test an offline device raises ConfigEntryNotReady.""" is_connected = False + aid = get_next_aid() class OfflineFakePairing(FakePairing): """Fake pairing that can flip is_connected.""" @@ -140,7 +153,7 @@ async def test_offline_device_raises(hass: HomeAssistant, controller) -> None: return {} accessory = Accessory.create_with_info( - 
"TestDevice", "example.com", "Test", "0001", "0.1" + aid, "TestDevice", "example.com", "Test", "0001", "0.1" ) create_alive_service(accessory) @@ -162,11 +175,12 @@ async def test_offline_device_raises(hass: HomeAssistant, controller) -> None: async def test_ble_device_only_checks_is_available( - hass: HomeAssistant, controller + hass: HomeAssistant, get_next_aid: Callable[[], int], controller ) -> None: """Test a BLE device only checks is_available.""" is_available = False + aid = get_next_aid() class FakeBLEPairing(FakePairing): """Fake BLE pairing that can flip is_available.""" @@ -197,7 +211,7 @@ async def test_ble_device_only_checks_is_available( return {} accessory = Accessory.create_with_info( - "TestDevice", "example.com", "Test", "0001", "0.1" + aid, "TestDevice", "example.com", "Test", "0001", "0.1" ) create_alive_service(accessory) @@ -273,12 +287,16 @@ async def test_snapshots( entry = asdict(entity_entry) entry.pop("id", None) entry.pop("device_id", None) + entry.pop("created_at", None) + entry.pop("modified_at", None) entities.append({"entry": entry, "state": state_dict}) device_dict = asdict(device) device_dict.pop("id", None) device_dict.pop("via_device_id", None) + device_dict.pop("created_at", None) + device_dict.pop("modified_at", None) devices.append({"device": device_dict, "entities": entities}) assert snapshot == devices diff --git a/tests/components/homekit_controller/test_light.py b/tests/components/homekit_controller/test_light.py index c2644735ecb..04f4d3f5e29 100644 --- a/tests/components/homekit_controller/test_light.py +++ b/tests/components/homekit_controller/test_light.py @@ -1,7 +1,11 @@ """Basic checks for HomeKitSwitch.""" +from collections.abc import Callable +from unittest import mock + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes +from aiohomekit.testing import FakeController from homeassistant.components.homekit_controller.const import KNOWN_DEVICES from 
homeassistant.components.light import ( @@ -13,7 +17,7 @@ from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component LIGHT_BULB_NAME = "TestDevice" LIGHT_BULB_ENTITY_ID = "light.testdevice" @@ -55,9 +59,13 @@ def create_lightbulb_service_with_color_temp(accessory): return service -async def test_switch_change_light_state(hass: HomeAssistant) -> None: +async def test_switch_change_light_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit light on and off again.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_hs) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_hs + ) await hass.services.async_call( "light", @@ -102,9 +110,13 @@ async def test_switch_change_light_state(hass: HomeAssistant) -> None: ) -async def test_switch_change_light_state_color_temp(hass: HomeAssistant) -> None: +async def test_switch_change_light_state_color_temp( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn change color_temp.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) await hass.services.async_call( "light", @@ -122,9 +134,11 @@ async def test_switch_change_light_state_color_temp(hass: HomeAssistant) -> None ) -async def test_switch_read_light_state_dimmer(hass: HomeAssistant) -> None: +async def test_switch_read_light_state_dimmer( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service) + helper = await 
setup_test_component(hass, get_next_aid(), create_lightbulb_service) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -157,9 +171,11 @@ async def test_switch_read_light_state_dimmer(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_switch_push_light_state_dimmer(hass: HomeAssistant) -> None: +async def test_switch_push_light_state_dimmer( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service) + helper = await setup_test_component(hass, get_next_aid(), create_lightbulb_service) # Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -185,9 +201,13 @@ async def test_switch_push_light_state_dimmer(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_switch_read_light_state_hs(hass: HomeAssistant) -> None: +async def test_switch_read_light_state_hs( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_hs) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_hs + ) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -248,9 +268,13 @@ async def test_switch_read_light_state_hs(hass: HomeAssistant) -> None: assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 -async def test_switch_push_light_state_hs(hass: HomeAssistant) -> None: +async def test_switch_push_light_state_hs( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_hs) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_hs + ) # 
Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -279,9 +303,13 @@ async def test_switch_push_light_state_hs(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_switch_read_light_state_color_temp(hass: HomeAssistant) -> None: +async def test_switch_read_light_state_color_temp( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the color_temp of a light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -307,9 +335,13 @@ async def test_switch_read_light_state_color_temp(hass: HomeAssistant) -> None: assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 -async def test_switch_push_light_state_color_temp(hass: HomeAssistant) -> None: +async def test_switch_push_light_state_color_temp( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) # Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -328,9 +360,13 @@ async def test_switch_push_light_state_color_temp(hass: HomeAssistant) -> None: assert state.attributes["color_temp"] == 400 -async def test_light_becomes_unavailable_but_recovers(hass: HomeAssistant) -> None: +async def test_light_becomes_unavailable_but_recovers( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test transition to and from unavailable state.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, 
get_next_aid(), create_lightbulb_service_with_color_temp + ) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -338,7 +374,12 @@ async def test_light_becomes_unavailable_but_recovers(hass: HomeAssistant) -> No # Test device goes offline helper.pairing.available = False - state = await helper.poll_and_get_state() + with mock.patch.object( + FakeController, + "async_reachable", + return_value=False, + ): + state = await helper.poll_and_get_state() assert state.state == "unavailable" # Simulate that someone switched on the device in the real world not via HA @@ -356,9 +397,13 @@ async def test_light_becomes_unavailable_but_recovers(hass: HomeAssistant) -> No assert state.attributes["color_temp"] == 400 -async def test_light_unloaded_removed(hass: HomeAssistant) -> None: +async def test_light_unloaded_removed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test entity and HKDevice are correctly unloaded and removed.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -382,7 +427,9 @@ async def test_light_unloaded_removed(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a light unique id.""" aid = get_next_aid() @@ -391,7 +438,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_lightbulb_service_with_color_temp) + await setup_test_component(hass, aid, create_lightbulb_service_with_color_temp) assert ( entity_registry.async_get(light_entry.entity_id).unique_id @@ -400,7 +447,9 @@ async def 
test_migrate_unique_id( async def test_only_migrate_once( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we handle migration happening after an upgrade and than a downgrade and then an upgrade.""" aid = get_next_aid() @@ -414,7 +463,7 @@ async def test_only_migrate_once( "homekit_controller", f"00:00:00:00:00:00_{aid}_8", ) - await setup_test_component(hass, create_lightbulb_service_with_color_temp) + await setup_test_component(hass, aid, create_lightbulb_service_with_color_temp) assert ( entity_registry.async_get(old_light_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_lock.py b/tests/components/homekit_controller/test_lock.py index db248b82b1a..e56ca5fcffe 100644 --- a/tests/components/homekit_controller/test_lock.py +++ b/tests/components/homekit_controller/test_lock.py @@ -1,12 +1,14 @@ """Basic checks for HomeKitLock.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_lock_service(accessory): @@ -29,9 +31,11 @@ def create_lock_service(accessory): return service -async def test_switch_change_lock_state(hass: HomeAssistant) -> None: +async def test_switch_change_lock_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit lock on and off again.""" - helper = await setup_test_component(hass, create_lock_service) + helper = await setup_test_component(hass, get_next_aid(), create_lock_service) await hass.services.async_call( "lock", "lock", {"entity_id": "lock.testdevice"}, blocking=True @@ -54,9 +58,11 @@ async def 
test_switch_change_lock_state(hass: HomeAssistant) -> None: ) -async def test_switch_read_lock_state(hass: HomeAssistant) -> None: +async def test_switch_read_lock_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit lock accessory.""" - helper = await setup_test_component(hass, create_lock_service) + helper = await setup_test_component(hass, get_next_aid(), create_lock_service) state = await helper.async_update( ServicesTypes.LOCK_MECHANISM, @@ -119,7 +125,9 @@ async def test_switch_read_lock_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a lock unique id.""" aid = get_next_aid() @@ -128,7 +136,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_lock_service) + await setup_test_component(hass, aid, create_lock_service) assert ( entity_registry.async_get(lock_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_media_player.py b/tests/components/homekit_controller/test_media_player.py index 62a042ff7b9..a7f900217d7 100644 --- a/tests/components/homekit_controller/test_media_player.py +++ b/tests/components/homekit_controller/test_media_player.py @@ -1,5 +1,7 @@ """Basic checks for HomeKit motion sensors and contact sensors.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import ( CharacteristicPermissions, CharacteristicsTypes, @@ -10,7 +12,7 @@ import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_tv_service(accessory): @@ -62,9 +64,11 @@ def 
create_tv_service_with_target_media_state(accessory): return service -async def test_tv_read_state(hass: HomeAssistant) -> None: +async def test_tv_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) state = await helper.async_update( ServicesTypes.TELEVISION, @@ -91,18 +95,22 @@ async def test_tv_read_state(hass: HomeAssistant) -> None: assert state.state == "idle" -async def test_tv_read_sources(hass: HomeAssistant) -> None: +async def test_tv_read_sources( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) state = await helper.poll_and_get_state() assert state.attributes["source"] == "HDMI 1" assert state.attributes["source_list"] == ["HDMI 1", "HDMI 2"] -async def test_play_remote_key(hass: HomeAssistant) -> None: +async def test_play_remote_key( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can play media on a media player.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) await helper.async_update( ServicesTypes.TELEVISION, @@ -147,9 +155,11 @@ async def test_play_remote_key(hass: HomeAssistant) -> None: ) -async def test_pause_remote_key(hass: HomeAssistant) -> None: +async def test_pause_remote_key( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can pause a media player.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) await helper.async_update( 
ServicesTypes.TELEVISION, @@ -194,9 +204,11 @@ async def test_pause_remote_key(hass: HomeAssistant) -> None: ) -async def test_play(hass: HomeAssistant) -> None: +async def test_play(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can play media on a media player.""" - helper = await setup_test_component(hass, create_tv_service_with_target_media_state) + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) await helper.async_update( ServicesTypes.TELEVISION, @@ -243,9 +255,11 @@ async def test_play(hass: HomeAssistant) -> None: ) -async def test_pause(hass: HomeAssistant) -> None: +async def test_pause(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can turn pause a media player.""" - helper = await setup_test_component(hass, create_tv_service_with_target_media_state) + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) await helper.async_update( ServicesTypes.TELEVISION, @@ -291,9 +305,11 @@ async def test_pause(hass: HomeAssistant) -> None: ) -async def test_stop(hass: HomeAssistant) -> None: +async def test_stop(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can stop a media player.""" - helper = await setup_test_component(hass, create_tv_service_with_target_media_state) + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) await hass.services.async_call( "media_player", @@ -332,9 +348,11 @@ async def test_stop(hass: HomeAssistant) -> None: ) -async def test_tv_set_source(hass: HomeAssistant) -> None: +async def test_tv_set_source( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) await 
hass.services.async_call( "media_player", @@ -353,9 +371,11 @@ async def test_tv_set_source(hass: HomeAssistant) -> None: assert state.attributes["source"] == "HDMI 2" -async def test_tv_set_source_fail(hass: HomeAssistant) -> None: +async def test_tv_set_source_fail( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) with pytest.raises(ValueError): await hass.services.async_call( @@ -370,7 +390,9 @@ async def test_tv_set_source_fail(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a media_player unique id.""" aid = get_next_aid() @@ -379,7 +401,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_tv_service_with_target_media_state) + await setup_test_component(hass, aid, create_tv_service_with_target_media_state) assert ( entity_registry.async_get(media_player_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_number.py b/tests/components/homekit_controller/test_number.py index 96e2cbe8d4d..fcbcc3ca7a8 100644 --- a/tests/components/homekit_controller/test_number.py +++ b/tests/components/homekit_controller/test_number.py @@ -1,12 +1,14 @@ """Basic checks for HomeKit sensor.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, get_next_aid, setup_test_component +from .common import Helper, 
setup_test_component def create_switch_with_spray_level(accessory): @@ -31,7 +33,9 @@ def create_switch_with_spray_level(accessory): async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a number unique id.""" aid = get_next_aid() @@ -41,7 +45,7 @@ async def test_migrate_unique_id( f"homekit-0001-aid:{aid}-sid:8-cid:9", suggested_object_id="testdevice_spray_quantity", ) - await setup_test_component(hass, create_switch_with_spray_level) + await setup_test_component(hass, aid, create_switch_with_spray_level) assert ( entity_registry.async_get(number.entity_id).unique_id @@ -49,9 +53,13 @@ async def test_migrate_unique_id( ) -async def test_read_number(hass: HomeAssistant) -> None: +async def test_read_number( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component(hass, create_switch_with_spray_level) + helper = await setup_test_component( + hass, get_next_aid(), create_switch_with_spray_level + ) # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor. spray_level = Helper( @@ -75,9 +83,13 @@ async def test_read_number(hass: HomeAssistant) -> None: assert state.state == "5" -async def test_write_number(hass: HomeAssistant) -> None: +async def test_write_number( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component(hass, create_switch_with_spray_level) + helper = await setup_test_component( + hass, get_next_aid(), create_switch_with_spray_level + ) # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor. 
spray_level = Helper( diff --git a/tests/components/homekit_controller/test_select.py b/tests/components/homekit_controller/test_select.py index b00206e1b0d..cd9357b78d9 100644 --- a/tests/components/homekit_controller/test_select.py +++ b/tests/components/homekit_controller/test_select.py @@ -1,5 +1,7 @@ """Basic checks for HomeKit select entities.""" +from collections.abc import Callable + from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.characteristics.const import TemperatureDisplayUnits @@ -8,7 +10,7 @@ from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, get_next_aid, setup_test_component +from .common import Helper, setup_test_component def create_service_with_ecobee_mode(accessory: Accessory): @@ -35,7 +37,9 @@ def create_service_with_temperature_units(accessory: Accessory): async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test we can migrate a select unique id.""" aid = get_next_aid() @@ -46,7 +50,7 @@ async def test_migrate_unique_id( suggested_object_id="testdevice_current_mode", ) - await setup_test_component(hass, create_service_with_ecobee_mode) + await setup_test_component(hass, aid, create_service_with_ecobee_mode) assert ( entity_registry.async_get(select.entity_id).unique_id @@ -54,9 +58,13 @@ async def test_migrate_unique_id( ) -async def test_read_current_mode(hass: HomeAssistant) -> None: +async def test_read_current_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that Ecobee mode can be correctly read and show as human readable text.""" - helper = await setup_test_component(hass, create_service_with_ecobee_mode) + helper = await setup_test_component( + hass, 
get_next_aid(), create_service_with_ecobee_mode + ) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. ecobee_mode = Helper( @@ -92,9 +100,13 @@ async def test_read_current_mode(hass: HomeAssistant) -> None: assert state.state == "away" -async def test_write_current_mode(hass: HomeAssistant) -> None: +async def test_write_current_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test can set a specific mode.""" - helper = await setup_test_component(hass, create_service_with_ecobee_mode) + helper = await setup_test_component( + hass, get_next_aid(), create_service_with_ecobee_mode + ) helper.accessory.services.first(service_type=ServicesTypes.THERMOSTAT) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. @@ -140,9 +152,13 @@ async def test_write_current_mode(hass: HomeAssistant) -> None: ) -async def test_read_select(hass: HomeAssistant) -> None: +async def test_read_select( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test the generic select can read the current value.""" - helper = await setup_test_component(hass, create_service_with_temperature_units) + helper = await setup_test_component( + hass, get_next_aid(), create_service_with_temperature_units + ) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. 
select_entity = Helper( @@ -170,9 +186,13 @@ async def test_read_select(hass: HomeAssistant) -> None: assert state.state == "fahrenheit" -async def test_write_select(hass: HomeAssistant) -> None: +async def test_write_select( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test can set a value.""" - helper = await setup_test_component(hass, create_service_with_temperature_units) + helper = await setup_test_component( + hass, get_next_aid(), create_service_with_temperature_units + ) helper.accessory.services.first(service_type=ServicesTypes.THERMOSTAT) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. diff --git a/tests/components/homekit_controller/test_sensor.py b/tests/components/homekit_controller/test_sensor.py index 461d62742a5..ad896395e75 100644 --- a/tests/components/homekit_controller/test_sensor.py +++ b/tests/components/homekit_controller/test_sensor.py @@ -1,5 +1,6 @@ """Basic checks for HomeKit sensor.""" +from collections.abc import Callable from unittest.mock import patch from aiohomekit.model import Transport @@ -71,10 +72,12 @@ def create_battery_level_sensor(accessory): return service -async def test_temperature_sensor_read_state(hass: HomeAssistant) -> None: +async def test_temperature_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit temperature sensor accessory.""" helper = await setup_test_component( - hass, create_temperature_sensor_service, suffix="temperature" + hass, get_next_aid(), create_temperature_sensor_service, suffix="temperature" ) state = await helper.async_update( @@ -97,10 +100,12 @@ async def test_temperature_sensor_read_state(hass: HomeAssistant) -> None: assert state.attributes["state_class"] == SensorStateClass.MEASUREMENT -async def test_temperature_sensor_not_added_twice(hass: HomeAssistant) -> None: +async def test_temperature_sensor_not_added_twice( + hass: HomeAssistant, 
get_next_aid: Callable[[], int] +) -> None: """A standalone temperature sensor should not get a characteristic AND a service entity.""" helper = await setup_test_component( - hass, create_temperature_sensor_service, suffix="temperature" + hass, get_next_aid(), create_temperature_sensor_service, suffix="temperature" ) created_sensors = set() @@ -111,10 +116,12 @@ async def test_temperature_sensor_not_added_twice(hass: HomeAssistant) -> None: assert created_sensors == {helper.entity_id} -async def test_humidity_sensor_read_state(hass: HomeAssistant) -> None: +async def test_humidity_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit humidity sensor accessory.""" helper = await setup_test_component( - hass, create_humidity_sensor_service, suffix="humidity" + hass, get_next_aid(), create_humidity_sensor_service, suffix="humidity" ) state = await helper.async_update( @@ -136,10 +143,12 @@ async def test_humidity_sensor_read_state(hass: HomeAssistant) -> None: assert state.attributes["device_class"] == SensorDeviceClass.HUMIDITY -async def test_light_level_sensor_read_state(hass: HomeAssistant) -> None: +async def test_light_level_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit temperature sensor accessory.""" helper = await setup_test_component( - hass, create_light_level_sensor_service, suffix="light_level" + hass, get_next_aid(), create_light_level_sensor_service, suffix="light_level" ) state = await helper.async_update( @@ -161,10 +170,15 @@ async def test_light_level_sensor_read_state(hass: HomeAssistant) -> None: assert state.attributes["device_class"] == SensorDeviceClass.ILLUMINANCE -async def test_carbon_dioxide_level_sensor_read_state(hass: HomeAssistant) -> None: +async def test_carbon_dioxide_level_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of 
a HomeKit carbon dioxide sensor accessory.""" helper = await setup_test_component( - hass, create_carbon_dioxide_level_sensor_service, suffix="carbon_dioxide" + hass, + get_next_aid(), + create_carbon_dioxide_level_sensor_service, + suffix="carbon_dioxide", ) state = await helper.async_update( @@ -184,10 +198,12 @@ async def test_carbon_dioxide_level_sensor_read_state(hass: HomeAssistant) -> No assert state.state == "20" -async def test_battery_level_sensor(hass: HomeAssistant) -> None: +async def test_battery_level_sensor( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit battery level sensor.""" helper = await setup_test_component( - hass, create_battery_level_sensor, suffix="battery" + hass, get_next_aid(), create_battery_level_sensor, suffix="battery" ) state = await helper.async_update( @@ -211,10 +227,12 @@ async def test_battery_level_sensor(hass: HomeAssistant) -> None: assert state.attributes["device_class"] == SensorDeviceClass.BATTERY -async def test_battery_charging(hass: HomeAssistant) -> None: +async def test_battery_charging( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit battery's charging state.""" helper = await setup_test_component( - hass, create_battery_level_sensor, suffix="battery" + hass, get_next_aid(), create_battery_level_sensor, suffix="battery" ) state = await helper.async_update( @@ -235,10 +253,12 @@ async def test_battery_charging(hass: HomeAssistant) -> None: assert state.attributes["icon"] == "mdi:battery-charging-20" -async def test_battery_low(hass: HomeAssistant) -> None: +async def test_battery_low( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit battery's low state.""" helper = await setup_test_component( - hass, create_battery_level_sensor, suffix="battery" + hass, get_next_aid(), create_battery_level_sensor, suffix="battery" ) state = await 
helper.async_update( @@ -277,9 +297,11 @@ def create_switch_with_sensor(accessory): return service -async def test_switch_with_sensor(hass: HomeAssistant) -> None: +async def test_switch_with_sensor( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component(hass, create_switch_with_sensor) + helper = await setup_test_component(hass, get_next_aid(), create_switch_with_sensor) # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor. energy_helper = Helper( @@ -307,9 +329,11 @@ async def test_switch_with_sensor(hass: HomeAssistant) -> None: assert state.state == "50" -async def test_sensor_unavailable(hass: HomeAssistant) -> None: +async def test_sensor_unavailable( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a sensor becoming unavailable.""" - helper = await setup_test_component(hass, create_switch_with_sensor) + helper = await setup_test_component(hass, get_next_aid(), create_switch_with_sensor) outlet = helper.accessory.services.first(service_type=ServicesTypes.OUTLET) on_char = outlet[CharacteristicsTypes.ON] @@ -383,7 +407,9 @@ def test_thread_status_to_str() -> None: @pytest.mark.usefixtures("enable_bluetooth", "entity_registry_enabled_by_default") -async def test_rssi_sensor(hass: HomeAssistant) -> None: +async def test_rssi_sensor( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test an rssi sensor.""" inject_bluetooth_service_info(hass, TEST_DEVICE_SERVICE_INFO) @@ -398,14 +424,20 @@ async def test_rssi_sensor(hass: HomeAssistant) -> None: # Any accessory will do for this test, but we need at least # one or the rssi sensor will not be created await setup_test_component( - hass, create_battery_level_sensor, suffix="battery", connection="BLE" + hass, + get_next_aid(), + create_battery_level_sensor, + suffix="battery", + connection="BLE", ) assert 
hass.states.get("sensor.testdevice_signal_strength").state == "-56" @pytest.mark.usefixtures("enable_bluetooth", "entity_registry_enabled_by_default") async def test_migrate_rssi_sensor_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test an rssi sensor unique id migration.""" rssi_sensor = entity_registry.async_get_or_create( @@ -428,7 +460,11 @@ async def test_migrate_rssi_sensor_unique_id( # Any accessory will do for this test, but we need at least # one or the rssi sensor will not be created await setup_test_component( - hass, create_battery_level_sensor, suffix="battery", connection="BLE" + hass, + get_next_aid(), + create_battery_level_sensor, + suffix="battery", + connection="BLE", ) assert hass.states.get("sensor.renamed_rssi").state == "-56" diff --git a/tests/components/homekit_controller/test_storage.py b/tests/components/homekit_controller/test_storage.py index 9523dc9abb7..ab7d7afd6fe 100644 --- a/tests/components/homekit_controller/test_storage.py +++ b/tests/components/homekit_controller/test_storage.py @@ -1,5 +1,6 @@ """Basic checks for entity map storage.""" +from collections.abc import Callable from typing import Any from aiohomekit.model.characteristics import CharacteristicsTypes @@ -72,10 +73,10 @@ def create_lightbulb_service(accessory): async def test_storage_is_updated_on_add( - hass: HomeAssistant, hass_storage: dict[str, Any] + hass: HomeAssistant, hass_storage: dict[str, Any], get_next_aid: Callable[[], int] ) -> None: """Test entity map storage is cleaned up on adding an accessory.""" - await setup_test_component(hass, create_lightbulb_service) + await setup_test_component(hass, get_next_aid(), create_lightbulb_service) entity_map: EntityMapStorage = hass.data[ENTITY_MAP] hkid = "00:00:00:00:00:00" diff --git a/tests/components/homekit_controller/test_switch.py 
b/tests/components/homekit_controller/test_switch.py index 8a6b2a65e88..1fc49c5c636 100644 --- a/tests/components/homekit_controller/test_switch.py +++ b/tests/components/homekit_controller/test_switch.py @@ -1,5 +1,7 @@ """Basic checks for HomeKitSwitch.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import ( CharacteristicsTypes, InUseValues, @@ -10,7 +12,7 @@ from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_switch_service(accessory): @@ -50,9 +52,11 @@ def create_char_switch_service(accessory): on_char.value = False -async def test_switch_change_outlet_state(hass: HomeAssistant) -> None: +async def test_switch_change_outlet_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit outlet on and off again.""" - helper = await setup_test_component(hass, create_switch_service) + helper = await setup_test_component(hass, get_next_aid(), create_switch_service) await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True @@ -75,9 +79,11 @@ async def test_switch_change_outlet_state(hass: HomeAssistant) -> None: ) -async def test_switch_read_outlet_state(hass: HomeAssistant) -> None: +async def test_switch_read_outlet_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit outlet accessory.""" - helper = await setup_test_component(hass, create_switch_service) + helper = await setup_test_component(hass, get_next_aid(), create_switch_service) # Initial state is that the switch is off and the outlet isn't in use switch_1 = await helper.poll_and_get_state() @@ -108,9 +114,11 @@ async def test_switch_read_outlet_state(hass: HomeAssistant) -> None: assert 
switch_1.attributes["outlet_in_use"] is True -async def test_valve_change_active_state(hass: HomeAssistant) -> None: +async def test_valve_change_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a valve on and off again.""" - helper = await setup_test_component(hass, create_valve_service) + helper = await setup_test_component(hass, get_next_aid(), create_valve_service) await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True @@ -133,9 +141,11 @@ async def test_valve_change_active_state(hass: HomeAssistant) -> None: ) -async def test_valve_read_state(hass: HomeAssistant) -> None: +async def test_valve_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a valve accessory.""" - helper = await setup_test_component(hass, create_valve_service) + helper = await setup_test_component(hass, get_next_aid(), create_valve_service) # Initial state is that the switch is off and the outlet isn't in use switch_1 = await helper.poll_and_get_state() @@ -166,10 +176,12 @@ async def test_valve_read_state(hass: HomeAssistant) -> None: assert switch_1.attributes["in_use"] is False -async def test_char_switch_change_state(hass: HomeAssistant) -> None: +async def test_char_switch_change_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a characteristic on and off again.""" helper = await setup_test_component( - hass, create_char_switch_service, suffix="pairing_mode" + hass, get_next_aid(), create_char_switch_service, suffix="pairing_mode" ) await hass.services.async_call( @@ -199,10 +211,12 @@ async def test_char_switch_change_state(hass: HomeAssistant) -> None: ) -async def test_char_switch_read_state(hass: HomeAssistant) -> None: +async def test_char_switch_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a 
HomeKit characteristic switch.""" helper = await setup_test_component( - hass, create_char_switch_service, suffix="pairing_mode" + hass, get_next_aid(), create_char_switch_service, suffix="pairing_mode" ) # Simulate that someone switched on the device in the real world not via HA @@ -221,7 +235,9 @@ async def test_char_switch_read_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a switch unique id.""" aid = get_next_aid() @@ -235,7 +251,9 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-0001-aid:{aid}-sid:8-cid:9", ) - await setup_test_component(hass, create_char_switch_service, suffix="pairing_mode") + await setup_test_component( + hass, aid, create_char_switch_service, suffix="pairing_mode" + ) assert ( entity_registry.async_get(switch_entry.entity_id).unique_id diff --git a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json index 922601ca733..e67ffd78467 100644 --- a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json +++ b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json @@ -7347,6 +7347,551 @@ "serializedGlobalTradeItemNumber": "3014F7110000000000000DLD", "type": "DOOR_LOCK_DRIVE", "updateState": "UP_TO_DATE" + }, + "3014F7110000000000ESIGAS": { + "availableFirmwareVersion": "1.2.2", + "connectionType": "HMIP_RF", + "deviceArchetype": "HMIP", + "firmwareVersion": "1.2.2", + "firmwareVersionInteger": 66050, + "functionalChannels": { + "0": { + "busConfigMismatch": null, + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "controlsMountingOrientation": null, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + 
"deviceDriveError": null, + "deviceDriveModeError": null, + "deviceId": "3014F7110000000000ESIGAS", + "deviceOperationMode": null, + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "displayContrast": null, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": ["00000000-0000-0000-0000-000000000047"], + "index": 0, + "label": "", + "lockJammed": null, + "lowBat": false, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + "powerShortCircuit": null, + "profilePeriodLimitReached": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -73, + "rssiPeerValue": null, + "sensorCommunicationError": false, + "sensorError": false, + "shortCircuitDataLine": null, + "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": false, + "IFeatureDeviceOverheated": false, + "IFeatureDeviceOverloaded": false, + "IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceSensorCommunicationError": true, + "IFeatureDeviceSensorError": true, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false, + "IFeatureMulticastRouter": false, + "IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": false, + "IFeatureRssiValue": true, + 
"IFeatureShortCircuitDataLine": false, + "IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": true, + "IOptionalFeatureMountingOrientation": false + }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "channelRole": "ENERGY_SENSOR", + "connectedEnergySensorType": "ES_GAS", + "currentGasFlow": 1.03, + "currentPowerConsumption": null, + "deviceId": "3014F7110000000000ESIGAS", + "energyCounterOne": null, + "energyCounterOneType": "UNKNOWN", + "energyCounterThree": null, + "energyCounterThreeType": "UNKNOWN", + "energyCounterTwo": null, + "energyCounterTwoType": "UNKNOWN", + "functionalChannelType": "ENERGY_SENSORS_INTERFACE_CHANNEL", + "gasVolume": 1019.26, + "gasVolumePerImpulse": 0.01, + "groupIndex": 1, + "groups": ["00000000-0000-0000-0000-000000000077"], + "impulsesPerKWH": 10000, + "index": 1, + "label": "", + "supportedOptionalFeatures": { + "IOptionalFeatureCounterOffset": true, + "IOptionalFeatureCurrentGasFlow": true, + "IOptionalFeatureCurrentPowerConsumption": false, + "IOptionalFeatureEnergyCounterOne": false, + "IOptionalFeatureEnergyCounterThree": false, + "IOptionalFeatureEnergyCounterTwo": false, + "IOptionalFeatureGasVolume": true, + "IOptionalFeatureGasVolumePerImpulse": true, + "IOptionalFeatureImpulsesPerKWH": false + } + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F7110000000000ESIGAS", + "label": "esi_gas", + "lastStatusUpdate": 1708880308351, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, + "manufacturerCode": 1, + "measuredAttributes": { + "1": { + "currentGasFlow": true, + "gasVolume": true + } + }, + "modelId": 509, + "modelType": "HmIP-ESI", + "oem": "eQ-3", + 
"permanentlyReachable": false, + "serializedGlobalTradeItemNumber": "3014F7110000000000ESIGAS", + "type": "ENERGY_SENSORS_INTERFACE", + "updateState": "UP_TO_DATE" + }, + "3014F7110000000000ESIIEC": { + "availableFirmwareVersion": "0.0.0", + "connectionType": "HMIP_RF", + "deviceArchetype": "HMIP", + "firmwareVersion": "1.0.6", + "firmwareVersionInteger": 65542, + "functionalChannels": { + "0": { + "busConfigMismatch": null, + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "controlsMountingOrientation": null, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + "deviceDriveError": null, + "deviceDriveModeError": null, + "deviceId": "3014F7110000000000ESIIEC", + "deviceOperationMode": null, + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "displayContrast": null, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": ["00000000-0000-0000-0000-000000000031"], + "index": 0, + "label": "", + "lockJammed": null, + "lowBat": false, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + "powerShortCircuit": null, + "profilePeriodLimitReached": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -94, + "rssiPeerValue": null, + "sensorCommunicationError": false, + "sensorError": true, + "shortCircuitDataLine": null, + "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": false, + 
"IFeatureDeviceOverheated": false, + "IFeatureDeviceOverloaded": false, + "IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceSensorCommunicationError": true, + "IFeatureDeviceSensorError": true, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false, + "IFeatureMulticastRouter": false, + "IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": false, + "IFeatureRssiValue": true, + "IFeatureShortCircuitDataLine": false, + "IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": true, + "IOptionalFeatureMountingOrientation": false + }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "channelRole": null, + "connectedEnergySensorType": "ES_IEC", + "currentGasFlow": null, + "currentPowerConsumption": 432, + "deviceId": "3014F7110000000000ESIIEC", + "energyCounterOne": 194.0, + "energyCounterOneType": "ENERGY_COUNTER_USAGE_HIGH_TARIFF", + "energyCounterThree": 3.0, + "energyCounterThreeType": "ENERGY_COUNTER_INPUT_SINGLE_TARIFF", + "energyCounterTwo": 0.0, + "energyCounterTwoType": "ENERGY_COUNTER_USAGE_LOW_TARIFF", + "functionalChannelType": "ENERGY_SENSORS_INTERFACE_CHANNEL", + "gasVolume": null, + "gasVolumePerImpulse": 0.01, + "groupIndex": 1, + "groups": ["00000000-0000-0000-0000-000000000051"], + "impulsesPerKWH": 10000, + "index": 1, + "label": "", + "supportedOptionalFeatures": { + "IOptionalFeatureCounterOffset": false, + "IOptionalFeatureCurrentGasFlow": false, + 
"IOptionalFeatureCurrentPowerConsumption": true, + "IOptionalFeatureEnergyCounterOne": true, + "IOptionalFeatureEnergyCounterThree": true, + "IOptionalFeatureEnergyCounterTwo": true, + "IOptionalFeatureGasVolume": false, + "IOptionalFeatureGasVolumePerImpulse": false, + "IOptionalFeatureImpulsesPerKWH": false + } + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F7110000000000ESIIEC", + "label": "esi_iec", + "lastStatusUpdate": 1702420986697, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, + "manufacturerCode": 1, + "measuredAttributes": {}, + "modelId": 509, + "modelType": "HmIP-ESI", + "oem": "eQ-3", + "permanentlyReachable": false, + "serializedGlobalTradeItemNumber": "3014F7110000000000ESIIEC", + "type": "ENERGY_SENSORS_INTERFACE", + "updateState": "UP_TO_DATE" + }, + "3014F711000000000ESIIEC2": { + "availableFirmwareVersion": "0.0.0", + "connectionType": "HMIP_RF", + "deviceArchetype": "HMIP", + "firmwareVersion": "1.0.6", + "firmwareVersionInteger": 65542, + "functionalChannels": { + "0": { + "busConfigMismatch": null, + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "controlsMountingOrientation": null, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + "deviceDriveError": null, + "deviceDriveModeError": null, + "deviceId": "3014F711000000000ESIIEC2", + "deviceOperationMode": null, + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "displayContrast": null, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": ["00000000-0000-0000-0000-000000000031"], + "index": 0, + "label": "", + "lockJammed": null, + "lowBat": false, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + 
"powerShortCircuit": null, + "profilePeriodLimitReached": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -94, + "rssiPeerValue": null, + "sensorCommunicationError": false, + "sensorError": true, + "shortCircuitDataLine": null, + "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": false, + "IFeatureDeviceOverheated": false, + "IFeatureDeviceOverloaded": false, + "IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceSensorCommunicationError": true, + "IFeatureDeviceSensorError": true, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false, + "IFeatureMulticastRouter": false, + "IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": false, + "IFeatureRssiValue": true, + "IFeatureShortCircuitDataLine": false, + "IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": true, + "IOptionalFeatureMountingOrientation": false + }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "channelRole": null, + "connectedEnergySensorType": "ES_IEC", + "currentGasFlow": null, + "currentPowerConsumption": 432, + "deviceId": 
"3014F711000000000ESIIEC2", + "energyCounterOne": 194.0, + "energyCounterOneType": "ENERGY_COUNTER_USAGE_HIGH_TARIFF", + "energyCounterThree": 3.0, + "energyCounterThreeType": "UNKNOWN", + "energyCounterTwo": 0.0, + "energyCounterTwoType": "ENERGY_COUNTER_USAGE_LOW_TARIFF", + "functionalChannelType": "ENERGY_SENSORS_INTERFACE_CHANNEL", + "gasVolume": null, + "gasVolumePerImpulse": 0.01, + "groupIndex": 1, + "groups": ["00000000-0000-0000-0000-000000000051"], + "impulsesPerKWH": 10000, + "index": 1, + "label": "", + "supportedOptionalFeatures": { + "IOptionalFeatureCounterOffset": false, + "IOptionalFeatureCurrentGasFlow": false, + "IOptionalFeatureCurrentPowerConsumption": true, + "IOptionalFeatureEnergyCounterOne": true, + "IOptionalFeatureEnergyCounterThree": true, + "IOptionalFeatureEnergyCounterTwo": true, + "IOptionalFeatureGasVolume": false, + "IOptionalFeatureGasVolumePerImpulse": false, + "IOptionalFeatureImpulsesPerKWH": false + } + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F711000000000ESIIEC2", + "label": "esi_iec2", + "lastStatusUpdate": 1702420986697, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, + "manufacturerCode": 1, + "measuredAttributes": {}, + "modelId": 509, + "modelType": "HmIP-ESI", + "oem": "eQ-3", + "permanentlyReachable": false, + "serializedGlobalTradeItemNumber": "3014F711000000000ESIIEC2", + "type": "ENERGY_SENSORS_INTERFACE", + "updateState": "UP_TO_DATE" + }, + "3014F7110000000000ESIIE3": { + "availableFirmwareVersion": "0.0.0", + "connectionType": "HMIP_RF", + "deviceArchetype": "HMIP", + "firmwareVersion": "1.0.6", + "firmwareVersionInteger": 65542, + "functionalChannels": { + "0": { + "busConfigMismatch": null, + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "controlsMountingOrientation": null, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + 
"deviceDriveError": null, + "deviceDriveModeError": null, + "deviceId": "3014F7110000000000ESIIE3", + "deviceOperationMode": null, + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "displayContrast": null, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": ["00000000-0000-0000-0000-000000000031"], + "index": 0, + "label": "", + "lockJammed": null, + "lowBat": false, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + "powerShortCircuit": null, + "profilePeriodLimitReached": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -94, + "rssiPeerValue": null, + "sensorCommunicationError": false, + "sensorError": true, + "shortCircuitDataLine": null, + "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": false, + "IFeatureDeviceOverheated": false, + "IFeatureDeviceOverloaded": false, + "IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceSensorCommunicationError": true, + "IFeatureDeviceSensorError": true, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false, + "IFeatureMulticastRouter": false, + "IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": false, + "IFeatureRssiValue": true, + 
"IFeatureShortCircuitDataLine": false, + "IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": true, + "IOptionalFeatureMountingOrientation": false + }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "channelRole": "ENERGY_SENSOR", + "connectedEnergySensorType": "ES_LED", + "currentGasFlow": null, + "currentPowerConsumption": 189.15, + "deviceId": "3014F7110000000000ESIIE3", + "energyCounterOne": 23825.748, + "energyCounterOneType": "UNKNOWN", + "energyCounterThree": null, + "energyCounterThreeType": "UNKNOWN", + "energyCounterTwo": null, + "energyCounterTwoType": "UNKNOWN", + "functionalChannelType": "ENERGY_SENSORS_INTERFACE_CHANNEL", + "gasVolume": null, + "gasVolumePerImpulse": 0.01, + "groupIndex": 1, + "groups": ["00000000-0000-0000-0000-000000000057"], + "impulsesPerKWH": 1000, + "index": 1, + "label": "", + "supportedOptionalFeatures": { + "IOptionalFeatureCounterOffset": true, + "IOptionalFeatureCurrentGasFlow": false, + "IOptionalFeatureCurrentPowerConsumption": true, + "IOptionalFeatureEnergyCounterOne": true, + "IOptionalFeatureEnergyCounterThree": false, + "IOptionalFeatureEnergyCounterTwo": false, + "IOptionalFeatureGasVolume": false, + "IOptionalFeatureGasVolumePerImpulse": false, + "IOptionalFeatureImpulsesPerKWH": true + } + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F7110000000000ESIIE3", + "label": "esi_led", + "lastStatusUpdate": 1702420986697, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, + "manufacturerCode": 1, + "measuredAttributes": {}, + "modelId": 509, + "modelType": "HmIP-ESI", + "oem": "eQ-3", + "permanentlyReachable": false, + 
"serializedGlobalTradeItemNumber": "3014F7110000000000ESIIE3", + "type": "ENERGY_SENSORS_INTERFACE", + "updateState": "UP_TO_DATE" } }, "groups": { diff --git a/tests/components/homematicip_cloud/helper.py b/tests/components/homematicip_cloud/helper.py index e7d7350f98e..bf20d37f2a3 100644 --- a/tests/components/homematicip_cloud/helper.py +++ b/tests/components/homematicip_cloud/helper.py @@ -132,7 +132,7 @@ class HomeTemplate(Home): def __init__( self, connection=None, home_name="", test_devices=None, test_groups=None - ): + ) -> None: """Init template with connection.""" super().__init__(connection=connection) self.name = home_name diff --git a/tests/components/homematicip_cloud/test_climate.py b/tests/components/homematicip_cloud/test_climate.py index f175e2060df..2b4d023baf8 100644 --- a/tests/components/homematicip_cloud/test_climate.py +++ b/tests/components/homematicip_cloud/test_climate.py @@ -622,18 +622,67 @@ async def test_hmip_climate_services( assert len(home._connection.mock_calls) == 10 not_existing_hap_id = "5555F7110000000000000001" - await hass.services.async_call( - "homematicip_cloud", - "deactivate_vacation", - {"accesspoint_id": not_existing_hap_id}, - blocking=True, - ) - assert home.mock_calls[-1][0] == "deactivate_vacation" - assert home.mock_calls[-1][1] == () + with pytest.raises(ServiceValidationError) as excinfo: + await hass.services.async_call( + "homematicip_cloud", + "deactivate_vacation", + {"accesspoint_id": not_existing_hap_id}, + blocking=True, + ) + assert excinfo.value.translation_domain == HMIPC_DOMAIN + assert excinfo.value.translation_key == "access_point_not_found" # There is no further call on connection. 
assert len(home._connection.mock_calls) == 10 +async def test_hmip_set_home_cooling_mode( + hass: HomeAssistant, mock_hap_with_service +) -> None: + """Test HomematicipSetHomeCoolingMode.""" + + home = mock_hap_with_service.home + + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": HAPID, "cooling": False}, + blocking=True, + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] == (False,) + assert len(home._connection.mock_calls) == 1 + + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": HAPID, "cooling": True}, + blocking=True, + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] + assert len(home._connection.mock_calls) == 2 + + await hass.services.async_call( + "homematicip_cloud", "set_home_cooling_mode", blocking=True + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] + assert len(home._connection.mock_calls) == 3 + + not_existing_hap_id = "5555F7110000000000000001" + with pytest.raises(ServiceValidationError) as excinfo: + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": not_existing_hap_id, "cooling": True}, + blocking=True, + ) + assert excinfo.value.translation_domain == HMIPC_DOMAIN + assert excinfo.value.translation_key == "access_point_not_found" + # There is no further call on connection. 
+ assert len(home._connection.mock_calls) == 3 + + async def test_hmip_heating_group_services( hass: HomeAssistant, default_mock_hap_factory ) -> None: diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index fb7fe7d7deb..074a30e94b2 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -26,7 +26,7 @@ async def test_hmip_load_all_supported_devices( test_devices=None, test_groups=None ) - assert len(mock_hap.hmip_device_by_entity_id) == 278 + assert len(mock_hap.hmip_device_by_entity_id) == 293 async def test_hmip_remove_device( diff --git a/tests/components/homematicip_cloud/test_init.py b/tests/components/homematicip_cloud/test_init.py index 9303a755e89..ad1c8140aea 100644 --- a/tests/components/homematicip_cloud/test_init.py +++ b/tests/components/homematicip_cloud/test_init.py @@ -199,7 +199,7 @@ async def test_setup_services_and_unload_services(hass: HomeAssistant) -> None: # Check services are created hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 config_entries = hass.config_entries.async_entries(HMIPC_DOMAIN) assert len(config_entries) == 1 @@ -232,7 +232,7 @@ async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None: assert await async_setup_component(hass, HMIPC_DOMAIN, {}) hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 config_entries = hass.config_entries.async_entries(HMIPC_DOMAIN) assert len(config_entries) == 2 @@ -241,7 +241,7 @@ async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None: # services still exists hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 # unload the second AP await 
hass.config_entries.async_unload(config_entries[1].entry_id) diff --git a/tests/components/homematicip_cloud/test_sensor.py b/tests/components/homematicip_cloud/test_sensor.py index 3089bb062e5..2b62c46fd72 100644 --- a/tests/components/homematicip_cloud/test_sensor.py +++ b/tests/components/homematicip_cloud/test_sensor.py @@ -511,3 +511,162 @@ async def test_hmip_passage_detector_delta_counter( await async_manipulate_test_data(hass, hmip_device, "leftRightCounterDelta", 190) ha_state = hass.states.get(entity_id) assert ha_state.state == "190" + + +async def test_hmip_esi_iec_current_power_consumption( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC currentPowerConsumption Sensor.""" + entity_id = "sensor.esi_iec_currentPowerConsumption" + entity_name = "esi_iec CurrentPowerConsumption" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_iec"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "432" + + +async def test_hmip_esi_iec_energy_counter_usage_high_tariff( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" + entity_id = "sensor.esi_iec_energy_counter_usage_high_tariff" + entity_name = "esi_iec ENERGY_COUNTER_USAGE_HIGH_TARIFF" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_iec"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "194.0" + + +async def test_hmip_esi_iec_energy_counter_usage_low_tariff( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC ENERGY_COUNTER_USAGE_LOW_TARIFF.""" + entity_id = "sensor.esi_iec_energy_counter_usage_low_tariff" + entity_name = "esi_iec ENERGY_COUNTER_USAGE_LOW_TARIFF" + device_model 
= "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_iec"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "0.0" + + +async def test_hmip_esi_iec_energy_counter_input_single_tariff( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC ENERGY_COUNTER_INPUT_SINGLE_TARIFF.""" + entity_id = "sensor.esi_iec_energy_counter_input_single_tariff" + entity_name = "esi_iec ENERGY_COUNTER_INPUT_SINGLE_TARIFF" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_iec"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "3.0" + + +async def test_hmip_esi_iec_unknown_channel( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test devices are loaded partially.""" + not_existing_entity_id = "sensor.esi_iec2_energy_counter_input_single_tariff" + existing_entity_id = "sensor.esi_iec2_energy_counter_usage_high_tariff" + await default_mock_hap_factory.async_get_mock_hap(test_devices=["esi_iec2"]) + + not_existing_ha_state = hass.states.get(not_existing_entity_id) + existing_ha_state = hass.states.get(existing_entity_id) + + assert not_existing_ha_state is None + assert existing_ha_state.state == "194.0" + + +async def test_hmip_esi_gas_current_gas_flow( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC CurrentGasFlow.""" + entity_id = "sensor.esi_gas_currentgasflow" + entity_name = "esi_gas CurrentGasFlow" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_gas"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "1.03" + + +async def test_hmip_esi_gas_gas_volume( + 
hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC GasVolume.""" + entity_id = "sensor.esi_gas_gasvolume" + entity_name = "esi_gas GasVolume" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_gas"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "1019.26" + + +async def test_hmip_esi_led_current_power_consumption( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC currentPowerConsumption Sensor.""" + entity_id = "sensor.esi_led_currentPowerConsumption" + entity_name = "esi_led CurrentPowerConsumption" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_led"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "189.15" + + +async def test_hmip_esi_led_energy_counter_usage_high_tariff( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" + entity_id = "sensor.esi_led_energy_counter_usage_high_tariff" + entity_name = "esi_led ENERGY_COUNTER_USAGE_HIGH_TARIFF" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_led"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "23825.748" diff --git a/tests/components/homewizard/conftest.py b/tests/components/homewizard/conftest.py index eb638492941..fcfe1e5c189 100644 --- a/tests/components/homewizard/conftest.py +++ b/tests/components/homewizard/conftest.py @@ -1,11 +1,11 @@ """Fixtures for HomeWizard integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from homewizard_energy.errors 
import NotFoundError from homewizard_energy.models import Data, Device, State, System import pytest -from typing_extensions import Generator from homeassistant.components.homewizard.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS diff --git a/tests/components/homewizard/snapshots/test_button.ambr b/tests/components/homewizard/snapshots/test_button.ambr index 5ab108d344c..d5ad9770478 100644 --- a/tests/components/homewizard/snapshots/test_button.ambr +++ b/tests/components/homewizard/snapshots/test_button.ambr @@ -71,9 +71,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', diff --git a/tests/components/homewizard/snapshots/test_diagnostics.ambr b/tests/components/homewizard/snapshots/test_diagnostics.ambr index 7b82056aacb..f8ac80f2536 100644 --- a/tests/components/homewizard/snapshots/test_diagnostics.ambr +++ b/tests/components/homewizard/snapshots/test_diagnostics.ambr @@ -65,6 +65,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', + 'product': dict({ + 'description': 'Measure solar panels, car chargers and more.', + 'model': 'HWE-KWH1', + 'name': 'Wi-Fi kWh Meter 1-phase', + 'url': 'https://www.homewizard.com/kwh-meter/', + }), 'product_name': 'kWh meter', 'product_type': 'HWE-KWH1', 'serial': '**REDACTED**', @@ -148,6 +154,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', + 'product': dict({ + 'description': 'Measure solar panels, car chargers and more.', + 'model': 'HWE-KWH3', + 'name': 'Wi-Fi kWh Meter 3-phase', + 'url': 'https://www.homewizard.com/kwh-meter/', + }), 'product_name': 'KWh meter 3-phase', 'product_type': 'HWE-KWH3', 'serial': '**REDACTED**', @@ -282,6 +294,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '4.19', + 'product': dict({ + 'description': 'The 
HomeWizard P1 Meter gives you detailed insight in your electricity-, gas consumption and solar surplus.', + 'model': 'HWE-P1', + 'name': 'Wi-Fi P1 Meter', + 'url': 'https://www.homewizard.com/p1-meter/', + }), 'product_name': 'P1 meter', 'product_type': 'HWE-P1', 'serial': '**REDACTED**', @@ -365,6 +383,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.03', + 'product': dict({ + 'description': 'Measure and switch every device.', + 'model': 'HWE-SKT', + 'name': 'Wi-Fi Energy Socket', + 'url': 'https://www.homewizard.com/energy-socket/', + }), 'product_name': 'Energy Socket', 'product_type': 'HWE-SKT', 'serial': '**REDACTED**', @@ -452,6 +476,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '4.07', + 'product': dict({ + 'description': 'Measure and switch every device.', + 'model': 'HWE-SKT', + 'name': 'Wi-Fi Energy Socket', + 'url': 'https://www.homewizard.com/energy-socket/', + }), 'product_name': 'Energy Socket', 'product_type': 'HWE-SKT', 'serial': '**REDACTED**', @@ -539,6 +569,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '2.03', + 'product': dict({ + 'description': 'Real-time water consumption insights', + 'model': 'HWE-WTR', + 'name': 'Wi-Fi Watermeter', + 'url': 'https://www.homewizard.com/watermeter/', + }), 'product_name': 'Watermeter', 'product_type': 'HWE-WTR', 'serial': '**REDACTED**', @@ -622,6 +658,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', + 'product': dict({ + 'description': 'Measure solar panels, car chargers and more.', + 'model': 'SDM230-wifi', + 'name': 'Wi-Fi kWh Meter 1-phase', + 'url': 'https://www.homewizard.com/kwh-meter/', + }), 'product_name': 'kWh meter', 'product_type': 'SDM230-wifi', 'serial': '**REDACTED**', @@ -705,6 +747,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', + 'product': dict({ + 'description': 'Measure solar panels, car chargers and more.', + 'model': 'SDM630-wifi', + 'name': 'Wi-Fi kWh Meter 3-phase', + 'url': 
'https://www.homewizard.com/kwh-meter/', + }), 'product_name': 'KWh meter 3-phase', 'product_type': 'SDM630-wifi', 'serial': '**REDACTED**', diff --git a/tests/components/homewizard/snapshots/test_number.ambr b/tests/components/homewizard/snapshots/test_number.ambr index a9c9e45098d..768255c7508 100644 --- a/tests/components/homewizard/snapshots/test_number.ambr +++ b/tests/components/homewizard/snapshots/test_number.ambr @@ -80,9 +80,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -170,9 +172,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index 5e8ddc0d6be..63ee9312a13 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -57,9 +57,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -142,9 +144,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -227,9 +231,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 
1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -312,9 +318,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -397,9 +405,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -482,9 +492,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -570,9 +582,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -655,9 +669,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -740,9 +756,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -825,9 +843,11 @@ 'labels': set({ }), 'manufacturer': 
'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -905,9 +925,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -989,9 +1011,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1074,9 +1098,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1159,9 +1185,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1244,9 +1272,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1329,9 +1359,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 
'sw_version': '3.06', @@ -1414,9 +1446,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1499,9 +1533,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1584,9 +1620,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1669,9 +1707,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1754,9 +1794,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1839,9 +1881,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -1924,9 +1968,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': 
None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2012,9 +2058,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2097,9 +2145,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2182,9 +2232,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2267,9 +2319,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2355,9 +2409,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2443,9 +2499,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2531,9 +2589,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi 
kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2616,9 +2676,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2701,9 +2763,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2786,9 +2850,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2871,9 +2937,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -2956,9 +3024,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -3041,9 +3111,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -3126,9 +3198,11 @@ 'labels': 
set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -3206,9 +3280,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -3290,9 +3366,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -3372,9 +3450,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -3457,9 +3537,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -3542,9 +3624,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -3627,9 +3711,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -3707,9 
+3793,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -3792,9 +3880,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -3877,9 +3967,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -3962,9 +4054,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4047,9 +4141,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4132,9 +4228,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4217,9 +4315,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4302,9 +4402,11 @@ 
'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4387,9 +4489,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4472,9 +4576,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4557,9 +4663,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4642,9 +4750,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4722,9 +4832,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4804,9 +4916,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4892,9 +5006,11 @@ 'labels': 
set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -4972,9 +5088,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5060,9 +5178,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5148,9 +5268,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5236,9 +5358,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5316,9 +5440,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5396,9 +5522,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5490,9 +5618,11 @@ 'labels': set({ }), 
'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5575,9 +5705,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5660,9 +5792,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5745,9 +5879,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5830,9 +5966,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5910,9 +6048,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -5990,9 +6130,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -6070,9 +6212,11 @@ 'labels': set({ }), 
'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -6150,9 +6294,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -6230,9 +6376,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -6310,9 +6458,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -6394,9 +6544,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -6474,9 +6626,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -6555,8 +6709,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Gas meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'gas_meter_G001', 'suggested_area': None, 'sw_version': None, @@ -6636,8 +6792,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': 
None, 'name': 'Heat meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'heat_meter_H001', 'suggested_area': None, 'sw_version': None, @@ -6717,8 +6875,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Inlet heat meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'inlet_heat_meter_IH001', 'suggested_area': None, 'sw_version': None, @@ -6797,8 +6957,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Warm water meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'warm_water_meter_WW001', 'suggested_area': None, 'sw_version': None, @@ -6878,8 +7040,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Water meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'water_meter_W001', 'suggested_area': None, 'sw_version': None, @@ -6962,9 +7126,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7044,9 +7210,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7129,9 +7297,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7214,9 +7384,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7299,9 +7471,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7379,9 +7553,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7464,9 +7640,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7549,9 +7727,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7634,9 +7814,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7719,9 +7901,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7804,9 +7988,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7889,9 +8075,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -7974,9 +8162,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8059,9 +8249,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8144,9 +8336,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8229,9 +8423,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8314,9 +8510,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8394,9 +8592,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8476,9 +8676,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8564,9 +8766,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8644,9 +8848,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8732,9 +8938,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8820,9 +9028,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8908,9 +9118,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -8988,9 +9200,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9068,9 +9282,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9162,9 +9378,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9247,9 +9465,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9332,9 +9552,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9417,9 +9639,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9502,9 +9726,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9582,9 +9808,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9662,9 +9890,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9742,9 +9972,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9822,9 +10054,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9902,9 +10136,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -9982,9 +10218,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -10066,9 +10304,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -10146,9 +10386,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -10227,8 +10469,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Gas meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'gas_meter_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'suggested_area': None, 'sw_version': None, @@ -10308,8 +10552,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Heat meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'heat_meter_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'suggested_area': None, 'sw_version': None, @@ -10389,8 +10635,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Inlet heat meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'inlet_heat_meter_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'suggested_area': None, 'sw_version': None, @@ -10469,8 +10717,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Warm water meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'warm_water_meter_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'suggested_area': None, 'sw_version': None, @@ -10550,8 +10800,10 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Water meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'water_meter_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'suggested_area': None, 'sw_version': None, @@ -10634,9 +10886,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -10716,9 +10970,11 @@ 'labels': set({ }), 'manufacturer': 
'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -10801,9 +11057,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -10886,9 +11144,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -10971,9 +11231,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11056,9 +11318,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11141,9 +11405,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11226,9 +11492,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11311,9 +11579,11 @@ 'labels': set({ }), 
'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11396,9 +11666,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11481,9 +11753,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11566,9 +11840,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11651,9 +11927,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11736,9 +12014,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11821,9 +12101,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11906,9 +12188,11 @@ 'labels': set({ 
}), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -11986,9 +12270,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12074,9 +12360,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12154,9 +12442,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12242,9 +12532,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12330,9 +12622,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12418,9 +12712,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12503,9 +12799,11 @@ 'labels': 
set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12588,9 +12886,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12673,9 +12973,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12758,9 +13060,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12838,9 +13142,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12918,9 +13224,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -12998,9 +13306,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -13078,9 +13388,11 @@ 
'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -13158,9 +13470,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -13238,9 +13552,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.19', @@ -13322,9 +13638,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -13407,9 +13725,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -13492,9 +13812,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -13580,9 +13902,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 
'3.03', @@ -13668,9 +13992,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -13748,9 +14074,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -13832,9 +14160,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -13917,9 +14247,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14002,9 +14334,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14087,9 +14421,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14172,9 +14508,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14257,9 +14595,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14345,9 +14685,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14430,9 +14772,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14518,9 +14862,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14603,9 +14949,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14688,9 +15036,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14768,9 +15118,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 
'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -14852,9 +15204,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '2.03', @@ -14937,9 +15291,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '2.03', @@ -15021,9 +15377,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '2.03', @@ -15101,9 +15459,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '2.03', @@ -15185,9 +15545,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -15270,9 +15632,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -15355,9 +15719,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', 
+ 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -15440,9 +15806,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -15525,9 +15893,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -15610,9 +15980,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -15698,9 +16070,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -15783,9 +16157,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -15868,9 +16244,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 
'suggested_area': None, 'sw_version': '3.06', @@ -15953,9 +16331,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16033,9 +16413,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16117,9 +16499,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16202,9 +16586,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16287,9 +16673,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16372,9 +16760,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16457,9 +16847,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh 
Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16542,9 +16934,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16627,9 +17021,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16712,9 +17108,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16797,9 +17195,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16882,9 +17282,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -16967,9 +17369,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 
'sw_version': '3.06', @@ -17052,9 +17456,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17140,9 +17546,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17225,9 +17633,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17310,9 +17720,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17395,9 +17807,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17483,9 +17897,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17571,9 +17987,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 
'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17659,9 +18077,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17744,9 +18164,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17829,9 +18251,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17914,9 +18338,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -17999,9 +18425,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -18084,9 +18512,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ 
-18169,9 +18599,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -18254,9 +18686,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -18334,9 +18768,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', diff --git a/tests/components/homewizard/snapshots/test_switch.ambr b/tests/components/homewizard/snapshots/test_switch.ambr index 99a5bcab6cb..68a351c1ebb 100644 --- a/tests/components/homewizard/snapshots/test_switch.ambr +++ b/tests/components/homewizard/snapshots/test_switch.ambr @@ -70,9 +70,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -150,9 +152,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -231,9 +235,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 
'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -311,9 +317,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -391,9 +399,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.03', @@ -472,9 +482,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -552,9 +564,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -632,9 +646,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '4.07', @@ -712,9 +728,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '2.03', @@ -792,9 +810,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 
'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', @@ -872,9 +892,11 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '3.06', diff --git a/tests/components/homewizard/test_init.py b/tests/components/homewizard/test_init.py index 969be7a604c..33412900677 100644 --- a/tests/components/homewizard/test_init.py +++ b/tests/components/homewizard/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from homewizard_energy.errors import DisabledError, HomeWizardEnergyException +from homewizard_energy.errors import DisabledError import pytest from homeassistant.components.homewizard.const import DOMAIN @@ -97,152 +97,6 @@ async def test_load_removes_reauth_flow( assert len(flows) == 0 -@pytest.mark.parametrize( - "exception", - [ - HomeWizardEnergyException, - Exception, - ], -) -async def test_load_handles_homewizardenergy_exception( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_homewizardenergy: MagicMock, - exception: Exception, -) -> None: - """Test setup handles exception from API.""" - mock_homewizardenergy.device.side_effect = exception - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state in ( - ConfigEntryState.SETUP_RETRY, - ConfigEntryState.SETUP_ERROR, - ) - - -@pytest.mark.parametrize( - ("device_fixture", "old_unique_id", "new_unique_id"), - [ - ( - "HWE-SKT-11", - "aabbccddeeff_total_power_import_t1_kwh", - "aabbccddeeff_total_power_import_kwh", - ), - ( - "HWE-SKT-11", - "aabbccddeeff_total_power_export_t1_kwh", - "aabbccddeeff_total_power_export_kwh", - ), - ( - "HWE-SKT-21", - 
"aabbccddeeff_total_power_import_t1_kwh", - "aabbccddeeff_total_power_import_kwh", - ), - ( - "HWE-SKT-21", - "aabbccddeeff_total_power_export_t1_kwh", - "aabbccddeeff_total_power_export_kwh", - ), - ], -) -@pytest.mark.usefixtures("mock_homewizardenergy") -async def test_sensor_migration( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - old_unique_id: str, - new_unique_id: str, -) -> None: - """Test total power T1 sensors are migrated.""" - mock_config_entry.add_to_hass(hass) - - entity: er.RegistryEntry = entity_registry.async_get_or_create( - domain=Platform.SENSOR, - platform=DOMAIN, - unique_id=old_unique_id, - config_entry=mock_config_entry, - ) - - assert entity.unique_id == old_unique_id - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - entity_migrated = entity_registry.async_get(entity.entity_id) - assert entity_migrated - assert entity_migrated.unique_id == new_unique_id - assert entity_migrated.previous_unique_id == old_unique_id - - -@pytest.mark.parametrize( - ("device_fixture", "old_unique_id", "new_unique_id"), - [ - ( - "HWE-SKT-11", - "aabbccddeeff_total_power_import_t1_kwh", - "aabbccddeeff_total_power_import_kwh", - ), - ( - "HWE-SKT-11", - "aabbccddeeff_total_power_export_t1_kwh", - "aabbccddeeff_total_power_export_kwh", - ), - ( - "HWE-SKT-21", - "aabbccddeeff_total_power_import_t1_kwh", - "aabbccddeeff_total_power_import_kwh", - ), - ( - "HWE-SKT-21", - "aabbccddeeff_total_power_export_t1_kwh", - "aabbccddeeff_total_power_export_kwh", - ), - ], -) -@pytest.mark.usefixtures("mock_homewizardenergy") -async def test_sensor_migration_does_not_trigger( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - old_unique_id: str, - new_unique_id: str, -) -> None: - """Test total power T1 sensors are not migrated when not possible.""" - mock_config_entry.add_to_hass(hass) - - old_entity: 
er.RegistryEntry = entity_registry.async_get_or_create( - domain=Platform.SENSOR, - platform=DOMAIN, - unique_id=old_unique_id, - config_entry=mock_config_entry, - ) - - new_entity: er.RegistryEntry = entity_registry.async_get_or_create( - domain=Platform.SENSOR, - platform=DOMAIN, - unique_id=new_unique_id, - config_entry=mock_config_entry, - ) - - assert old_entity.unique_id == old_unique_id - assert new_entity.unique_id == new_unique_id - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - entity = entity_registry.async_get(old_entity.entity_id) - assert entity - assert entity.unique_id == old_unique_id - assert entity.previous_unique_id is None - - entity = entity_registry.async_get(new_entity.entity_id) - assert entity - assert entity.unique_id == new_unique_id - assert entity.previous_unique_id is None - - @pytest.mark.parametrize( ("device_fixture", "old_unique_id", "new_unique_id"), [ diff --git a/tests/components/homeworks/conftest.py b/tests/components/homeworks/conftest.py index ca0e08e9215..9562063ab97 100644 --- a/tests/components/homeworks/conftest.py +++ b/tests/components/homeworks/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Lutron Homeworks Series 4 and 8 tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.homeworks.const import ( CONF_ADDR, @@ -17,10 +17,55 @@ from homeassistant.components.homeworks.const import ( CONF_RELEASE_DELAY, DOMAIN, ) -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, +) from tests.common import MockConfigEntry +CONFIG_ENTRY_OPTIONS = { + CONF_CONTROLLER_ID: "main_controller", + CONF_HOST: "192.168.0.1", + CONF_PORT: 1234, + CONF_DIMMERS: [ + { + CONF_ADDR: "[02:08:01:01]", + CONF_NAME: "Foyer 
Sconces", + CONF_RATE: 1.0, + } + ], + CONF_KEYPADS: [ + { + CONF_ADDR: "[02:08:02:01]", + CONF_NAME: "Foyer Keypad", + CONF_BUTTONS: [ + { + CONF_NAME: "Morning", + CONF_NUMBER: 1, + CONF_LED: True, + CONF_RELEASE_DELAY: None, + }, + { + CONF_NAME: "Relax", + CONF_NUMBER: 2, + CONF_LED: True, + CONF_RELEASE_DELAY: None, + }, + { + CONF_NAME: "Dim up", + CONF_NUMBER: 3, + CONF_LED: False, + CONF_RELEASE_DELAY: 0.2, + }, + ], + } + ], +} + @pytest.fixture def mock_config_entry() -> MockConfigEntry: @@ -28,45 +73,19 @@ def mock_config_entry() -> MockConfigEntry: return MockConfigEntry( title="Lutron Homeworks", domain=DOMAIN, - data={}, - options={ - CONF_CONTROLLER_ID: "main_controller", - CONF_HOST: "192.168.0.1", - CONF_PORT: 1234, - CONF_DIMMERS: [ - { - CONF_ADDR: "[02:08:01:01]", - CONF_NAME: "Foyer Sconces", - CONF_RATE: 1.0, - } - ], - CONF_KEYPADS: [ - { - CONF_ADDR: "[02:08:02:01]", - CONF_NAME: "Foyer Keypad", - CONF_BUTTONS: [ - { - CONF_NAME: "Morning", - CONF_NUMBER: 1, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Relax", - CONF_NUMBER: 2, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Dim up", - CONF_NUMBER: 3, - CONF_LED: False, - CONF_RELEASE_DELAY: 0.2, - }, - ], - } - ], - }, + data={CONF_PASSWORD: None, CONF_USERNAME: None}, + options=CONFIG_ENTRY_OPTIONS, + ) + + +@pytest.fixture +def mock_config_entry_username_password() -> MockConfigEntry: + """Return the default mocked config entry with credentials.""" + return MockConfigEntry( + title="Lutron Homeworks", + domain=DOMAIN, + data={CONF_PASSWORD: "hunter2", CONF_USERNAME: "username"}, + options=CONFIG_ENTRY_OPTIONS, ) diff --git a/tests/components/homeworks/test_binary_sensor.py b/tests/components/homeworks/test_binary_sensor.py index 0b21ae3b773..4bd42cc0a59 100644 --- a/tests/components/homeworks/test_binary_sensor.py +++ b/tests/components/homeworks/test_binary_sensor.py @@ -30,7 +30,7 @@ async def test_binary_sensor_attributes_state_update( await 
hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert entity_id in hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN) diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index 8f5334b21f9..d0693531006 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -2,6 +2,7 @@ from unittest.mock import ANY, MagicMock +from pyhomeworks import exceptions as hw_exceptions import pytest from pytest_unordered import unordered @@ -17,7 +18,13 @@ from homeassistant.components.homeworks.const import ( DOMAIN, ) from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -45,7 +52,7 @@ async def test_user_flow( ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Main controller" - assert result["data"] == {} + assert result["data"] == {"password": None, "username": None} assert result["options"] == { "controller_id": "main_controller", "dimmers": [], @@ -53,9 +60,107 @@ async def test_user_flow( "keypads": [], "port": 1234, } - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) mock_controller.close.assert_called_once_with() - mock_controller.join.assert_called_once_with() + mock_controller.join.assert_not_called() + + +async def test_user_flow_credentials( + hass: HomeAssistant, mock_homeworks: MagicMock, 
mock_setup_entry +) -> None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + CONF_USERNAME: "username", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Main controller" + assert result["data"] == {"password": "hunter2", "username": "username"} + assert result["options"] == { + "controller_id": "main_controller", + "dimmers": [], + "host": "192.168.0.1", + "keypads": [], + "port": 1234, + } + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + mock_controller.close.assert_called_once_with() + mock_controller.join.assert_not_called() + + +async def test_user_flow_credentials_user_only( + hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry +) -> None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PORT: 1234, + CONF_USERNAME: "username", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Main controller" + assert result["data"] == {"password": None, "username": "username"} + assert result["options"] == { + "controller_id": "main_controller", + "dimmers": [], + "host": "192.168.0.1", + "keypads": [], + "port": 1234, + } + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, "username", None) + 
mock_controller.close.assert_called_once_with() + mock_controller.join.assert_not_called() + + +async def test_user_flow_credentials_password_only( + hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry +) -> None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "need_username_with_password"} async def test_user_flow_already_exists( @@ -96,7 +201,12 @@ async def test_user_flow_already_exists( @pytest.mark.parametrize( ("side_effect", "error"), - [(ConnectionError, "connection_error"), (Exception, "unknown_error")], + [ + (hw_exceptions.HomeworksConnectionFailed, "connection_error"), + (hw_exceptions.HomeworksInvalidCredentialsProvided, "invalid_credentials"), + (hw_exceptions.HomeworksNoCredentialsProvided, "credentials_needed"), + (Exception, "unknown_error"), + ], ) async def test_user_flow_cannot_connect( hass: HomeAssistant, @@ -266,6 +376,32 @@ async def test_reconfigure_flow_flow_no_change( } +async def test_reconfigure_flow_credentials_password_only( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock +) -> None: + """Test reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + 
user_input={ + CONF_HOST: "192.168.0.2", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {"base": "need_username_with_password"} + + async def test_options_add_light_flow( hass: HomeAssistant, mock_empty_config_entry: MockConfigEntry, @@ -432,7 +568,14 @@ async def test_options_add_remove_light_flow( ) -@pytest.mark.parametrize("keypad_address", ["[02:08:03:01]", "[02:08:03]"]) +@pytest.mark.parametrize( + "keypad_address", + [ + "[02:08:03]", + "[02:08:03:01]", + "[02:08:03:01:00]", + ], +) async def test_options_add_remove_keypad_flow( hass: HomeAssistant, mock_config_entry: MockConfigEntry, diff --git a/tests/components/homeworks/test_init.py b/tests/components/homeworks/test_init.py index 87aabb6258f..2a4bd28138e 100644 --- a/tests/components/homeworks/test_init.py +++ b/tests/components/homeworks/test_init.py @@ -2,12 +2,18 @@ from unittest.mock import ANY, MagicMock -from pyhomeworks.pyhomeworks import HW_BUTTON_PRESSED, HW_BUTTON_RELEASED +from pyhomeworks import exceptions as hw_exceptions +from pyhomeworks.pyhomeworks import ( + HW_BUTTON_PRESSED, + HW_BUTTON_RELEASED, + HW_LOGIN_INCORRECT, +) import pytest from homeassistant.components.homeworks import EVENT_BUTTON_PRESS, EVENT_BUTTON_RELEASE from homeassistant.components.homeworks.const import DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -25,7 +31,7 @@ async def test_load_unload_config_entry( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) await hass.config_entries.async_unload(mock_config_entry.entry_id) 
await hass.async_block_till_done() @@ -34,13 +40,60 @@ async def test_load_unload_config_entry( assert mock_config_entry.state is ConfigEntryState.NOT_LOADED +async def test_load_config_entry_with_credentials( + hass: HomeAssistant, + mock_config_entry_username_password: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test the Homeworks configuration entry loading/unloading.""" + mock_config_entry_username_password.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry_username_password.state is ConfigEntryState.LOADED + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + + await hass.config_entries.async_unload(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry_username_password.state is ConfigEntryState.NOT_LOADED + + +async def test_controller_credentials_changed( + hass: HomeAssistant, + mock_config_entry_username_password: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test controller credentials changed. + + Note: This just ensures we don't blow up when credentials changed, in the future a + reauth flow should be added. 
+ """ + mock_config_entry_username_password.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry_username_password.state is ConfigEntryState.LOADED + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + hw_callback = mock_homeworks.mock_calls[0][1][2] + + hw_callback(HW_LOGIN_INCORRECT, []) + + async def test_config_entry_not_ready( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock, ) -> None: """Test the Homeworks configuration entry not ready.""" - mock_homeworks.side_effect = ConnectionError + mock_homeworks.return_value.connect.side_effect = ( + hw_exceptions.HomeworksConnectionFailed + ) mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -62,7 +115,7 @@ async def test_keypad_events( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] hw_callback(HW_BUTTON_PRESSED, ["[02:08:02:01]", 1]) @@ -165,3 +218,25 @@ async def test_send_command( blocking=True, ) assert len(mock_controller._send.mock_calls) == 0 + + +async def test_cleanup_on_ha_shutdown( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test cleanup when HA shuts down.""" + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) + mock_controller.stop.assert_not_called() + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + 
await hass.async_block_till_done() + + mock_controller.stop.assert_called_once_with() diff --git a/tests/components/homeworks/test_light.py b/tests/components/homeworks/test_light.py index a5d94f736d5..1cd2951128c 100644 --- a/tests/components/homeworks/test_light.py +++ b/tests/components/homeworks/test_light.py @@ -35,7 +35,7 @@ async def test_light_attributes_state_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert len(mock_controller.request_dimmer_level.mock_calls) == 1 @@ -106,7 +106,7 @@ async def test_light_restore_brightness( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert hass.states.async_entity_ids("light") == unordered([entity_id]) diff --git a/tests/components/honeywell/conftest.py b/tests/components/honeywell/conftest.py index 5c5b6c0a44a..e48664db9ae 100644 --- a/tests/components/honeywell/conftest.py +++ b/tests/components/honeywell/conftest.py @@ -86,6 +86,7 @@ def device(): mock_device.system_mode = "off" mock_device.name = "device1" mock_device.current_temperature = CURRENTTEMPERATURE + mock_device.temperature_unit = "C" mock_device.mac_address = "macaddress1" mock_device.outdoor_temperature = None mock_device.outdoor_humidity = None diff --git a/tests/components/honeywell/snapshots/test_climate.ambr b/tests/components/honeywell/snapshots/test_climate.ambr index d1faf9af9a0..25bb73851c6 100644 --- a/tests/components/honeywell/snapshots/test_climate.ambr +++ b/tests/components/honeywell/snapshots/test_climate.ambr @@ -3,7 +3,7 @@ 
ReadOnlyDict({ 'aux_heat': 'off', 'current_humidity': 50, - 'current_temperature': -6.7, + 'current_temperature': 20, 'fan_action': 'idle', 'fan_mode': 'auto', 'fan_modes': list([ @@ -20,9 +20,9 @@ , ]), 'max_humidity': 99, - 'max_temp': 1.7, + 'max_temp': 35, 'min_humidity': 30, - 'min_temp': -13.9, + 'min_temp': 7, 'permanent_hold': False, 'preset_mode': 'none', 'preset_modes': list([ diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index b57be5f1838..55a55f7d7e7 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -92,14 +92,13 @@ async def test_dynamic_attributes( hass: HomeAssistant, device: MagicMock, config_entry: MagicMock ) -> None: """Test dynamic attributes.""" - await init_integration(hass, config_entry) entity_id = f"climate.{device.name}" state = hass.states.get(entity_id) assert state.state == HVACMode.OFF attributes = state.attributes - assert attributes["current_temperature"] == -6.7 + assert attributes["current_temperature"] == 20 assert attributes["current_humidity"] == 50 device.system_mode = "cool" @@ -114,7 +113,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.COOL attributes = state.attributes - assert attributes["current_temperature"] == -6.1 + assert attributes["current_temperature"] == 21 assert attributes["current_humidity"] == 55 device.system_mode = "heat" @@ -129,7 +128,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT attributes = state.attributes - assert attributes["current_temperature"] == 16.1 + assert attributes["current_temperature"] == 61 assert attributes["current_humidity"] == 50 device.system_mode = "auto" @@ -142,7 +141,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT_COOL attributes = state.attributes - assert 
attributes["current_temperature"] == 16.1 + assert attributes["current_temperature"] == 61 assert attributes["current_humidity"] == 50 @@ -348,7 +347,7 @@ async def test_service_calls_off_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 35}, blocking=True, ) @@ -362,8 +361,8 @@ async def test_service_calls_off_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) device.set_setpoint_heat.reset_mock() device.set_setpoint_heat.side_effect = aiosomecomfort.SomeComfortError @@ -375,13 +374,13 @@ async def test_service_calls_off_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 24.0, + ATTR_TARGET_TEMP_HIGH: 34.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(34) + device.set_setpoint_heat.assert_called_with(24) assert "Invalid temperature" in caplog.text device.set_setpoint_heat.reset_mock() @@ -399,14 +398,14 @@ async def test_service_calls_off_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) reset_mock(device) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 35}, blocking=True, ) device.set_setpoint_heat.assert_not_called() @@ -517,7 +516,7 @@ async def test_service_calls_cool_mode( {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) - 
device.set_hold_cool.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_cool.assert_called_once_with(datetime.time(2, 30), 15) device.set_hold_cool.reset_mock() await hass.services.async_call( @@ -525,13 +524,13 @@ async def test_service_calls_cool_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 15.0, + ATTR_TARGET_TEMP_HIGH: 20.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(20) + device.set_setpoint_heat.assert_called_with(15) caplog.clear() device.set_setpoint_cool.reset_mock() @@ -543,13 +542,13 @@ async def test_service_calls_cool_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 15.0, + ATTR_TARGET_TEMP_HIGH: 20.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(20) + device.set_setpoint_heat.assert_called_with(15) assert "Invalid temperature" in caplog.text reset_mock(device) @@ -733,10 +732,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() device.set_hold_heat.side_effect = aiosomecomfort.SomeComfortError @@ -744,10 +743,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, 
blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() assert "Invalid temperature" in caplog.text @@ -756,10 +755,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() caplog.clear() @@ -773,8 +772,8 @@ async def test_service_calls_heat_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) device.set_setpoint_heat.reset_mock() device.set_setpoint_heat.side_effect = aiosomecomfort.SomeComfortError @@ -789,8 +788,8 @@ async def test_service_calls_heat_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) assert "Invalid temperature" in caplog.text reset_mock(device) @@ -984,8 +983,8 @@ async def test_service_calls_auto_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_once_with(95) - device.set_setpoint_heat.assert_called_once_with(77) + device.set_setpoint_cool.assert_called_once_with(35) + device.set_setpoint_heat.assert_called_once_with(25) reset_mock(device) caplog.clear() diff --git a/tests/components/honeywell/test_init.py b/tests/components/honeywell/test_init.py index cdd767f019d..ac24876413d 100644 --- a/tests/components/honeywell/test_init.py +++ b/tests/components/honeywell/test_init.py @@ -173,14 
+173,13 @@ async def test_remove_stale_device( identifiers={("OtherDomain", 7654321)}, ) + config_entry.add_to_hass(hass) device_registry.async_update_device( device_entry_other.id, add_config_entry_id=config_entry.entry_id, merge_identifiers={(DOMAIN, 7654321)}, ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/honeywell/test_switch.py b/tests/components/honeywell/test_switch.py index 73052871ef1..482b9837b93 100644 --- a/tests/components/honeywell/test_switch.py +++ b/tests/components/honeywell/test_switch.py @@ -24,26 +24,6 @@ async def test_emheat_switch( await init_integration(hass, config_entry) entity_id = f"switch.{device.name}_emergency_heat" - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - device.set_system_mode.assert_not_called() - - device.set_system_mode.reset_mock() - - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - device.set_system_mode.assert_not_called() - - device.system_mode = "heat" - await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, @@ -53,6 +33,7 @@ async def test_emheat_switch( device.set_system_mode.assert_called_once_with("emheat") device.set_system_mode.reset_mock() + device.system_mode = "emheat" await hass.services.async_call( SWITCH_DOMAIN, diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index 20dfe0a3710..7f29f8a4b9f 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -63,7 +63,7 @@ PRIVATE_ADDRESSES = [ ] -async def mock_handler(request): +async def mock_handler(request: web.Request) -> web.Response: """Return if request was authenticated.""" if not request[KEY_AUTHENTICATED]: raise HTTPUnauthorized @@ -75,7 +75,7 @@ async def 
mock_handler(request): @pytest.fixture -def app(hass): +def app(hass: HomeAssistant) -> web.Application: """Fixture to set up a web.Application.""" app = web.Application() app[KEY_HASS] = hass @@ -85,7 +85,7 @@ def app(hass): @pytest.fixture -def app2(hass): +def app2(hass: HomeAssistant) -> web.Application: """Fixture to set up a web.Application without real_ip middleware.""" app = web.Application() app[KEY_HASS] = hass @@ -94,7 +94,9 @@ def app2(hass): @pytest.fixture -def trusted_networks_auth(hass): +def trusted_networks_auth( + hass: HomeAssistant, +) -> trusted_networks.TrustedNetworksAuthProvider: """Load trusted networks auth provider.""" prv = trusted_networks.TrustedNetworksAuthProvider( hass, @@ -114,7 +116,7 @@ async def test_auth_middleware_loaded_by_default(hass: HomeAssistant) -> None: async def test_cant_access_with_password_in_header( - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -131,7 +133,7 @@ async def test_cant_access_with_password_in_header( async def test_cant_access_with_password_in_query( - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -151,7 +153,7 @@ async def test_cant_access_with_password_in_query( async def test_basic_auth_does_not_work( - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass: HomeAssistant, local_auth: HassAuthProvider, @@ -175,8 +177,8 @@ async def test_basic_auth_does_not_work( async def test_cannot_access_with_trusted_ip( hass: HomeAssistant, - app2, - trusted_networks_auth, + app2: web.Application, + trusted_networks_auth: trusted_networks.TrustedNetworksAuthProvider, aiohttp_client: ClientSessionGenerator, hass_owner_user: MockUser, ) -> None: @@ -203,7 +205,7 @@ async def test_cannot_access_with_trusted_ip( async def test_auth_active_access_with_access_token_in_header( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: 
ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -239,8 +241,8 @@ async def test_auth_active_access_with_access_token_in_header( async def test_auth_active_access_with_trusted_ip( hass: HomeAssistant, - app2, - trusted_networks_auth, + app2: web.Application, + trusted_networks_auth: trusted_networks.TrustedNetworksAuthProvider, aiohttp_client: ClientSessionGenerator, hass_owner_user: MockUser, ) -> None: @@ -266,7 +268,7 @@ async def test_auth_active_access_with_trusted_ip( async def test_auth_legacy_support_api_password_cannot_access( - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -287,7 +289,7 @@ async def test_auth_legacy_support_api_password_cannot_access( async def test_auth_access_signed_path_with_refresh_token( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -332,7 +334,7 @@ async def test_auth_access_signed_path_with_refresh_token( async def test_auth_access_signed_path_with_query_param( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -362,7 +364,7 @@ async def test_auth_access_signed_path_with_query_param( async def test_auth_access_signed_path_with_query_param_order( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -403,7 +405,7 @@ async def test_auth_access_signed_path_with_query_param_order( async def test_auth_access_signed_path_with_query_param_safe_param( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -440,7 +442,7 @@ async def test_auth_access_signed_path_with_query_param_safe_param( ) async def test_auth_access_signed_path_with_query_param_tamper( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, 
hass_access_token: str, base_url: str, @@ -466,7 +468,7 @@ async def test_auth_access_signed_path_with_query_param_tamper( async def test_auth_access_signed_path_via_websocket( hass: HomeAssistant, - app, + app: web.Application, hass_ws_client: WebSocketGenerator, hass_read_only_access_token: str, ) -> None: @@ -504,7 +506,7 @@ async def test_auth_access_signed_path_via_websocket( async def test_auth_access_signed_path_with_http( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -539,7 +541,7 @@ async def test_auth_access_signed_path_with_http( async def test_auth_access_signed_path_with_content_user( - hass: HomeAssistant, app, aiohttp_client: ClientSessionGenerator + hass: HomeAssistant, app: web.Application, aiohttp_client: ClientSessionGenerator ) -> None: """Test access signed url uses content user.""" await async_setup_auth(hass, app) @@ -556,7 +558,7 @@ async def test_auth_access_signed_path_with_content_user( async def test_local_only_user_rejected( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -579,7 +581,9 @@ async def test_local_only_user_rejected( assert req.status == HTTPStatus.UNAUTHORIZED -async def test_async_user_not_allowed_do_auth(hass: HomeAssistant, app) -> None: +async def test_async_user_not_allowed_do_auth( + hass: HomeAssistant, app: web.Application +) -> None: """Test for not allowing auth.""" user = await hass.auth.async_create_user("Hello") user.is_active = False diff --git a/tests/components/http/test_cors.py b/tests/components/http/test_cors.py index 1188131cc0f..c0256abb25d 100644 --- a/tests/components/http/test_cors.py +++ b/tests/components/http/test_cors.py @@ -119,7 +119,7 @@ async def test_cors_middleware_with_cors_allowed_view(hass: HomeAssistant) -> No requires_auth = False cors_allowed = True - def __init__(self, url, name): + def __init__(self, url, name) -> None: 
"""Initialize test view.""" self.url = url self.name = name diff --git a/tests/components/http/test_init.py b/tests/components/http/test_init.py index 7a9fb329fcd..2895209b5f9 100644 --- a/tests/components/http/test_init.py +++ b/tests/components/http/test_init.py @@ -543,5 +543,5 @@ async def test_register_static_paths( "Detected code that calls hass.http.register_static_path " "which is deprecated because it does blocking I/O in the " "event loop, instead call " - "`await hass.http.async_register_static_path" + "`await hass.http.async_register_static_paths" ) in caplog.text diff --git a/tests/components/http/test_static.py b/tests/components/http/test_static.py index 3e3f21d5002..2ac7c6ded93 100644 --- a/tests/components/http/test_static.py +++ b/tests/components/http/test_static.py @@ -4,12 +4,12 @@ from http import HTTPStatus from pathlib import Path from aiohttp.test_utils import TestClient -from aiohttp.web_exceptions import HTTPForbidden import pytest from homeassistant.components.http import StaticPathConfig -from homeassistant.components.http.static import CachingStaticResource, _get_file_path -from homeassistant.core import EVENT_HOMEASSISTANT_START, HomeAssistant +from homeassistant.components.http.static import CachingStaticResource +from homeassistant.const import EVENT_HOMEASSISTANT_START +from homeassistant.core import HomeAssistant from homeassistant.helpers.http import KEY_ALLOW_CONFIGURED_CORS from homeassistant.setup import async_setup_component @@ -30,37 +30,19 @@ async def mock_http_client(hass: HomeAssistant, aiohttp_client: ClientSessionGen return await aiohttp_client(hass.http.app, server_kwargs={"skip_url_asserts": True}) -@pytest.mark.parametrize( - ("url", "canonical_url"), - [ - ("//a", "//a"), - ("///a", "///a"), - ("/c:\\a\\b", "/c:%5Ca%5Cb"), - ], -) -async def test_static_path_blocks_anchors( - hass: HomeAssistant, - mock_http_client: TestClient, - tmp_path: Path, - url: str, - canonical_url: str, +async def 
test_static_resource_show_index( + hass: HomeAssistant, mock_http_client: TestClient, tmp_path: Path ) -> None: - """Test static paths block anchors.""" + """Test static resource will return a directory index.""" app = hass.http.app - resource = CachingStaticResource(url, str(tmp_path)) - assert resource.canonical == canonical_url + resource = CachingStaticResource("/", tmp_path, show_index=True) app.router.register_resource(resource) app[KEY_ALLOW_CONFIGURED_CORS](resource) - resp = await mock_http_client.get(canonical_url, allow_redirects=False) - assert resp.status == 403 - - # Tested directly since aiohttp will block it before - # it gets here but we want to make sure if aiohttp ever - # changes we still block it. - with pytest.raises(HTTPForbidden): - _get_file_path(canonical_url, tmp_path) + resp = await mock_http_client.get("/") + assert resp.status == 200 + assert resp.content_type == "text/html" async def test_async_register_static_paths( diff --git a/tests/components/hue/conftest.py b/tests/components/hue/conftest.py index fca950d6b7a..7fc6c5ae33f 100644 --- a/tests/components/hue/conftest.py +++ b/tests/components/hue/conftest.py @@ -2,7 +2,7 @@ import asyncio from collections import deque -import json +from collections.abc import Generator import logging from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -16,27 +16,24 @@ from homeassistant.components import hue from homeassistant.components.hue.v1 import sensor_base as hue_sensor_base from homeassistant.components.hue.v2.device import async_setup_devices from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonArrayType from .const import FAKE_BRIDGE, FAKE_BRIDGE_DEVICE -from tests.common import ( - MockConfigEntry, - async_mock_service, - 
load_fixture, - mock_device_registry, -) +from tests.common import MockConfigEntry, load_json_array_fixture @pytest.fixture(autouse=True) -def no_request_delay(): +def no_request_delay() -> Generator[None]: """Make the request refresh delay 0 for instant tests.""" with patch("homeassistant.components.hue.const.REQUEST_REFRESH_DELAY", 0): yield -def create_mock_bridge(hass, api_version=1): +def create_mock_bridge(hass: HomeAssistant, api_version: int = 1) -> Mock: """Create a mocked HueBridge instance.""" bridge = Mock( hass=hass, @@ -50,10 +47,10 @@ def create_mock_bridge(hass, api_version=1): bridge.logger = logging.getLogger(__name__) if bridge.api_version == 2: - bridge.api = create_mock_api_v2(hass) + bridge.api = create_mock_api_v2() bridge.mock_requests = bridge.api.mock_requests else: - bridge.api = create_mock_api_v1(hass) + bridge.api = create_mock_api_v1() bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = bridge.api.mock_requests bridge.mock_light_responses = bridge.api.mock_light_responses @@ -85,18 +82,18 @@ def create_mock_bridge(hass, api_version=1): @pytest.fixture -def mock_api_v1(hass): +def mock_api_v1() -> Mock: """Mock the Hue V1 api.""" - return create_mock_api_v1(hass) + return create_mock_api_v1() @pytest.fixture -def mock_api_v2(hass): +def mock_api_v2() -> Mock: """Mock the Hue V2 api.""" - return create_mock_api_v2(hass) + return create_mock_api_v2() -def create_mock_api_v1(hass): +def create_mock_api_v1() -> Mock: """Create a mock V1 API.""" api = Mock(spec=aiohue_v1.HueBridgeV1) api.initialize = AsyncMock() @@ -140,12 +137,12 @@ def create_mock_api_v1(hass): @pytest.fixture(scope="package") -def v2_resources_test_data(): +def v2_resources_test_data() -> JsonArrayType: """Load V2 resources mock data.""" - return json.loads(load_fixture("hue/v2_resources.json")) + return load_json_array_fixture("hue/v2_resources.json") -def create_mock_api_v2(hass): +def create_mock_api_v2() -> Mock: """Create a mock V2 
API.""" api = Mock(spec=aiohue_v2.HueBridgeV2) api.initialize = AsyncMock() @@ -198,30 +195,32 @@ def create_mock_api_v2(hass): @pytest.fixture -def mock_bridge_v1(hass): +def mock_bridge_v1(hass: HomeAssistant) -> Mock: """Mock a Hue bridge with V1 api.""" return create_mock_bridge(hass, api_version=1) @pytest.fixture -def mock_bridge_v2(hass): +def mock_bridge_v2(hass: HomeAssistant) -> Mock: """Mock a Hue bridge with V2 api.""" return create_mock_bridge(hass, api_version=2) @pytest.fixture -def mock_config_entry_v1(hass): +def mock_config_entry_v1() -> MockConfigEntry: """Mock a config entry for a Hue V1 bridge.""" return create_config_entry(api_version=1) @pytest.fixture -def mock_config_entry_v2(hass): +def mock_config_entry_v2() -> MockConfigEntry: """Mock a config entry.""" return create_config_entry(api_version=2) -def create_config_entry(api_version=1, host="mock-host"): +def create_config_entry( + api_version: int = 1, host: str = "mock-host" +) -> MockConfigEntry: """Mock a config entry for a Hue bridge.""" return MockConfigEntry( domain=hue.DOMAIN, @@ -230,7 +229,7 @@ def create_config_entry(api_version=1, host="mock-host"): ) -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Mock setup Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( @@ -243,7 +242,9 @@ async def setup_component(hass): ) -async def setup_bridge(hass, mock_bridge, config_entry): +async def setup_bridge( + hass: HomeAssistant, mock_bridge: Mock, config_entry: MockConfigEntry +) -> None: """Load the Hue integration with the provided bridge.""" mock_bridge.config_entry = config_entry with patch.object( @@ -255,11 +256,11 @@ async def setup_bridge(hass, mock_bridge, config_entry): async def setup_platform( - hass, - mock_bridge, - platforms, - hostname=None, -): + hass: HomeAssistant, + mock_bridge: Mock, + platforms: list[Platform] | tuple[Platform] | Platform, + hostname: str | None = None, +) -> None: 
"""Load the Hue integration with the provided bridge for given platform(s).""" if not isinstance(platforms, (list, tuple)): platforms = [platforms] @@ -282,15 +283,3 @@ async def setup_platform( # and make sure it completes before going further await hass.async_block_till_done() - - -@pytest.fixture(name="device_reg") -def get_device_reg(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture(name="calls") -def track_calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") diff --git a/tests/components/hue/fixtures/v2_resources.json b/tests/components/hue/fixtures/v2_resources.json index 662e1107ca9..980086d0988 100644 --- a/tests/components/hue/fixtures/v2_resources.json +++ b/tests/components/hue/fixtures/v2_resources.json @@ -1487,6 +1487,10 @@ "on": { "on": true }, + "owner": { + "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", + "rtype": "zone" + }, "type": "grouped_light" }, { @@ -1498,6 +1502,10 @@ "on": { "on": true }, + "owner": { + "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", + "rtype": "zone" + }, "type": "grouped_light" }, { @@ -1509,6 +1517,10 @@ "on": { "on": false }, + "owner": { + "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", + "rtype": "zone" + }, "type": "grouped_light" }, { diff --git a/tests/components/hue/test_binary_sensor.py b/tests/components/hue/test_binary_sensor.py index 8f299a4b6a6..3721637a674 100644 --- a/tests/components/hue/test_binary_sensor.py +++ b/tests/components/hue/test_binary_sensor.py @@ -1,13 +1,16 @@ """Philips Hue binary_sensor platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_BINARY_SENSOR, FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY async def test_binary_sensors( - hass: HomeAssistant, mock_bridge_v2, 
v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test if all v2 binary_sensors get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -78,7 +81,9 @@ async def test_binary_sensors( assert sensor.attributes["device_class"] == "motion" -async def test_binary_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_binary_sensor_add_update( + hass: HomeAssistant, mock_bridge_v2: Mock +) -> None: """Test if binary_sensor get added/updated from events.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "binary_sensor") diff --git a/tests/components/hue/test_bridge.py b/tests/components/hue/test_bridge.py index 42631215035..be7a6738617 100644 --- a/tests/components/hue/test_bridge.py +++ b/tests/components/hue/test_bridge.py @@ -1,7 +1,7 @@ """Test Hue bridge.""" import asyncio -from unittest.mock import patch +from unittest.mock import Mock, patch from aiohttp import client_exceptions from aiohue.errors import Unauthorized @@ -21,7 +21,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from tests.common import MockConfigEntry -async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1) -> None: +async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1: Mock) -> None: """Test a successful setup for V1 bridge.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -45,7 +45,7 @@ async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1) -> None: assert forward_entries == {"light", "binary_sensor", "sensor"} -async def test_bridge_setup_v2(hass: HomeAssistant, mock_api_v2) -> None: +async def test_bridge_setup_v2(hass: HomeAssistant, mock_api_v2: Mock) -> None: """Test a successful setup for V2 bridge.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -113,7 +113,9 @@ async def test_bridge_setup_timeout(hass: HomeAssistant) -> 
None: await hue_bridge.async_initialize_bridge() -async def test_reset_unloads_entry_if_setup(hass: HomeAssistant, mock_api_v1) -> None: +async def test_reset_unloads_entry_if_setup( + hass: HomeAssistant, mock_api_v1: Mock +) -> None: """Test calling reset while the entry has been setup.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -143,7 +145,7 @@ async def test_reset_unloads_entry_if_setup(hass: HomeAssistant, mock_api_v1) -> assert len(hass.services.async_services()) == 0 -async def test_handle_unauthorized(hass: HomeAssistant, mock_api_v1) -> None: +async def test_handle_unauthorized(hass: HomeAssistant, mock_api_v1: Mock) -> None: """Test handling an unauthorized error on update.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/hue/test_device_trigger_v1.py b/tests/components/hue/test_device_trigger_v1.py index 3d8fa64baf4..37af8c6a880 100644 --- a/tests/components/hue/test_device_trigger_v1.py +++ b/tests/components/hue/test_device_trigger_v1.py @@ -1,5 +1,7 @@ """The tests for Philips Hue device triggers for V1 bridge.""" +from unittest.mock import Mock + from pytest_unordered import unordered from homeassistant.components import automation, hue @@ -20,8 +22,8 @@ REMOTES_RESPONSE = {"7": HUE_TAP_REMOTE_1, "8": HUE_DIMMER_REMOTE_1} async def test_get_triggers( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v1, - device_reg: dr.DeviceRegistry, + mock_bridge_v1: Mock, + device_registry: dr.DeviceRegistry, ) -> None: """Test we get the expected triggers from a hue remote.""" mock_bridge_v1.mock_sensor_responses.append(REMOTES_RESPONSE) @@ -32,7 +34,7 @@ async def test_get_triggers( assert len(hass.states.async_all()) == 1 # Get triggers for specific tap switch - hue_tap_device = device_reg.async_get_device( + hue_tap_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) triggers = await async_get_device_automations( @@ -53,7 +55,7 @@ async def 
test_get_triggers( assert triggers == unordered(expected_triggers) # Get triggers for specific dimmer switch - hue_dimmer_device = device_reg.async_get_device( + hue_dimmer_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")} ) hue_bat_sensor = entity_registry.async_get( @@ -90,9 +92,9 @@ async def test_get_triggers( async def test_if_fires_on_state_change( hass: HomeAssistant, - mock_bridge_v1, - device_reg: dr.DeviceRegistry, - calls: list[ServiceCall], + mock_bridge_v1: Mock, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for button press trigger firing.""" mock_bridge_v1.mock_sensor_responses.append(REMOTES_RESPONSE) @@ -101,7 +103,7 @@ async def test_if_fires_on_state_change( assert len(hass.states.async_all()) == 1 # Set an automation with a specific tap switch trigger - hue_tap_device = device_reg.async_get_device( + hue_tap_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) assert await async_setup_component( @@ -158,8 +160,8 @@ async def test_if_fires_on_state_change( assert len(mock_bridge_v1.mock_requests) == 2 - assert len(calls) == 1 - assert calls[0].data["some"] == "B4 - 18" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "B4 - 18" # Fake another button press. 
new_sensor_response["7"] = dict(new_sensor_response["7"]) @@ -173,4 +175,4 @@ async def test_if_fires_on_state_change( await mock_bridge_v1.sensor_manager.coordinator.async_refresh() await hass.async_block_till_done() assert len(mock_bridge_v1.mock_requests) == 3 - assert len(calls) == 1 + assert len(service_calls) == 1 diff --git a/tests/components/hue/test_device_trigger_v2.py b/tests/components/hue/test_device_trigger_v2.py index 0a89b3263c7..1115e63fd92 100644 --- a/tests/components/hue/test_device_trigger_v2.py +++ b/tests/components/hue/test_device_trigger_v2.py @@ -1,5 +1,7 @@ """The tests for Philips Hue device triggers for V2 bridge.""" +from unittest.mock import Mock + from aiohue.v2.models.button import ButtonEvent from pytest_unordered import unordered @@ -8,7 +10,8 @@ from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.hue.v2.device import async_setup_devices from homeassistant.components.hue.v2.hue_event import async_setup_hue_events from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform @@ -16,7 +19,7 @@ from tests.common import async_capture_events, async_get_device_automations async def test_hue_event( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test hue button events.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -28,7 +31,12 @@ async def test_hue_event( # Emit button update event btn_event = { - "button": {"last_event": "initial_press"}, + "button": { + "button_report": { + "event": "initial_press", + "updated": "2021-10-01T12:00:00Z", + } + }, "id": "c658d3d8-a013-4b81-8ac6-78b248537e70", "metadata": {"control_id": 1}, "type": "button", @@ 
-41,23 +49,23 @@ async def test_hue_event( assert len(events) == 1 assert events[0].data["id"] == "wall_switch_with_2_controls_button" assert events[0].data["unique_id"] == btn_event["id"] - assert events[0].data["type"] == btn_event["button"]["last_event"] + assert events[0].data["type"] == btn_event["button"]["button_report"]["event"] assert events[0].data["subtype"] == btn_event["metadata"]["control_id"] async def test_get_triggers( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, - device_reg, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, + device_registry: dr.DeviceRegistry, ) -> None: """Test we get the expected triggers from a hue remote.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) await setup_platform(hass, mock_bridge_v2, ["binary_sensor", "sensor"]) # Get triggers for `Wall switch with 2 controls` - hue_wall_switch_device = device_reg.async_get_device( + hue_wall_switch_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "3ff06175-29e8-44a8-8fe7-af591b0025da")} ) hue_bat_sensor = entity_registry.async_get( diff --git a/tests/components/hue/test_diagnostics.py b/tests/components/hue/test_diagnostics.py index 7e64ba1ad93..49681601ebf 100644 --- a/tests/components/hue/test_diagnostics.py +++ b/tests/components/hue/test_diagnostics.py @@ -1,5 +1,7 @@ """Test Hue diagnostics.""" +from unittest.mock import Mock + from homeassistant.core import HomeAssistant from .conftest import setup_platform @@ -9,7 +11,7 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics_v1( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v1 + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v1: Mock ) -> None: """Test diagnostics v1.""" await setup_platform(hass, mock_bridge_v1, []) @@ -19,7 +21,7 @@ async def test_diagnostics_v1( async def test_diagnostics_v2( - hass: HomeAssistant, hass_client: 
ClientSessionGenerator, mock_bridge_v2 + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v2: Mock ) -> None: """Test diagnostics v2.""" mock_bridge_v2.api.get_diagnostics.return_value = {"hello": "world"} diff --git a/tests/components/hue/test_event.py b/tests/components/hue/test_event.py index aedf11a6e82..33b4d16f8be 100644 --- a/tests/components/hue/test_event.py +++ b/tests/components/hue/test_event.py @@ -1,14 +1,17 @@ """Philips Hue Event platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.components.event import ATTR_EVENT_TYPE, ATTR_EVENT_TYPES from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_DEVICE, FAKE_ROTARY, FAKE_ZIGBEE_CONNECTIVITY async def test_event( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test event entity for Hue integration.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -63,7 +66,7 @@ async def test_event( assert state.attributes[ATTR_EVENT_TYPE] == "long_release" -async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: """Test Event entity for newly added Relative Rotary resource.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "event") diff --git a/tests/components/hue/test_light_v1.py b/tests/components/hue/test_light_v1.py index 21b35e6d5e8..c742124e4f0 100644 --- a/tests/components/hue/test_light_v1.py +++ b/tests/components/hue/test_light_v1.py @@ -175,7 +175,7 @@ LIGHT_GAMUT = color.GamutType( LIGHT_GAMUT_TYPE = "A" -async def setup_bridge(hass: HomeAssistant, mock_bridge_v1): +async def setup_bridge(hass: HomeAssistant, mock_bridge_v1: Mock) -> 
None: """Load the Hue light platform with the provided bridge.""" hass.config.components.add(hue.DOMAIN) config_entry = create_config_entry() @@ -192,7 +192,7 @@ async def setup_bridge(hass: HomeAssistant, mock_bridge_v1): async def test_not_load_groups_if_old_bridge( - hass: HomeAssistant, mock_bridge_v1 + hass: HomeAssistant, mock_bridge_v1: Mock ) -> None: """Test that we don't try to load groups if bridge runs old software.""" mock_bridge_v1.api.config.apiversion = "1.12.0" @@ -203,7 +203,7 @@ async def test_not_load_groups_if_old_bridge( assert len(hass.states.async_all()) == 0 -async def test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test the update_lights function when no lights are found.""" mock_bridge_v1.mock_light_responses.append({}) mock_bridge_v1.mock_group_responses.append({}) @@ -212,7 +212,7 @@ async def test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(hass.states.async_all()) == 0 -async def test_lights(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_lights(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test the update_lights function with some lights.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -232,7 +232,7 @@ async def test_lights(hass: HomeAssistant, mock_bridge_v1) -> None: assert lamp_2.state == "off" -async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test that lights only report appropriate color mode.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) mock_bridge_v1.mock_group_responses.append(GROUP_RESPONSE) @@ -278,7 +278,7 @@ async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1) -> None: async def test_groups( - hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1 + hass: HomeAssistant, 
entity_registry: er.EntityRegistry, mock_bridge_v1: Mock ) -> None: """Test the update_lights function with some lights.""" mock_bridge_v1.mock_light_responses.append({}) @@ -303,7 +303,7 @@ async def test_groups( assert entity_registry.async_get("light.group_2").unique_id == "2" -async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has a new group.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -350,7 +350,7 @@ async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1) -> None assert new_group.attributes["color_temp"] == 250 -async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has a new light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -396,7 +396,7 @@ async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1) -> None assert light.state == "off" -async def test_group_removed(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_group_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has removed group.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -427,7 +427,7 @@ async def test_group_removed(hass: HomeAssistant, mock_bridge_v1) -> None: assert removed_group is None -async def test_light_removed(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_light_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has removed light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -456,7 +456,7 @@ async def test_light_removed(hass: HomeAssistant, mock_bridge_v1) -> None: assert removed_light is None -async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1) -> 
None: +async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test changing one group that will impact the state of other light.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -509,7 +509,7 @@ async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1) -> None: assert group_2.state == "off" -async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test changing one light that will impact state of other light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -562,7 +562,7 @@ async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1) -> None: assert lamp_2.attributes["brightness"] == 100 -async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test bridge marked as not available if timeout error during update.""" mock_bridge_v1.api.lights.update = Mock(side_effect=TimeoutError) mock_bridge_v1.api.groups.update = Mock(side_effect=TimeoutError) @@ -571,7 +571,7 @@ async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(hass.states.async_all()) == 0 -async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test bridge marked as not authorized if unauthorized during update.""" mock_bridge_v1.api.lights.update = Mock(side_effect=aiohue.Unauthorized) await setup_bridge(hass, mock_bridge_v1) @@ -580,7 +580,7 @@ async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(mock_bridge_v1.handle_unauthorized_error.mock_calls) == 1 -async def test_light_turn_on_service(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_light_turn_on_service(hass: HomeAssistant, 
mock_bridge_v1: Mock) -> None: """Test calling the turn on service on a light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -633,7 +633,9 @@ async def test_light_turn_on_service(hass: HomeAssistant, mock_bridge_v1) -> Non } -async def test_light_turn_off_service(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_light_turn_off_service( + hass: HomeAssistant, mock_bridge_v1: Mock +) -> None: """Test calling the turn on service on a light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -775,7 +777,7 @@ async def test_group_features( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v1, + mock_bridge_v1: Mock, ) -> None: """Test group features.""" color_temp_type = "Color temperature light" diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py index fca907eabb0..417670a3769 100644 --- a/tests/components/hue/test_light_v2.py +++ b/tests/components/hue/test_light_v2.py @@ -1,15 +1,18 @@ """Philips Hue lights platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.components.light import ColorMode from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_DEVICE, FAKE_LIGHT, FAKE_ZIGBEE_CONNECTIVITY async def test_lights( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test if all v2 lights get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -77,7 +80,7 @@ async def test_lights( async def test_light_turn_on_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling 
the turn on service on a light.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -229,7 +232,7 @@ async def test_light_turn_on_service( async def test_light_turn_off_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn off service on a light.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -318,7 +321,7 @@ async def test_light_turn_off_service( assert mock_bridge_v2.mock_requests[4]["json"]["identify"]["action"] == "identify" -async def test_light_added(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_light_added(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: """Test new light added to bridge.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) @@ -341,7 +344,7 @@ async def test_light_added(hass: HomeAssistant, mock_bridge_v2) -> None: async def test_light_availability( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test light availability property.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -375,8 +378,8 @@ async def test_light_availability( async def test_grouped_lights( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if all v2 grouped lights get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) diff --git a/tests/components/hue/test_migration.py b/tests/components/hue/test_migration.py index adcc582a314..388e2f68f99 100644 --- a/tests/components/hue/test_migration.py +++ b/tests/components/hue/test_migration.py @@ -1,10 +1,11 @@ """Test Hue migration logic.""" -from unittest.mock import patch +from unittest.mock import 
Mock, patch from homeassistant.components import hue from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.json import JsonArrayType from tests.common import MockConfigEntry @@ -51,9 +52,9 @@ async def test_light_entity_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v2, - mock_config_entry_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + mock_config_entry_v2: MockConfigEntry, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if entity schema for lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -98,9 +99,9 @@ async def test_sensor_entity_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v2, - mock_config_entry_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + mock_config_entry_v2: MockConfigEntry, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if entity schema for sensors migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -159,9 +160,9 @@ async def test_sensor_entity_migration( async def test_group_entity_migration_with_v1_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - mock_config_entry_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + mock_config_entry_v2: MockConfigEntry, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if entity schema for grouped_lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -194,9 +195,9 @@ async def test_group_entity_migration_with_v1_id( async def test_group_entity_migration_with_v2_group_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - mock_config_entry_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + mock_config_entry_v2: 
MockConfigEntry, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if entity schema for grouped_lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 diff --git a/tests/components/hue/test_scene.py b/tests/components/hue/test_scene.py index 5e2fd939087..9488e0e14ce 100644 --- a/tests/components/hue/test_scene.py +++ b/tests/components/hue/test_scene.py @@ -1,8 +1,11 @@ """Philips Hue scene platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_SCENE @@ -11,8 +14,8 @@ from .const import FAKE_SCENE async def test_scene( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if (config) scenes get created.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -32,7 +35,7 @@ async def test_scene( assert test_entity.attributes["group_type"] == "zone" assert test_entity.attributes["name"] == "Dynamic Test Scene" assert test_entity.attributes["speed"] == 0.6269841194152832 - assert test_entity.attributes["brightness"] == 46.85 + assert test_entity.attributes["brightness"] == 119 assert test_entity.attributes["is_dynamic"] is True # test (regular) scene for a hue room @@ -44,7 +47,7 @@ async def test_scene( assert test_entity.attributes["group_type"] == "room" assert test_entity.attributes["name"] == "Regular Test Scene" assert test_entity.attributes["speed"] == 0.5 - assert test_entity.attributes["brightness"] == 100.0 + assert test_entity.attributes["brightness"] == 255 assert test_entity.attributes["is_dynamic"] is False # test smart scene @@ -72,7 +75,7 @@ async def test_scene( async def 
test_scene_turn_on_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn on service on a scene.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -109,7 +112,7 @@ async def test_scene_turn_on_service( async def test_scene_advanced_turn_on_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the advanced turn on service on a scene.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -146,7 +149,7 @@ async def test_scene_advanced_turn_on_service( async def test_scene_updates( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test scene events from bridge.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -167,7 +170,7 @@ async def test_scene_updates( assert test_entity is not None assert test_entity.state == STATE_UNKNOWN assert test_entity.name == "Test Room Mocked Scene" - assert test_entity.attributes["brightness"] == 65.0 + assert test_entity.attributes["brightness"] == 166 # test update updated_resource = {**FAKE_SCENE} @@ -176,7 +179,7 @@ async def test_scene_updates( await hass.async_block_till_done() test_entity = hass.states.get(test_entity_id) assert test_entity is not None - assert test_entity.attributes["brightness"] == 35.0 + assert test_entity.attributes["brightness"] == 89 # # test entity name changes on group name change mock_bridge_v2.api.emit_event( diff --git a/tests/components/hue/test_sensor_v1.py b/tests/components/hue/test_sensor_v1.py index b1ef94f8ed0..0c5d7cccfe2 100644 --- a/tests/components/hue/test_sensor_v1.py +++ b/tests/components/hue/test_sensor_v1.py @@ -10,7 +10,7 @@ from homeassistant.components.hue.const 
import ATTR_HUE_EVENT from homeassistant.components.hue.v1 import sensor_base from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from .conftest import create_mock_bridge, setup_platform @@ -282,7 +282,7 @@ SENSOR_RESPONSE = { } -async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test the update_items function when no sensors are found.""" mock_bridge_v1.mock_sensor_responses.append({}) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -291,7 +291,7 @@ async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: async def test_sensors_with_multiple_bridges( - hass: HomeAssistant, mock_bridge_v1 + hass: HomeAssistant, mock_bridge_v1: Mock ) -> None: """Test the update_items function with some sensors.""" mock_bridge_2 = create_mock_bridge(hass, api_version=1) @@ -315,7 +315,7 @@ async def test_sensors_with_multiple_bridges( async def test_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1 + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1: Mock ) -> None: """Test the update_items function with some sensors.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -361,7 +361,7 @@ async def test_sensors( ) -async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test that unsupported sensors don't get added and don't fail.""" response_with_unsupported = dict(SENSOR_RESPONSE) response_with_unsupported["7"] = UNSUPPORTED_SENSOR @@ -372,7 +372,7 @@ async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(hass.states.async_all()) == 7 -async def 
test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has a new sensor.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -406,7 +406,7 @@ async def test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1) -> Non assert temperature.state == "17.75" -async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has removed sensor.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -434,7 +434,7 @@ async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1) -> None: assert removed_sensor is None -async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test bridge marked as not available if timeout error during update.""" mock_bridge_v1.api.sensors.update = Mock(side_effect=TimeoutError) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -442,7 +442,7 @@ async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(hass.states.async_all()) == 0 -async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test bridge marked as not authorized if unauthorized during update.""" mock_bridge_v1.api.sensors.update = Mock(side_effect=aiohue.Unauthorized) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -452,7 +452,10 @@ async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: async def test_hue_events( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_bridge_v1, device_reg + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_bridge_v1: Mock, + 
device_registry: dr.DeviceRegistry, ) -> None: """Test that hue remotes fire events when pressed.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -464,7 +467,7 @@ async def test_hue_events( assert len(hass.states.async_all()) == 7 assert len(events) == 0 - hue_tap_device = device_reg.async_get_device( + hue_tap_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) @@ -495,7 +498,7 @@ async def test_hue_events( "last_updated": "2019-12-28T22:58:03", } - hue_dimmer_device = device_reg.async_get_device( + hue_dimmer_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")} ) @@ -594,7 +597,7 @@ async def test_hue_events( async_fire_time_changed(hass) await hass.async_block_till_done() - hue_aurora_device = device_reg.async_get_device( + hue_aurora_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "ff:ff:00:0f:e7:fd:bc:b7")} ) diff --git a/tests/components/hue/test_sensor_v2.py b/tests/components/hue/test_sensor_v2.py index beb86de505b..22888a411ba 100644 --- a/tests/components/hue/test_sensor_v2.py +++ b/tests/components/hue/test_sensor_v2.py @@ -1,19 +1,24 @@ """Philips Hue sensor platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.components import hue from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonArrayType from .conftest import setup_bridge, setup_platform from .const import FAKE_DEVICE, FAKE_SENSOR, FAKE_ZIGBEE_CONNECTIVITY +from tests.common import MockConfigEntry + async def test_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if all v2 sensors get created with correct features.""" await 
mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -65,9 +70,9 @@ async def test_sensors( async def test_enable_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, - mock_config_entry_v2, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, + mock_config_entry_v2: MockConfigEntry, ) -> None: """Test enabling of the by default disabled zigbee_connectivity sensor.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -105,7 +110,7 @@ async def test_enable_sensor( assert state.attributes["mac_address"] == "00:17:88:01:0b:aa:bb:99" -async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: """Test if sensors get added/updated from events.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "sensor") diff --git a/tests/components/hue/test_services.py b/tests/components/hue/test_services.py index 6ce3cf2cc82..26a4cab8261 100644 --- a/tests/components/hue/test_services.py +++ b/tests/components/hue/test_services.py @@ -1,6 +1,6 @@ """Test Hue services.""" -from unittest.mock import patch +from unittest.mock import Mock, patch from homeassistant.components import hue from homeassistant.components.hue import bridge @@ -48,7 +48,7 @@ SCENE_RESPONSE = { } -async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1) -> None: +async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1: Mock) -> None: """Test successful hue_activate_scene.""" config_entry = MockConfigEntry( domain=hue.DOMAIN, @@ -83,7 +83,9 @@ async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1) -> None: assert mock_api_v1.mock_requests[2]["path"] == "groups/group_1/action" -async def test_hue_activate_scene_transition(hass: HomeAssistant, mock_api_v1) -> None: +async def test_hue_activate_scene_transition( + 
hass: HomeAssistant, mock_api_v1: Mock +) -> None: """Test successful hue_activate_scene with transition.""" config_entry = MockConfigEntry( domain=hue.DOMAIN, @@ -119,7 +121,7 @@ async def test_hue_activate_scene_transition(hass: HomeAssistant, mock_api_v1) - async def test_hue_activate_scene_group_not_found( - hass: HomeAssistant, mock_api_v1 + hass: HomeAssistant, mock_api_v1: Mock ) -> None: """Test failed hue_activate_scene due to missing group.""" config_entry = MockConfigEntry( @@ -151,7 +153,7 @@ async def test_hue_activate_scene_group_not_found( async def test_hue_activate_scene_scene_not_found( - hass: HomeAssistant, mock_api_v1 + hass: HomeAssistant, mock_api_v1: Mock ) -> None: """Test failed hue_activate_scene due to missing scene.""" config_entry = MockConfigEntry( @@ -184,10 +186,10 @@ async def test_hue_activate_scene_scene_not_found( async def test_hue_multi_bridge_activate_scene_all_respond( hass: HomeAssistant, - mock_bridge_v1, - mock_bridge_v2, - mock_config_entry_v1, - mock_config_entry_v2, + mock_bridge_v1: Mock, + mock_bridge_v2: Mock, + mock_config_entry_v1: MockConfigEntry, + mock_config_entry_v2: MockConfigEntry, ) -> None: """Test that makes multiple bridges successfully activate a scene.""" await setup_component(hass) @@ -218,10 +220,10 @@ async def test_hue_multi_bridge_activate_scene_all_respond( async def test_hue_multi_bridge_activate_scene_one_responds( hass: HomeAssistant, - mock_bridge_v1, - mock_bridge_v2, - mock_config_entry_v1, - mock_config_entry_v2, + mock_bridge_v1: Mock, + mock_bridge_v2: Mock, + mock_config_entry_v1: MockConfigEntry, + mock_config_entry_v2: MockConfigEntry, ) -> None: """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) @@ -251,10 +253,10 @@ async def test_hue_multi_bridge_activate_scene_one_responds( async def test_hue_multi_bridge_activate_scene_zero_responds( hass: HomeAssistant, - mock_bridge_v1, - mock_bridge_v2, - mock_config_entry_v1, - 
mock_config_entry_v2, + mock_bridge_v1: Mock, + mock_bridge_v2: Mock, + mock_config_entry_v1: MockConfigEntry, + mock_config_entry_v2: MockConfigEntry, ) -> None: """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) diff --git a/tests/components/hue/test_switch.py b/tests/components/hue/test_switch.py index 2e25dd715c1..478acbaa303 100644 --- a/tests/components/hue/test_switch.py +++ b/tests/components/hue/test_switch.py @@ -1,13 +1,16 @@ """Philips Hue switch platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_BINARY_SENSOR, FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY async def test_switch( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test if (config) switches get created.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -34,7 +37,7 @@ async def test_switch( async def test_switch_turn_on_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn on service on a switch.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -58,7 +61,7 @@ async def test_switch_turn_on_service( async def test_switch_turn_off_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn off service on a switch.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -98,7 +101,7 @@ async def test_switch_turn_off_service( assert test_entity.state == "off" -async def test_switch_added(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_switch_added(hass: 
HomeAssistant, mock_bridge_v2: Mock) -> None: """Test new switch added to bridge.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) diff --git a/tests/components/humidifier/test_device_condition.py b/tests/components/humidifier/test_device_condition.py index 4f4d21adcba..ec8406bfe7b 100644 --- a/tests/components/humidifier/test_device_condition.py +++ b/tests/components/humidifier/test_device_condition.py @@ -17,11 +17,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -29,12 +25,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_condition_types"), [ @@ -153,7 +143,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -238,42 +228,42 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert 
service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_AWAY}) hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_mode - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_mode - event - test_event3" hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_HOME}) # Should not fire hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -316,15 +306,15 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_AWAY}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_mode - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_mode - event - test_event1" @pytest.mark.parametrize( diff --git a/tests/components/humidifier/test_device_trigger.py b/tests/components/humidifier/test_device_trigger.py index 83202e16675..3bb1f8c2551 100644 --- 
a/tests/components/humidifier/test_device_trigger.py +++ b/tests/components/humidifier/test_device_trigger.py @@ -30,7 +30,6 @@ from tests.common import ( MockConfigEntry, async_fire_time_changed, async_get_device_automations, - async_mock_service, ) @@ -39,12 +38,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -166,7 +159,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -356,8 +349,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 7, const.ATTR_CURRENT_HUMIDITY: 35}, ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "target_humidity_changed_below" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "target_humidity_changed_below" # Fake that the current humidity is changing hass.states.async_set( @@ -366,8 +359,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 7, const.ATTR_CURRENT_HUMIDITY: 18}, ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "current_humidity_changed_below" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "current_humidity_changed_below" # Fake that the humidity target is changing hass.states.async_set( @@ -376,8 +369,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 18}, ) await hass.async_block_till_done() 
- assert len(calls) == 3 - assert calls[2].data["some"] == "target_humidity_changed_above" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "target_humidity_changed_above" # Fake that the current humidity is changing hass.states.async_set( @@ -386,14 +379,14 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "current_humidity_changed_above" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "current_humidity_changed_above" # Wait 6 minutes async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(minutes=6)) await hass.async_block_till_done() - assert len(calls) == 6 - assert {calls[4].data["some"], calls[5].data["some"]} == { + assert len(service_calls) == 6 + assert {service_calls[4].data["some"], service_calls[5].data["some"]} == { "current_humidity_changed_above_for", "target_humidity_changed_above_for", } @@ -405,8 +398,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(calls) == 8 - assert {calls[6].data["some"], calls[7].data["some"]} == { + assert len(service_calls) == 8 + assert {service_calls[6].data["some"], service_calls[7].data["some"]} == { "turn_off device - humidifier.test_5678 - on - off - None", "turn_on_or_off device - humidifier.test_5678 - on - off - None", } @@ -418,8 +411,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(calls) == 10 - assert {calls[8].data["some"], calls[9].data["some"]} == { + assert len(service_calls) == 10 + assert {service_calls[8].data["some"], service_calls[9].data["some"]} == { "turn_on device - humidifier.test_5678 - off - on - None", "turn_on_or_off device - humidifier.test_5678 - off - on - None", } @@ 
-429,7 +422,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -479,12 +472,14 @@ async def test_if_fires_on_state_change_legacy( # Fake that the humidity is changing hass.states.async_set(entry.entity_id, STATE_ON, {const.ATTR_HUMIDITY: 7}) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "target_humidity_changed_below" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "target_humidity_changed_below" async def test_invalid_config( - hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" entry = entity_registry.async_get_or_create(DOMAIN, "test", "5678") @@ -528,7 +523,7 @@ async def test_invalid_config( hass.states.async_set(entry.entity_id, STATE_ON, {const.ATTR_HUMIDITY: 7}) await hass.async_block_till_done() # Should not trigger for invalid config - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_get_trigger_capabilities_on(hass: HomeAssistant) -> None: diff --git a/tests/components/humidifier/test_init.py b/tests/components/humidifier/test_init.py index b90e7084dd1..b31750a3a3b 100644 --- a/tests/components/humidifier/test_init.py +++ b/tests/components/humidifier/test_init.py @@ -48,7 +48,7 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert humidifier.turn_off.called -def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, str]]: +def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: return [(enum_field, constant_prefix) for enum_field in 
enum] diff --git a/tests/components/hunterdouglas_powerview/conftest.py b/tests/components/hunterdouglas_powerview/conftest.py index da339914aac..d4433f93dcb 100644 --- a/tests/components/hunterdouglas_powerview/conftest.py +++ b/tests/components/hunterdouglas_powerview/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for Hunter Douglas Powerview tests.""" -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, PropertyMock, patch from aiopvapi.resources.shade import ShadePosition import pytest -from typing_extensions import Generator from homeassistant.components.hunterdouglas_powerview.const import DOMAIN @@ -29,7 +29,7 @@ def mock_hunterdouglas_hub( rooms_json: str, scenes_json: str, shades_json: str, -) -> Generator[MagicMock]: +) -> Generator[None]: """Return a mocked Powerview Hub with all data populated.""" with ( patch( diff --git a/tests/components/hunterdouglas_powerview/test_scene.py b/tests/components/hunterdouglas_powerview/test_scene.py index 9628805d0e8..43074d55470 100644 --- a/tests/components/hunterdouglas_powerview/test_scene.py +++ b/tests/components/hunterdouglas_powerview/test_scene.py @@ -14,10 +14,10 @@ from .const import MOCK_MAC from tests.common import MockConfigEntry +@pytest.mark.usefixtures("mock_hunterdouglas_hub") @pytest.mark.parametrize("api_version", [1, 2, 3]) async def test_scenes( hass: HomeAssistant, - mock_hunterdouglas_hub: None, api_version: int, ) -> None: """Test the scenes.""" diff --git a/tests/components/husqvarna_automower/__init__.py b/tests/components/husqvarna_automower/__init__.py index 8c51d69ba3d..9473b68a5ed 100644 --- a/tests/components/husqvarna_automower/__init__.py +++ b/tests/components/husqvarna_automower/__init__.py @@ -7,6 +7,10 @@ from tests.common import MockConfigEntry async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Fixture for setting up the component.""" + # We lock 
the timezone, because the timezone is passed to the library to generate + # some values like the next start sensor. This is needed, as the device is not aware + # of its own timezone. So we assume the device is in the timezone which is selected in + # the Home Assistant config. + await hass.config.async_set_time_zone("Europe/Berlin") config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/husqvarna_automower/conftest.py b/tests/components/husqvarna_automower/conftest.py index 7ace3b76808..dbb8f3b4c72 100644 --- a/tests/components/husqvarna_automower/conftest.py +++ b/tests/components/husqvarna_automower/conftest.py @@ -1,5 +1,6 @@ """Test helpers for Husqvarna Automower.""" +from collections.abc import Generator import time from unittest.mock import AsyncMock, patch @@ -7,7 +8,6 @@ from aioautomower.session import AutomowerSession, _MowerCommands from aioautomower.utils import mower_list_to_dictionary_dataclass from aiohttp import ClientWebSocketResponse import pytest -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/husqvarna_automower/fixtures/mower.json b/tests/components/husqvarna_automower/fixtures/mower.json index a5cae68f47c..aa8ea2cbef4 100644 --- a/tests/components/husqvarna_automower/fixtures/mower.json +++ b/tests/components/husqvarna_automower/fixtures/mower.json @@ -13,6 +13,7 @@ "batteryPercent": 100 }, "capabilities": { + "canConfirmError": true, "headlights": true, "workAreas": true, "position": true, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index d8cd748c793..3838f2eb960 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -7,18 +7,22 @@ 'calendar': dict({ 'events': 
list([ dict({ - 'end': '2024-03-02T00:00:00+00:00', + 'end': '2024-03-02T00:00:00', 'rrule': 'FREQ=WEEKLY;BYDAY=MO,WE,FR', - 'start': '2024-03-01T19:00:00+00:00', + 'schedule_no': 1, + 'start': '2024-03-01T19:00:00', 'uid': '1140_300_MO,WE,FR', 'work_area_id': None, + 'work_area_name': None, }), dict({ - 'end': '2024-03-02T08:00:00+00:00', + 'end': '2024-03-02T08:00:00', 'rrule': 'FREQ=WEEKLY;BYDAY=TU,TH,SA', - 'start': '2024-03-02T00:00:00+00:00', + 'schedule_no': 2, + 'start': '2024-03-02T00:00:00', 'uid': '0_480_TU,TH,SA', 'work_area_id': None, + 'work_area_name': None, }), ]), 'tasks': list([ @@ -33,6 +37,7 @@ 'tuesday': False, 'wednesday': True, 'work_area_id': None, + 'work_area_name': None, }), dict({ 'duration': 480, @@ -45,10 +50,12 @@ 'tuesday': True, 'wednesday': False, 'work_area_id': None, + 'work_area_name': None, }), ]), }), 'capabilities': dict({ + 'can_confirm_error': True, 'headlights': True, 'position': True, 'stay_out_zones': True, @@ -61,17 +68,18 @@ 'mower': dict({ 'activity': 'PARKED_IN_CS', 'error_code': 0, - 'error_datetime': None, 'error_datetime_naive': None, 'error_key': None, + 'error_timestamp': 0, 'inactive_reason': 'NONE', 'is_error_confirmable': False, 'mode': 'MAIN_AREA', 'state': 'RESTRICTED', 'work_area_id': 123456, + 'work_area_name': 'Front lawn', }), 'planner': dict({ - 'next_start_datetime': '2023-06-05T19:00:00+00:00', + 'next_start': 1685991600000, 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ 'action': 'NOT_ACTIVE', @@ -113,6 +121,17 @@ 'name': 'Test Mower 1', 'serial_number': 123, }), + 'work_area_dict': dict({ + '0': 'my_lawn', + '123456': 'Front lawn', + '654321': 'Back lawn', + }), + 'work_area_names': list([ + 'Front lawn', + 'Back lawn', + 'my_lawn', + 'no_work_area_active', + ]), 'work_areas': dict({ '0': dict({ 'cutting_height': 50, diff --git a/tests/components/husqvarna_automower/snapshots/test_init.ambr b/tests/components/husqvarna_automower/snapshots/test_init.ambr index 
c3a7191b4b9..ccfb1bf3df4 100644 --- a/tests/components/husqvarna_automower/snapshots/test_init.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'Husqvarna', 'model': '450XH-TEST', + 'model_id': None, 'name': 'Test Mower 1', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 123, 'suggested_area': 'Garden', 'sw_version': None, diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index 0b0d76620d3..c260e6beba6 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -548,65 +548,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2023-06-05T19:00:00+00:00', - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Front lawn', - 'Back lawn', - 'my_lawn', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_1_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'work_area', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_work_area', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Mower 1 None', - 'options': list([ - 'Front lawn', - 'Back lawn', - 'my_lawn', - ]), - }), - 'context': , - 
'entity_id': 'sensor.test_mower_1_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Front lawn', + 'state': '2023-06-05T17:00:00+00:00', }) # --- # name: test_sensor_snapshot[sensor.test_mower_1_number_of_charging_cycles-entry] @@ -1059,6 +1001,7 @@ 'Front lawn', 'Back lawn', 'my_lawn', + 'no_work_area_active', ]), }), 'config_entry_id': , @@ -1097,7 +1040,13 @@ 'Front lawn', 'Back lawn', 'my_lawn', + 'no_work_area_active', ]), + 'work_area_id_assignment': dict({ + 0: 'my_lawn', + 123456: 'Front lawn', + 654321: 'Back lawn', + }), }), 'context': , 'entity_id': 'sensor.test_mower_1_work_area', diff --git a/tests/components/husqvarna_automower/test_button.py b/tests/components/husqvarna_automower/test_button.py index 6cc465df74b..5cbb9b893a8 100644 --- a/tests/components/husqvarna_automower/test_button.py +++ b/tests/components/husqvarna_automower/test_button.py @@ -34,7 +34,6 @@ from tests.common import ( @pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_button_states_and_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, diff --git a/tests/components/husqvarna_automower/test_diagnostics.py b/tests/components/husqvarna_automower/test_diagnostics.py index eeb6b46e6c4..3166b09f1ee 100644 --- a/tests/components/husqvarna_automower/test_diagnostics.py +++ b/tests/components/husqvarna_automower/test_diagnostics.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock import pytest from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.core import HomeAssistant @@ -36,7 +37,7 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) 
@pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) diff --git a/tests/components/husqvarna_automower/test_lawn_mower.py b/tests/components/husqvarna_automower/test_lawn_mower.py index 5d5cacfc6bf..2ae427e0e1e 100644 --- a/tests/components/husqvarna_automower/test_lawn_mower.py +++ b/tests/components/husqvarna_automower/test_lawn_mower.py @@ -13,7 +13,7 @@ from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.components.lawn_mower import LawnMowerActivity from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from . import setup_integration from .const import TEST_MOWER_ID @@ -122,7 +122,7 @@ async def test_lawn_mower_commands( async def test_lawn_mower_service_commands( hass: HomeAssistant, aioautomower_command: str, - extra_data: int | None, + extra_data: timedelta, service: str, service_data: dict[str, int] | None, mock_automower_client: AsyncMock, @@ -158,27 +158,112 @@ async def test_lawn_mower_service_commands( @pytest.mark.parametrize( - ("service", "service_data"), + ("aioautomower_command", "extra_data1", "extra_data2", "service", "service_data"), [ ( - "override_schedule", + "start_in_workarea", + 123456, + timedelta(days=40), + "override_schedule_work_area", { - "duration": {"days": 1, "hours": 12, "minutes": 30}, - "override_mode": "fly_to_moon", + "work_area_id": 123456, + "duration": {"days": 40}, }, ), ], ) -async def test_lawn_mower_wrong_service_commands( +async def test_lawn_mower_override_work_area_command( hass: HomeAssistant, + aioautomower_command: str, + extra_data1: int, + extra_data2: timedelta, service: str, service_data: dict[str, int] | None, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test lawn_mower commands.""" + """Test 
lawn_mower work area override commands.""" await setup_integration(hass, mock_config_entry) - with pytest.raises(MultipleInvalid): + mocked_method = AsyncMock() + setattr(mock_automower_client.commands, aioautomower_command, mocked_method) + await hass.services.async_call( + domain=DOMAIN, + service=service, + target={"entity_id": "lawn_mower.test_mower_1"}, + service_data=service_data, + blocking=True, + ) + mocked_method.assert_called_once_with(TEST_MOWER_ID, extra_data1, extra_data2) + + getattr( + mock_automower_client.commands, aioautomower_command + ).side_effect = ApiException("Test error") + with pytest.raises( + HomeAssistantError, + match="Failed to send command: Test error", + ): + await hass.services.async_call( + domain=DOMAIN, + service=service, + target={"entity_id": "lawn_mower.test_mower_1"}, + service_data=service_data, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "service_data", "mower_support_wa", "exception"), + [ + ( + "override_schedule", + { + "duration": {"days": 1, "hours": 12, "minutes": 30}, + "override_mode": "fly_to_moon", + }, + False, + MultipleInvalid, + ), + ( + "override_schedule_work_area", + { + "work_area_id": 123456, + "duration": {"days": 40}, + }, + False, + ServiceValidationError, + ), + ( + "override_schedule_work_area", + { + "work_area_id": 12345, + "duration": {"days": 40}, + }, + True, + ServiceValidationError, + ), + ], +) +async def test_lawn_mower_wrong_service_commands( + hass: HomeAssistant, + service: str, + service_data: dict[str, int] | None, + mower_support_wa: bool, + exception, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test lawn_mower commands.""" + await setup_integration(hass, mock_config_entry) + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + values[TEST_MOWER_ID].capabilities.work_areas = mower_support_wa + 
mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + with pytest.raises(exception): await hass.services.async_call( domain=DOMAIN, service=service, diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index 0547d6a9b2e..9f2f8793bba 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -1,13 +1,18 @@ """Tests for number platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException from aioautomower.utils import mower_list_to_dictionary_dataclass +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN +from homeassistant.components.husqvarna_automower.const import ( + DOMAIN, + EXECUTION_TIME_DELAY, +) from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -16,7 +21,12 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, load_json_value_fixture, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, + snapshot_platform, +) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -41,7 +51,7 @@ async def test_number_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="number", @@ -57,6 +67,7 @@ async def test_number_workarea_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test number commands.""" entity_id = "number.test_mower_1_front_lawn_cutting_height" @@ -75,8 +86,11 @@ async def test_number_workarea_commands( service="set_value", target={"entity_id": entity_id}, service_data={"value": "75"}, - blocking=True, + blocking=False, ) + freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) + async_fire_time_changed(hass) + await hass.async_block_till_done() mocked_method.assert_called_once_with(TEST_MOWER_ID, 75, 123456) state = hass.states.get(entity_id) assert state.state is not None @@ -85,7 +99,7 @@ async def test_number_workarea_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="number", diff --git a/tests/components/husqvarna_automower/test_select.py b/tests/components/husqvarna_automower/test_select.py index 2728bb5e672..e885a4d3487 100644 --- a/tests/components/husqvarna_automower/test_select.py +++ b/tests/components/husqvarna_automower/test_select.py @@ -88,7 +88,7 @@ async def 
test_select_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="select", diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 8f30a3dcb04..1a4f545ac96 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -73,12 +73,12 @@ async def test_next_start_sensor( await setup_integration(hass, mock_config_entry) state = hass.states.get("sensor.test_mower_1_next_start") assert state is not None - assert state.state == "2023-06-05T19:00:00+00:00" + assert state.state == "2023-06-05T17:00:00+00:00" values = mower_list_to_dictionary_dataclass( load_json_value_fixture("mower.json", DOMAIN) ) - values[TEST_MOWER_ID].planner.next_start_datetime = None + values[TEST_MOWER_ID].planner.next_start_datetime_naive = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -87,6 +87,38 @@ async def test_next_start_sensor( assert state.state == STATE_UNKNOWN +async def test_work_area_sensor( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the work area sensor.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("sensor.test_mower_1_work_area") + assert state is not None + assert state.state == "Front lawn" + + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + values[TEST_MOWER_ID].mower.work_area_id = None + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = 
hass.states.get("sensor.test_mower_1_work_area") + assert state.state == "no_work_area_active" + + values[TEST_MOWER_ID].mower.work_area_id = 0 + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_mower_1_work_area") + assert state.state == "my_lawn" + + @pytest.mark.parametrize( ("sensor_to_test"), [ diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py index 08450158876..5b4e465e253 100644 --- a/tests/components/husqvarna_automower/test_switch.py +++ b/tests/components/husqvarna_automower/test_switch.py @@ -1,5 +1,6 @@ """Tests for switch platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException @@ -9,7 +10,10 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN +from homeassistant.components.husqvarna_automower.const import ( + DOMAIN, + EXECUTION_TIME_DELAY, +) from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -83,7 +87,7 @@ async def test_switch_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="switch", @@ -109,6 +113,7 @@ async def test_stay_out_zone_switch_commands( excepted_state: str, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test switch commands.""" entity_id = "switch.test_mower_1_avoid_danger_zone" @@ -124,8 +129,11 @@ async def 
test_stay_out_zone_switch_commands( domain="switch", service=service, service_data={"entity_id": entity_id}, - blocking=True, + blocking=False, ) + freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) + async_fire_time_changed(hass) + await hass.async_block_till_done() mocked_method.assert_called_once_with(TEST_MOWER_ID, TEST_ZONE_ID, boolean) state = hass.states.get(entity_id) assert state is not None @@ -134,7 +142,7 @@ async def test_stay_out_zone_switch_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="switch", diff --git a/tests/components/hydrawise/conftest.py b/tests/components/hydrawise/conftest.py index eb1518eb7f2..a938322414b 100644 --- a/tests/components/hydrawise/conftest.py +++ b/tests/components/hydrawise/conftest.py @@ -1,6 +1,6 @@ """Common fixtures for the Hydrawise tests.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from datetime import datetime, timedelta from unittest.mock import AsyncMock, patch @@ -20,7 +20,6 @@ from pydrawise.schema import ( Zone, ) import pytest -from typing_extensions import Generator from homeassistant.components.hydrawise.const import DOMAIN from homeassistant.const import CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME @@ -67,9 +66,9 @@ def mock_pydrawise( """Mock Hydrawise.""" with patch("pydrawise.client.Hydrawise", autospec=True) as mock_pydrawise: user.controllers = [controller] - controller.zones = zones controller.sensors = sensors mock_pydrawise.return_value.get_user.return_value = user + mock_pydrawise.return_value.get_zones.return_value = zones mock_pydrawise.return_value.get_water_use_summary.return_value = ( controller_water_use_summary ) @@ -142,7 +141,7 @@ def sensors() -> list[Sensor]: ), status=SensorStatus( 
water_flow=LocalizedValueType(value=577.0044752010709, unit="gal"), - active=None, + active=False, ), ), ] @@ -154,7 +153,6 @@ def zones() -> list[Zone]: return [ Zone( name="Zone One", - number=1, id=5965394, scheduled_runs=ScheduledZoneRuns( summary="", @@ -171,7 +169,6 @@ def zones() -> list[Zone]: ), Zone( name="Zone Two", - number=2, id=5965395, scheduled_runs=ScheduledZoneRuns( current_run=ScheduledZoneRun( @@ -190,6 +187,8 @@ def controller_water_use_summary() -> ControllerWaterUseSummary: total_active_use=332.6, total_inactive_use=13.0, active_use_by_zone_id={5965394: 120.1, 5965395: 0.0}, + total_active_time=timedelta(seconds=123), + active_time_by_zone_id={5965394: timedelta(seconds=123), 5965395: timedelta()}, unit="gal", ) diff --git a/tests/components/hydrawise/snapshots/test_sensor.ambr b/tests/components/hydrawise/snapshots/test_sensor.ambr index 3472de98460..dadf3c44789 100644 --- a/tests/components/hydrawise/snapshots/test_sensor.ambr +++ b/tests/components/hydrawise/snapshots/test_sensor.ambr @@ -54,6 +54,55 @@ 'state': '1259.0279593584', }) # --- +# name: test_all_sensors[sensor.home_controller_daily_active_watering_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_controller_daily_active_watering_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily active watering time', + 'platform': 'hydrawise', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_active_water_time', + 'unique_id': '52496_daily_active_water_time', + 'unit_of_measurement': , + }) +# --- +# name: 
test_all_sensors[sensor.home_controller_daily_active_watering_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by hydrawise.com', + 'device_class': 'duration', + 'friendly_name': 'Home Controller Daily active watering time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_controller_daily_active_watering_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123.0', + }) +# --- # name: test_all_sensors[sensor.home_controller_daily_inactive_water_use-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -219,6 +268,55 @@ 'state': '454.6279552584', }) # --- +# name: test_all_sensors[sensor.zone_one_daily_active_watering_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.zone_one_daily_active_watering_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily active watering time', + 'platform': 'hydrawise', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_active_water_time', + 'unique_id': '5965394_daily_active_water_time', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.zone_one_daily_active_watering_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by hydrawise.com', + 'device_class': 'duration', + 'friendly_name': 'Zone One Daily active watering time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.zone_one_daily_active_watering_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123.0', + }) +# --- # name: 
test_all_sensors[sensor.zone_one_next_cycle-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -267,7 +365,7 @@ 'state': '2023-10-04T19:49:57+00:00', }) # --- -# name: test_all_sensors[sensor.zone_one_watering_time-entry] +# name: test_all_sensors[sensor.zone_one_remaining_watering_time-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -279,7 +377,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.zone_one_watering_time', + 'entity_id': 'sensor.zone_one_remaining_watering_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -291,7 +389,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Watering time', + 'original_name': 'Remaining watering time', 'platform': 'hydrawise', 'previous_unique_id': None, 'supported_features': 0, @@ -300,15 +398,15 @@ 'unit_of_measurement': , }) # --- -# name: test_all_sensors[sensor.zone_one_watering_time-state] +# name: test_all_sensors[sensor.zone_one_remaining_watering_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by hydrawise.com', - 'friendly_name': 'Zone One Watering time', + 'friendly_name': 'Zone One Remaining watering time', 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.zone_one_watering_time', + 'entity_id': 'sensor.zone_one_remaining_watering_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -371,6 +469,55 @@ 'state': '0.0', }) # --- +# name: test_all_sensors[sensor.zone_two_daily_active_watering_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.zone_two_daily_active_watering_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily active watering time', + 'platform': 'hydrawise', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_active_water_time', + 'unique_id': '5965395_daily_active_water_time', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.zone_two_daily_active_watering_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by hydrawise.com', + 'device_class': 'duration', + 'friendly_name': 'Zone Two Daily active watering time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.zone_two_daily_active_watering_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_all_sensors[sensor.zone_two_next_cycle-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -419,7 +566,7 @@ 'state': 'unknown', }) # --- -# name: test_all_sensors[sensor.zone_two_watering_time-entry] +# name: test_all_sensors[sensor.zone_two_remaining_watering_time-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -431,7 +578,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.zone_two_watering_time', + 'entity_id': 'sensor.zone_two_remaining_watering_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -443,7 +590,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Watering time', + 'original_name': 'Remaining watering time', 'platform': 'hydrawise', 'previous_unique_id': None, 'supported_features': 0, @@ -452,15 +599,15 @@ 'unit_of_measurement': , }) # --- -# name: test_all_sensors[sensor.zone_two_watering_time-state] +# name: test_all_sensors[sensor.zone_two_remaining_watering_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by hydrawise.com', - 'friendly_name': 'Zone Two Watering time', + 'friendly_name': 'Zone Two Remaining watering 
time', 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.zone_two_watering_time', + 'entity_id': 'sensor.zone_two_remaining_watering_time', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/hydrawise/snapshots/test_valve.ambr b/tests/components/hydrawise/snapshots/test_valve.ambr new file mode 100644 index 00000000000..cac08893324 --- /dev/null +++ b/tests/components/hydrawise/snapshots/test_valve.ambr @@ -0,0 +1,99 @@ +# serializer version: 1 +# name: test_all_valves[valve.zone_one-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'valve', + 'entity_category': None, + 'entity_id': 'valve.zone_one', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'hydrawise', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '5965394_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_valves[valve.zone_one-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by hydrawise.com', + 'device_class': 'water', + 'friendly_name': 'Zone One', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.zone_one', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_all_valves[valve.zone_two-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'valve', + 'entity_category': None, + 'entity_id': 'valve.zone_two', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'hydrawise', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '5965395_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_valves[valve.zone_two-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by hydrawise.com', + 'device_class': 'water', + 'friendly_name': 'Zone Two', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.zone_two', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index a7fbc008aab..e85b1b9b249 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ b/tests/components/hydrawise/test_config_flow.py @@ -46,7 +46,7 @@ async def test_form( CONF_PASSWORD: "__password__", } assert len(mock_setup_entry.mock_calls) == 1 - mock_pydrawise.get_user.assert_called_once_with() + mock_pydrawise.get_user.assert_called_once_with(fetch_zones=False) async def test_form_api_error( diff --git a/tests/components/hydrawise/test_sensor.py b/tests/components/hydrawise/test_sensor.py index af75ad69ade..b9ff99f0013 100644 --- a/tests/components/hydrawise/test_sensor.py +++ b/tests/components/hydrawise/test_sensor.py @@ -3,7 +3,7 @@ from collections.abc import Awaitable, Callable from unittest.mock import patch -from pydrawise.schema import Controller, User, Zone +from pydrawise.schema import Controller, ControllerWaterUseSummary, User, Zone import pytest from syrupy.assertion import SnapshotAssertion @@ -53,10 +53,15 @@ async def test_suspended_state( async def test_no_sensor_and_water_state( hass: HomeAssistant, controller: Controller, + controller_water_use_summary: ControllerWaterUseSummary, mock_add_config_entry: Callable[[], 
Awaitable[MockConfigEntry]], ) -> None: """Test rain sensor, flow sensor, and water use in the absence of flow and rain sensors.""" controller.sensors = [] + controller_water_use_summary.total_use = None + controller_water_use_summary.total_active_use = None + controller_water_use_summary.total_inactive_use = None + controller_water_use_summary.active_use_by_zone_id = {} await mock_add_config_entry() assert hass.states.get("sensor.zone_one_daily_active_water_use") is None @@ -65,6 +70,18 @@ async def test_no_sensor_and_water_state( assert hass.states.get("sensor.home_controller_daily_inactive_water_use") is None assert hass.states.get("binary_sensor.home_controller_rain_sensor") is None + sensor = hass.states.get("sensor.home_controller_daily_active_watering_time") + assert sensor is not None + assert sensor.state == "123.0" + + sensor = hass.states.get("sensor.zone_one_daily_active_watering_time") + assert sensor is not None + assert sensor.state == "123.0" + + sensor = hass.states.get("sensor.zone_two_daily_active_watering_time") + assert sensor is not None + assert sensor.state == "0.0" + sensor = hass.states.get("binary_sensor.home_controller_connectivity") assert sensor is not None assert sensor.state == "on" diff --git a/tests/components/hydrawise/test_services.py b/tests/components/hydrawise/test_services.py new file mode 100644 index 00000000000..f61a6786270 --- /dev/null +++ b/tests/components/hydrawise/test_services.py @@ -0,0 +1,93 @@ +"""Test Hydrawise services.""" + +from datetime import datetime +from unittest.mock import AsyncMock + +from pydrawise.schema import Zone + +from homeassistant.components.hydrawise.const import ( + ATTR_DURATION, + ATTR_UNTIL, + DOMAIN, + SERVICE_RESUME, + SERVICE_START_WATERING, + SERVICE_SUSPEND, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_start_watering( + hass: HomeAssistant, + mock_added_config_entry: 
MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test that the start_watering service works as intended.""" + await hass.services.async_call( + DOMAIN, + SERVICE_START_WATERING, + { + ATTR_ENTITY_ID: "binary_sensor.zone_one_watering", + ATTR_DURATION: 20, + }, + blocking=True, + ) + mock_pydrawise.start_zone.assert_called_once_with( + zones[0], custom_run_duration=20 * 60 + ) + + +async def test_start_watering_no_duration( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test that the start_watering service works with no duration specified.""" + await hass.services.async_call( + DOMAIN, + SERVICE_START_WATERING, + {ATTR_ENTITY_ID: "binary_sensor.zone_one_watering"}, + blocking=True, + ) + mock_pydrawise.start_zone.assert_called_once_with(zones[0], custom_run_duration=0) + + +async def test_resume( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test that the resume service works as intended.""" + await hass.services.async_call( + DOMAIN, + SERVICE_RESUME, + {ATTR_ENTITY_ID: "binary_sensor.zone_one_watering"}, + blocking=True, + ) + mock_pydrawise.resume_zone.assert_called_once_with(zones[0]) + + +async def test_suspend( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test that the suspend service works as intended.""" + await hass.services.async_call( + DOMAIN, + SERVICE_SUSPEND, + { + ATTR_ENTITY_ID: "binary_sensor.zone_one_watering", + ATTR_UNTIL: datetime(2026, 1, 1, 0, 0, 0), + }, + blocking=True, + ) + mock_pydrawise.suspend_zone.assert_called_once_with( + zones[0], until=datetime(2026, 1, 1, 0, 0, 0) + ) diff --git a/tests/components/hydrawise/test_valve.py b/tests/components/hydrawise/test_valve.py new file mode 100644 index 00000000000..918fae00017 --- /dev/null +++ 
b/tests/components/hydrawise/test_valve.py @@ -0,0 +1,59 @@ +"""Test Hydrawise valve.""" + +from collections.abc import Awaitable, Callable +from unittest.mock import AsyncMock, patch + +from pydrawise.schema import Zone +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.valve import DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_VALVE, + SERVICE_OPEN_VALVE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_valves( + hass: HomeAssistant, + mock_add_config_entry: Callable[[], Awaitable[MockConfigEntry]], + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that all valves are working.""" + with patch( + "homeassistant.components.hydrawise.PLATFORMS", + [Platform.VALVE], + ): + config_entry = await mock_add_config_entry() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +async def test_services( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test valve services.""" + await hass.services.async_call( + DOMAIN, + SERVICE_OPEN_VALVE, + service_data={ATTR_ENTITY_ID: "valve.zone_one"}, + blocking=True, + ) + mock_pydrawise.start_zone.assert_called_once_with(zones[0]) + mock_pydrawise.reset_mock() + + await hass.services.async_call( + DOMAIN, + SERVICE_CLOSE_VALVE, + service_data={ATTR_ENTITY_ID: "valve.zone_one"}, + blocking=True, + ) + mock_pydrawise.stop_zone.assert_called_once_with(zones[0]) diff --git a/tests/components/hyperion/test_camera.py b/tests/components/hyperion/test_camera.py index 41b66f4ad4a..0169759f328 100644 --- a/tests/components/hyperion/test_camera.py +++ b/tests/components/hyperion/test_camera.py @@ -198,7 +198,7 @@ async def test_device_info( device = 
device_registry.async_get_device(identifiers={(DOMAIN, device_id)}) assert device - assert device.config_entries == [TEST_CONFIG_ENTRY_ID] + assert device.config_entries == {TEST_CONFIG_ENTRY_ID} assert device.identifiers == {(DOMAIN, device_id)} assert device.manufacturer == HYPERION_MANUFACTURER_NAME assert device.model == HYPERION_MODEL_NAME diff --git a/tests/components/hyperion/test_config_flow.py b/tests/components/hyperion/test_config_flow.py index 57749f5eedc..fb4fa1fe671 100644 --- a/tests/components/hyperion/test_config_flow.py +++ b/tests/components/hyperion/test_config_flow.py @@ -427,7 +427,7 @@ async def test_auth_create_token_approval_declined_task_canceled( class CanceledAwaitableMock(AsyncMock): """A canceled awaitable mock.""" - def __init__(self): + def __init__(self) -> None: super().__init__() self.done = Mock(return_value=False) self.cancel = Mock() diff --git a/tests/components/hyperion/test_light.py b/tests/components/hyperion/test_light.py index b7aef3ac2ac..e1e7711e702 100644 --- a/tests/components/hyperion/test_light.py +++ b/tests/components/hyperion/test_light.py @@ -803,7 +803,7 @@ async def test_device_info( device = device_registry.async_get_device(identifiers={(DOMAIN, device_id)}) assert device - assert device.config_entries == [TEST_CONFIG_ENTRY_ID] + assert device.config_entries == {TEST_CONFIG_ENTRY_ID} assert device.identifiers == {(DOMAIN, device_id)} assert device.manufacturer == HYPERION_MANUFACTURER_NAME assert device.model == HYPERION_MODEL_NAME diff --git a/tests/components/hyperion/test_sensor.py b/tests/components/hyperion/test_sensor.py index bc58c07ac7b..5ace34eaac0 100644 --- a/tests/components/hyperion/test_sensor.py +++ b/tests/components/hyperion/test_sensor.py @@ -66,7 +66,7 @@ async def test_device_info( device = device_registry.async_get_device(identifiers={(DOMAIN, device_identifer)}) assert device - assert device.config_entries == [TEST_CONFIG_ENTRY_ID] + assert device.config_entries == {TEST_CONFIG_ENTRY_ID} 
assert device.identifiers == {(DOMAIN, device_identifer)} assert device.manufacturer == HYPERION_MANUFACTURER_NAME assert device.model == HYPERION_MODEL_NAME diff --git a/tests/components/hyperion/test_switch.py b/tests/components/hyperion/test_switch.py index 17a1872f832..da458820c81 100644 --- a/tests/components/hyperion/test_switch.py +++ b/tests/components/hyperion/test_switch.py @@ -170,7 +170,7 @@ async def test_device_info( device = device_registry.async_get_device(identifiers={(DOMAIN, device_identifer)}) assert device - assert device.config_entries == [TEST_CONFIG_ENTRY_ID] + assert device.config_entries == {TEST_CONFIG_ENTRY_ID} assert device.identifiers == {(DOMAIN, device_identifer)} assert device.manufacturer == HYPERION_MANUFACTURER_NAME assert device.model == HYPERION_MODEL_NAME diff --git a/tests/components/idasen_desk/conftest.py b/tests/components/idasen_desk/conftest.py index 91f3f2de40e..24ef8311445 100644 --- a/tests/components/idasen_desk/conftest.py +++ b/tests/components/idasen_desk/conftest.py @@ -1,11 +1,10 @@ """IKEA Idasen Desk fixtures.""" -from collections.abc import Callable +from collections.abc import Callable, Generator from unittest import mock from unittest.mock import AsyncMock, MagicMock import pytest -from typing_extensions import Generator @pytest.fixture(autouse=True) diff --git a/tests/components/idasen_desk/test_init.py b/tests/components/idasen_desk/test_init.py index 60f1fb3e5e3..ae7bd5e3fdf 100644 --- a/tests/components/idasen_desk/test_init.py +++ b/tests/components/idasen_desk/test_init.py @@ -1,6 +1,5 @@ """Test the IKEA Idasen Desk init.""" -import asyncio from unittest import mock from unittest.mock import AsyncMock, MagicMock @@ -66,63 +65,21 @@ async def test_reconnect_on_bluetooth_callback( mock_desk_api.connect.assert_called_once() mock_register_callback.assert_called_once() - mock_desk_api.is_connected = False _, register_callback_args, _ = mock_register_callback.mock_calls[0] bt_callback = 
register_callback_args[1] + + mock_desk_api.connect.reset_mock() bt_callback(None, None) await hass.async_block_till_done() - assert mock_desk_api.connect.call_count == 2 + mock_desk_api.connect.assert_called_once() - -async def test_duplicated_disconnect_is_no_op( - hass: HomeAssistant, mock_desk_api: MagicMock -) -> None: - """Test that calling disconnect while disconnecting is a no-op.""" - await init_integration(hass) - - await hass.services.async_call( - "button", "press", {"entity_id": "button.test_disconnect"}, blocking=True - ) - await hass.async_block_till_done() - - async def mock_disconnect(): - await asyncio.sleep(0) - - mock_desk_api.disconnect.reset_mock() - mock_desk_api.disconnect.side_effect = mock_disconnect - - # Since the disconnect button was pressed but the desk indicates "connected", - # any update event will call disconnect() - mock_desk_api.is_connected = True - mock_desk_api.trigger_update_callback(None) - mock_desk_api.trigger_update_callback(None) - mock_desk_api.trigger_update_callback(None) - await hass.async_block_till_done() - mock_desk_api.disconnect.assert_called_once() - - -async def test_ensure_connection_state( - hass: HomeAssistant, mock_desk_api: MagicMock -) -> None: - """Test that the connection state is ensured.""" - await init_integration(hass) - - mock_desk_api.connect.reset_mock() - mock_desk_api.is_connected = False - mock_desk_api.trigger_update_callback(None) - await hass.async_block_till_done() - mock_desk_api.connect.assert_called_once() - - await hass.services.async_call( - "button", "press", {"entity_id": "button.test_disconnect"}, blocking=True - ) - await hass.async_block_till_done() - - mock_desk_api.disconnect.reset_mock() - mock_desk_api.is_connected = True - mock_desk_api.trigger_update_callback(None) - await hass.async_block_till_done() - mock_desk_api.disconnect.assert_called_once() + mock_desk_api.connect.reset_mock() + await hass.services.async_call( + "button", "press", {"entity_id": 
"button.test_disconnect"}, blocking=True + ) + bt_callback(None, None) + await hass.async_block_till_done() + assert mock_desk_api.connect.call_count == 0 async def test_unload_entry(hass: HomeAssistant, mock_desk_api: MagicMock) -> None: diff --git a/tests/components/idasen_desk/test_sensors.py b/tests/components/idasen_desk/test_sensors.py index a236555a506..614bce523e6 100644 --- a/tests/components/idasen_desk/test_sensors.py +++ b/tests/components/idasen_desk/test_sensors.py @@ -4,10 +4,13 @@ from unittest.mock import MagicMock import pytest +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from . import init_integration +EXPECTED_INITIAL_HEIGHT = "1" + @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_height_sensor(hass: HomeAssistant, mock_desk_api: MagicMock) -> None: @@ -17,7 +20,7 @@ async def test_height_sensor(hass: HomeAssistant, mock_desk_api: MagicMock) -> N entity_id = "sensor.test_height" state = hass.states.get(entity_id) assert state - assert state.state == "1" + assert state.state == EXPECTED_INITIAL_HEIGHT mock_desk_api.height = 1.2 mock_desk_api.trigger_update_callback(None) @@ -25,3 +28,24 @@ async def test_height_sensor(hass: HomeAssistant, mock_desk_api: MagicMock) -> N state = hass.states.get(entity_id) assert state assert state.state == "1.2" + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_available( + hass: HomeAssistant, + mock_desk_api: MagicMock, +) -> None: + """Test sensor available property.""" + await init_integration(hass) + + entity_id = "sensor.test_height" + state = hass.states.get(entity_id) + assert state + assert state.state == EXPECTED_INITIAL_HEIGHT + + mock_desk_api.is_connected = False + mock_desk_api.trigger_update_callback(None) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/image/conftest.py 
b/tests/components/image/conftest.py index 65bbf2e0c4f..8bb5d19b6db 100644 --- a/tests/components/image/conftest.py +++ b/tests/components/image/conftest.py @@ -1,7 +1,8 @@ """Test helpers for image.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.components import image from homeassistant.config_entries import ConfigEntry, ConfigFlow diff --git a/tests/components/image/test_media_source.py b/tests/components/image/test_media_source.py index 2037641a1a3..73cc76b9fb7 100644 --- a/tests/components/image/test_media_source.py +++ b/tests/components/image/test_media_source.py @@ -8,7 +8,7 @@ from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) -async def setup_media_source(hass): +async def setup_media_source(hass: HomeAssistant) -> None: """Set up media source.""" assert await async_setup_component(hass, "media_source", {}) diff --git a/tests/components/imap/conftest.py b/tests/components/imap/conftest.py index 354c9fbe24e..87663031e7a 100644 --- a/tests/components/imap/conftest.py +++ b/tests/components/imap/conftest.py @@ -1,10 +1,10 @@ """Fixtures for imap tests.""" +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch from aioimaplib import AUTH, LOGOUT, NONAUTH, SELECTED, STARTED, Response import pytest -from typing_extensions import AsyncGenerator, Generator from .const import EMPTY_SEARCH_RESPONSE, TEST_FETCH_RESPONSE_TEXT_PLAIN diff --git a/tests/components/imgw_pib/conftest.py b/tests/components/imgw_pib/conftest.py index 1d278856b5b..6f23ed3ee80 100644 --- a/tests/components/imgw_pib/conftest.py +++ b/tests/components/imgw_pib/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the IMGW-PIB tests.""" +from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch from imgw_pib import HydrologicalData, SensorData import pytest -from typing_extensions 
import Generator from homeassistant.components.imgw_pib.const import DOMAIN diff --git a/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr b/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr index f314a4be590..c5ae6880022 100644 --- a/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr +++ b/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr @@ -95,101 +95,3 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[binary_sensor.station_name_flood_alarm-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.station_name_flood_alarm', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Flood alarm', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flood_alarm', - 'unique_id': '123_flood_alarm', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.station_name_flood_alarm-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'alarm_level': 630.0, - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'safety', - 'friendly_name': 'Station Name Flood alarm', - }), - 'context': , - 'entity_id': 'binary_sensor.station_name_flood_alarm', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.station_name_flood_warning-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 
'binary_sensor.station_name_flood_warning', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Flood warning', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flood_warning', - 'unique_id': '123_flood_warning', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.station_name_flood_warning-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'safety', - 'friendly_name': 'Station Name Flood warning', - 'warning_level': 590.0, - }), - 'context': , - 'entity_id': 'binary_sensor.station_name_flood_warning', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/imgw_pib/snapshots/test_sensor.ambr b/tests/components/imgw_pib/snapshots/test_sensor.ambr index 2638e468d92..6c69b890842 100644 --- a/tests/components/imgw_pib/snapshots/test_sensor.ambr +++ b/tests/components/imgw_pib/snapshots/test_sensor.ambr @@ -213,113 +213,3 @@ 'state': '10.8', }) # --- -# name: test_sensor[sensor.station_name_water_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.station_name_water_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water level', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'water_level', - 'unique_id': '123_water_level', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.station_name_water_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'distance', - 'friendly_name': 'Station Name Water level', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.station_name_water_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '526.0', - }) -# --- -# name: test_sensor[sensor.station_name_water_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.station_name_water_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water temperature', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_temperature', - 'unique_id': '123_water_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.station_name_water_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'temperature', - 'friendly_name': 'Station Name Water temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.station_name_water_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.8', - }) -# --- diff --git a/tests/components/imgw_pib/test_diagnostics.py 
b/tests/components/imgw_pib/test_diagnostics.py index 62dabc982c4..14d4e7a5224 100644 --- a/tests/components/imgw_pib/test_diagnostics.py +++ b/tests/components/imgw_pib/test_diagnostics.py @@ -28,4 +28,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("entry_id")) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/improv_ble/test_config_flow.py b/tests/components/improv_ble/test_config_flow.py index 53da1f28425..640a931bee5 100644 --- a/tests/components/improv_ble/test_config_flow.py +++ b/tests/components/improv_ble/test_config_flow.py @@ -543,7 +543,7 @@ async def test_authorize_fails(hass: HomeAssistant, exc, error) -> None: assert result["reason"] == error -async def _test_provision_error(hass: HomeAssistant, exc) -> None: +async def _test_provision_error(hass: HomeAssistant, exc) -> str: """Test bluetooth flow with error.""" result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/incomfort/conftest.py b/tests/components/incomfort/conftest.py index 64885e38b65..f17547a1445 100644 --- a/tests/components/incomfort/conftest.py +++ b/tests/components/incomfort/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Intergas InComfort integration.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from incomfortclient import DisplayCode import pytest -from typing_extensions import Generator from homeassistant.components.incomfort import DOMAIN from homeassistant.config_entries import ConfigEntry @@ -77,10 +77,9 @@ def mock_room_status() -> dict[str, Any]: @pytest.fixture def mock_incomfort( - hass: HomeAssistant, mock_heater_status: dict[str, Any], mock_room_status: dict[str, Any], -) -> Generator[MagicMock, None]: +) -> Generator[MagicMock]: """Mock the InComfort gateway client.""" class MockRoom: diff --git 
a/tests/components/influxdb/test_init.py b/tests/components/influxdb/test_init.py index aba153cf8a8..e9592a06fe2 100644 --- a/tests/components/influxdb/test_init.py +++ b/tests/components/influxdb/test_init.py @@ -1,5 +1,6 @@ """The tests for the InfluxDB component.""" +from collections.abc import Generator from dataclasses import dataclass import datetime from http import HTTPStatus @@ -7,7 +8,6 @@ import logging from unittest.mock import ANY, MagicMock, Mock, call, patch import pytest -from typing_extensions import Generator from homeassistant.components import influxdb from homeassistant.components.influxdb.const import DEFAULT_BUCKET @@ -43,7 +43,7 @@ class FilterTest: @pytest.fixture(autouse=True) -def mock_batch_timeout(hass, monkeypatch): +def mock_batch_timeout(monkeypatch: pytest.MonkeyPatch) -> None: """Mock the event bus listener and the batch timeout for tests.""" monkeypatch.setattr( f"{INFLUX_PATH}.InfluxThread.batch_timeout", @@ -79,7 +79,6 @@ def get_mock_call_fixture(request: pytest.FixtureRequest): if request.param == influxdb.API_VERSION_2: return lambda body, precision=None: v2_call(body, precision) - # pylint: disable-next=unnecessary-lambda return lambda body, precision=None: call(body, time_precision=precision) diff --git a/tests/components/influxdb/test_sensor.py b/tests/components/influxdb/test_sensor.py index 48cae2a3ae6..73dd8375a00 100644 --- a/tests/components/influxdb/test_sensor.py +++ b/tests/components/influxdb/test_sensor.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from dataclasses import dataclass from datetime import timedelta from http import HTTPStatus @@ -10,7 +11,6 @@ from unittest.mock import MagicMock, patch from influxdb.exceptions import InfluxDBClientError, InfluxDBServerError from influxdb_client.rest import ApiException import pytest -from typing_extensions import Generator from voluptuous import Invalid from homeassistant.components import sensor diff --git 
a/tests/components/insteon/const.py b/tests/components/insteon/const.py index c35db3b7092..a4e4e8a390d 100644 --- a/tests/components/insteon/const.py +++ b/tests/components/insteon/const.py @@ -79,5 +79,4 @@ PATCH_CONNECTION = "homeassistant.components.insteon.config_flow.async_connect" PATCH_CONNECTION_CLOSE = "homeassistant.components.insteon.config_flow.async_close" PATCH_DEVICES = "homeassistant.components.insteon.config_flow.devices" PATCH_USB_LIST = "homeassistant.components.insteon.config_flow.async_get_usb_ports" -PATCH_ASYNC_SETUP = "homeassistant.components.insteon.async_setup" PATCH_ASYNC_SETUP_ENTRY = "homeassistant.components.insteon.async_setup_entry" diff --git a/tests/components/insteon/mock_devices.py b/tests/components/insteon/mock_devices.py index 6b5f5cf5e09..2c385c337fd 100644 --- a/tests/components/insteon/mock_devices.py +++ b/tests/components/insteon/mock_devices.py @@ -30,7 +30,7 @@ class MockSwitchLinc(SwitchedLightingControl_SwitchLinc02): class MockDevices: """Mock devices class.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Init the MockDevices class.""" self._devices = {} self.modem = None diff --git a/tests/components/insteon/test_api_scenes.py b/tests/components/insteon/test_api_scenes.py index 1b8d4d50f08..14001e0495d 100644 --- a/tests/components/insteon/test_api_scenes.py +++ b/tests/components/insteon/test_api_scenes.py @@ -1,7 +1,8 @@ """Test the Insteon Scenes APIs.""" -import json +from collections.abc import Generator import os +from typing import Any from unittest.mock import AsyncMock, patch from pyinsteon.constants import ResponseStatus @@ -11,21 +12,22 @@ import pytest from homeassistant.components.insteon.api import async_load_api, scenes from homeassistant.components.insteon.const import ID, TYPE from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType from .mock_devices import MockDevices -from tests.common import load_fixture -from 
tests.typing import WebSocketGenerator +from tests.common import load_json_array_fixture +from tests.typing import MockHAClientWebSocket, WebSocketGenerator @pytest.fixture(name="scene_data", scope="module") -def aldb_data_fixture(): +def aldb_data_fixture() -> JsonArrayType: """Load the controller state fixture data.""" - return json.loads(load_fixture("insteon/scene_data.json")) + return load_json_array_fixture("insteon/scene_data.json") @pytest.fixture(name="remove_json") -def remove_insteon_devices_json(hass): +def remove_insteon_devices_json(hass: HomeAssistant) -> Generator[None]: """Fixture to remove insteon_devices.json at the end of the test.""" yield file = os.path.join(hass.config.config_dir, "insteon_devices.json") @@ -33,7 +35,7 @@ def remove_insteon_devices_json(hass): os.remove(file) -def _scene_to_array(scene): +def _scene_to_array(scene: dict[str, Any]) -> list[dict[str, Any]]: """Convert a scene object to a dictionary.""" scene_list = [] for device, links in scene["devices"].items(): @@ -47,7 +49,9 @@ def _scene_to_array(scene): return scene_list -async def _setup(hass, hass_ws_client, scene_data): +async def _setup( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType +) -> tuple[MockHAClientWebSocket, MockDevices]: """Set up tests.""" ws_client = await hass_ws_client(hass) devices = MockDevices() @@ -63,7 +67,7 @@ async def _setup(hass, hass_ws_client, scene_data): # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_scenes( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType ) -> None: """Test getting all Insteon scenes.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -79,7 +83,7 @@ async def test_get_scenes( # This tests needs to be adjusted to remove lingering tasks 
@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType ) -> None: """Test getting an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -93,8 +97,11 @@ async def test_get_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("remove_json") async def test_save_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + scene_data: JsonArrayType, ) -> None: """Test saving an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -125,8 +132,11 @@ async def test_save_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("remove_json") async def test_save_new_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + scene_data: JsonArrayType, ) -> None: """Test saving a new Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -157,8 +167,11 @@ async def test_save_new_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("remove_json") async def test_save_scene_error( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + scene_data: JsonArrayType, ) -> None: """Test saving an Insteon scene with error.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -189,8 +202,11 @@ async def 
test_save_scene_error( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("remove_json") async def test_delete_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + scene_data: JsonArrayType, ) -> None: """Test delete an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) diff --git a/tests/components/insteon/test_config_flow.py b/tests/components/insteon/test_config_flow.py index 4d3fb815463..51fdd7a550d 100644 --- a/tests/components/insteon/test_config_flow.py +++ b/tests/components/insteon/test_config_flow.py @@ -25,7 +25,6 @@ from .const import ( MOCK_USER_INPUT_HUB_V2, MOCK_USER_INPUT_PLM, MOCK_USER_INPUT_PLM_MANUAL, - PATCH_ASYNC_SETUP, PATCH_ASYNC_SETUP_ENTRY, PATCH_CONNECTION, PATCH_USB_LIST, @@ -81,7 +80,6 @@ async def _device_form(hass, flow_id, connection, user_input): PATCH_CONNECTION, new=connection, ), - patch(PATCH_ASYNC_SETUP, return_value=True) as mock_setup, patch( PATCH_ASYNC_SETUP_ENTRY, return_value=True, @@ -89,7 +87,7 @@ async def _device_form(hass, flow_id, connection, user_input): ): result = await hass.config_entries.flow.async_configure(flow_id, user_input) await hass.async_block_till_done() - return result, mock_setup, mock_setup_entry + return result, mock_setup_entry async def test_form_select_modem(hass: HomeAssistant) -> None: @@ -125,13 +123,12 @@ async def test_form_select_plm(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"] == MOCK_USER_INPUT_PLM - assert len(mock_setup.mock_calls) == 1 assert 
len(mock_setup_entry.mock_calls) == 1 @@ -142,7 +139,7 @@ async def test_form_select_plm_no_usb(hass: HomeAssistant) -> None: USB_PORTS.clear() result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_successful_connection, None ) USB_PORTS.update(temp_usb_list) @@ -155,18 +152,17 @@ async def test_form_select_plm_manual(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM_MANUAL ) - result3, mock_setup, mock_setup_entry = await _device_form( + result3, mock_setup_entry = await _device_form( hass, result2["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.FORM assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["data"] == MOCK_USER_INPUT_PLM - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -175,7 +171,7 @@ async def test_form_select_hub_v1(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V1) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_HUB_V1 ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -184,7 +180,6 @@ async def test_form_select_hub_v1(hass: HomeAssistant) -> None: CONF_HUB_VERSION: 1, } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -193,7 +188,7 @@ async def test_form_select_hub_v2(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V2) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_HUB_V2 ) assert 
result2["type"] is FlowResultType.CREATE_ENTRY @@ -202,7 +197,6 @@ async def test_form_select_hub_v2(hass: HomeAssistant) -> None: CONF_HUB_VERSION: 2, } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -233,7 +227,7 @@ async def test_failed_connection_plm(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.FORM @@ -245,10 +239,10 @@ async def test_failed_connection_plm_manually(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM_MANUAL ) - result3, _, _ = await _device_form( + result3, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM ) assert result3["type"] is FlowResultType.FORM @@ -260,7 +254,7 @@ async def test_failed_connection_hub(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V2) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_HUB_V2 ) assert result2["type"] is FlowResultType.FORM @@ -284,7 +278,7 @@ async def test_discovery_via_usb(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm_usb" - with patch(PATCH_CONNECTION), patch(PATCH_ASYNC_SETUP, return_value=True): + with patch(PATCH_CONNECTION): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) diff --git a/tests/components/integration/test_sensor.py b/tests/components/integration/test_sensor.py index 243504cb3e0..974c8bb8691 100644 --- a/tests/components/integration/test_sensor.py +++ b/tests/components/integration/test_sensor.py @@ -294,7 +294,24 
@@ async def test_restore_state_failed(hass: HomeAssistant, extra_attributes) -> No assert state.state == STATE_UNKNOWN -async def test_trapezoidal(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("force_update", [False, True]) +@pytest.mark.parametrize( + "sequence", + [ + ( + (20, 10, 1.67), + (30, 30, 5.0), + (40, 5, 7.92), + (50, 5, 8.75), + (60, 0, 9.17), + ), + ], +) +async def test_trapezoidal( + hass: HomeAssistant, + sequence: tuple[tuple[float, float, float], ...], + force_update: bool, +) -> None: """Test integration sensor state.""" config = { "sensor": { @@ -314,25 +331,39 @@ async def test_trapezoidal(hass: HomeAssistant) -> None: start_time = dt_util.utcnow() with freeze_time(start_time) as freezer: # Testing a power sensor with non-monotonic intervals and values - for time, value in ((20, 10), (30, 30), (40, 5), (50, 0)): + for time, value, expected in sequence: freezer.move_to(start_time + timedelta(minutes=time)) hass.states.async_set( entity_id, value, {ATTR_UNIT_OF_MEASUREMENT: UnitOfPower.KILO_WATT}, - force_update=True, + force_update=force_update, ) await hass.async_block_till_done() - - state = hass.states.get("sensor.integration") - assert state is not None - - assert round(float(state.state), config["sensor"]["round"]) == 8.33 + state = hass.states.get("sensor.integration") + assert round(float(state.state), config["sensor"]["round"]) == expected assert state.attributes.get("unit_of_measurement") == UnitOfEnergy.KILO_WATT_HOUR -async def test_left(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("force_update", [False, True]) +@pytest.mark.parametrize( + "sequence", + [ + ( + (20, 10, 0.0), + (30, 30, 1.67), + (40, 5, 6.67), + (50, 5, 7.5), + (60, 0, 8.33), + ), + ], +) +async def test_left( + hass: HomeAssistant, + sequence: tuple[tuple[float, float, float], ...], + force_update: bool, +) -> None: """Test integration sensor state with left reimann method.""" config = { "sensor": { @@ -353,26 +384,41 @@ async def 
test_left(hass: HomeAssistant) -> None: await hass.async_block_till_done() # Testing a power sensor with non-monotonic intervals and values - for time, value in ((20, 10), (30, 30), (40, 5), (50, 0)): - now = dt_util.utcnow() + timedelta(minutes=time) - with freeze_time(now): + start_time = dt_util.utcnow() + with freeze_time(start_time) as freezer: + for time, value, expected in sequence: + freezer.move_to(start_time + timedelta(minutes=time)) hass.states.async_set( entity_id, value, {ATTR_UNIT_OF_MEASUREMENT: UnitOfPower.KILO_WATT}, - force_update=True, + force_update=force_update, ) await hass.async_block_till_done() - - state = hass.states.get("sensor.integration") - assert state is not None - - assert round(float(state.state), config["sensor"]["round"]) == 7.5 + state = hass.states.get("sensor.integration") + assert round(float(state.state), config["sensor"]["round"]) == expected assert state.attributes.get("unit_of_measurement") == UnitOfEnergy.KILO_WATT_HOUR -async def test_right(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("force_update", [False, True]) +@pytest.mark.parametrize( + "sequence", + [ + ( + (20, 10, 3.33), + (30, 30, 8.33), + (40, 5, 9.17), + (50, 5, 10.0), + (60, 0, 10.0), + ), + ], +) +async def test_right( + hass: HomeAssistant, + sequence: tuple[tuple[float, float, float], ...], + force_update: bool, +) -> None: """Test integration sensor state with left reimann method.""" config = { "sensor": { @@ -393,21 +439,19 @@ async def test_right(hass: HomeAssistant) -> None: await hass.async_block_till_done() # Testing a power sensor with non-monotonic intervals and values - for time, value in ((20, 10), (30, 30), (40, 5), (50, 0)): - now = dt_util.utcnow() + timedelta(minutes=time) - with freeze_time(now): + start_time = dt_util.utcnow() + with freeze_time(start_time) as freezer: + for time, value, expected in sequence: + freezer.move_to(start_time + timedelta(minutes=time)) hass.states.async_set( entity_id, value, 
{ATTR_UNIT_OF_MEASUREMENT: UnitOfPower.KILO_WATT}, - force_update=True, + force_update=force_update, ) await hass.async_block_till_done() - - state = hass.states.get("sensor.integration") - assert state is not None - - assert round(float(state.state), config["sensor"]["round"]) == 9.17 + state = hass.states.get("sensor.integration") + assert round(float(state.state), config["sensor"]["round"]) == expected assert state.attributes.get("unit_of_measurement") == UnitOfEnergy.KILO_WATT_HOUR diff --git a/tests/components/intellifire/conftest.py b/tests/components/intellifire/conftest.py index 1aae4fb6dd6..cf1e085c10f 100644 --- a/tests/components/intellifire/conftest.py +++ b/tests/components/intellifire/conftest.py @@ -1,10 +1,10 @@ """Fixtures for IntelliFire integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, Mock, patch from aiohttp.client_reqrep import ConnectionKey import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/intent/test_timers.py b/tests/components/intent/test_timers.py index a884fd13de5..d194d532513 100644 --- a/tests/components/intent/test_timers.py +++ b/tests/components/intent/test_timers.py @@ -54,6 +54,7 @@ async def test_start_finish_timer(hass: HomeAssistant, init_components) -> None: assert timer.start_minutes is None assert timer.start_seconds == 0 assert timer.seconds_left == 0 + assert timer.created_seconds == 0 if event_type == TimerEventType.STARTED: timer_id = timer.id @@ -64,6 +65,7 @@ async def test_start_finish_timer(hass: HomeAssistant, init_components) -> None: async_register_timer_handler(hass, device_id, handle_timer) + # A device that has been registered to handle timers is required result = await intent.async_handle( hass, "test", @@ -185,6 +187,27 @@ async def test_cancel_timer(hass: HomeAssistant, init_components) -> None: async with asyncio.timeout(1): await cancelled_event.wait() + # Cancel without a device + timer_name = 
None + started_event.clear() + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + { + "hours": {"value": 1}, + "minutes": {"value": 2}, + "seconds": {"value": 3}, + }, + device_id=device_id, + ) + + async with asyncio.timeout(1): + await started_event.wait() + + result = await intent.async_handle(hass, "test", intent.INTENT_CANCEL_TIMER, {}) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + async def test_increase_timer(hass: HomeAssistant, init_components) -> None: """Test increasing the time of a running timer.""" @@ -196,6 +219,7 @@ async def test_increase_timer(hass: HomeAssistant, init_components) -> None: timer_name = "test timer" timer_id: str | None = None original_total_seconds = -1 + seconds_added = 0 @callback def handle_timer(event_type: TimerEventType, timer: TimerInfo) -> None: @@ -216,12 +240,14 @@ async def test_increase_timer(hass: HomeAssistant, init_components) -> None: + (60 * timer.start_minutes) + timer.start_seconds ) + assert timer.created_seconds == original_total_seconds started_event.set() elif event_type == TimerEventType.UPDATED: assert timer.id == timer_id # Timer was increased assert timer.seconds_left > original_total_seconds + assert timer.created_seconds == original_total_seconds + seconds_added updated_event.set() elif event_type == TimerEventType.CANCELLED: assert timer.id == timer_id @@ -248,6 +274,7 @@ async def test_increase_timer(hass: HomeAssistant, init_components) -> None: await started_event.wait() # Adding 0 seconds has no effect + seconds_added = 0 result = await intent.async_handle( hass, "test", @@ -260,13 +287,13 @@ async def test_increase_timer(hass: HomeAssistant, init_components) -> None: "minutes": {"value": 0}, "seconds": {"value": 0}, }, - device_id=device_id, ) assert result.response_type == intent.IntentResponseType.ACTION_DONE assert not updated_event.is_set() # Add 30 seconds to the timer + seconds_added = (1 * 60 * 60) + (5 * 60) + 30 result = await 
intent.async_handle( hass, "test", @@ -279,7 +306,6 @@ async def test_increase_timer(hass: HomeAssistant, init_components) -> None: "minutes": {"value": 5}, "seconds": {"value": 30}, }, - device_id=device_id, ) assert result.response_type == intent.IntentResponseType.ACTION_DONE @@ -293,7 +319,6 @@ async def test_increase_timer(hass: HomeAssistant, init_components) -> None: "test", intent.INTENT_CANCEL_TIMER, {"name": {"value": timer_name}}, - device_id=device_id, ) assert result.response_type == intent.IntentResponseType.ACTION_DONE @@ -338,6 +363,7 @@ async def test_decrease_timer(hass: HomeAssistant, init_components) -> None: # Timer was decreased assert timer.seconds_left <= (original_total_seconds - 30) + assert timer.created_seconds == original_total_seconds updated_event.set() elif event_type == TimerEventType.CANCELLED: @@ -375,7 +401,6 @@ async def test_decrease_timer(hass: HomeAssistant, init_components) -> None: "start_seconds": {"value": 3}, "seconds": {"value": 30}, }, - device_id=device_id, ) assert result.response_type == intent.IntentResponseType.ACTION_DONE @@ -389,7 +414,6 @@ async def test_decrease_timer(hass: HomeAssistant, init_components) -> None: "test", intent.INTENT_CANCEL_TIMER, {"name": {"value": timer_name}}, - device_id=device_id, ) assert result.response_type == intent.IntentResponseType.ACTION_DONE @@ -467,7 +491,6 @@ async def test_decrease_timer_below_zero(hass: HomeAssistant, init_components) - "start_seconds": {"value": 3}, "seconds": {"value": original_total_seconds + 1}, }, - device_id=device_id, ) assert result.response_type == intent.IntentResponseType.ACTION_DONE @@ -482,43 +505,25 @@ async def test_find_timer_failed(hass: HomeAssistant, init_components) -> None: """Test finding a timer with the wrong info.""" device_id = "test_device" - for intent_name in ( - intent.INTENT_START_TIMER, - intent.INTENT_CANCEL_TIMER, - intent.INTENT_PAUSE_TIMER, - intent.INTENT_UNPAUSE_TIMER, - intent.INTENT_INCREASE_TIMER, - 
intent.INTENT_DECREASE_TIMER, - intent.INTENT_TIMER_STATUS, - ): - if intent_name in ( + # No device id + with pytest.raises(TimersNotSupportedError): + await intent.async_handle( + hass, + "test", intent.INTENT_START_TIMER, - intent.INTENT_INCREASE_TIMER, - intent.INTENT_DECREASE_TIMER, - ): - slots = {"minutes": {"value": 5}} - else: - slots = {} + {"minutes": {"value": 5}}, + device_id=None, + ) - # No device id - with pytest.raises(TimersNotSupportedError): - await intent.async_handle( - hass, - "test", - intent_name, - slots, - device_id=None, - ) - - # Unregistered device - with pytest.raises(TimersNotSupportedError): - await intent.async_handle( - hass, - "test", - intent_name, - slots, - device_id=device_id, - ) + # Unregistered device + with pytest.raises(TimersNotSupportedError): + await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"minutes": {"value": 5}}, + device_id=device_id, + ) # Must register a handler before we can do anything with timers @callback @@ -543,7 +548,6 @@ async def test_find_timer_failed(hass: HomeAssistant, init_components) -> None: "test", intent.INTENT_INCREASE_TIMER, {"name": {"value": "PIZZA "}, "minutes": {"value": 1}}, - device_id=device_id, ) assert result.response_type == intent.IntentResponseType.ACTION_DONE @@ -554,7 +558,6 @@ async def test_find_timer_failed(hass: HomeAssistant, init_components) -> None: "test", intent.INTENT_CANCEL_TIMER, {"name": {"value": "does-not-exist"}}, - device_id=device_id, ) # Right start time @@ -563,7 +566,6 @@ async def test_find_timer_failed(hass: HomeAssistant, init_components) -> None: "test", intent.INTENT_INCREASE_TIMER, {"start_minutes": {"value": 5}, "minutes": {"value": 1}}, - device_id=device_id, ) assert result.response_type == intent.IntentResponseType.ACTION_DONE @@ -574,7 +576,6 @@ async def test_find_timer_failed(hass: HomeAssistant, init_components) -> None: "test", intent.INTENT_CANCEL_TIMER, {"start_minutes": {"value": 1}}, - device_id=device_id, ) 
@@ -903,9 +904,7 @@ async def test_pause_unpause_timer(hass: HomeAssistant, init_components) -> None # Pause the timer expected_active = False - result = await intent.async_handle( - hass, "test", intent.INTENT_PAUSE_TIMER, {}, device_id=device_id - ) + result = await intent.async_handle(hass, "test", intent.INTENT_PAUSE_TIMER, {}) assert result.response_type == intent.IntentResponseType.ACTION_DONE async with asyncio.timeout(1): @@ -913,16 +912,12 @@ async def test_pause_unpause_timer(hass: HomeAssistant, init_components) -> None # Pausing again will fail because there are no running timers with pytest.raises(TimerNotFoundError): - await intent.async_handle( - hass, "test", intent.INTENT_PAUSE_TIMER, {}, device_id=device_id - ) + await intent.async_handle(hass, "test", intent.INTENT_PAUSE_TIMER, {}) # Unpause the timer updated_event.clear() expected_active = True - result = await intent.async_handle( - hass, "test", intent.INTENT_UNPAUSE_TIMER, {}, device_id=device_id - ) + result = await intent.async_handle(hass, "test", intent.INTENT_UNPAUSE_TIMER, {}) assert result.response_type == intent.IntentResponseType.ACTION_DONE async with asyncio.timeout(1): @@ -930,9 +925,7 @@ async def test_pause_unpause_timer(hass: HomeAssistant, init_components) -> None # Unpausing again will fail because there are no paused timers with pytest.raises(TimerNotFoundError): - await intent.async_handle( - hass, "test", intent.INTENT_UNPAUSE_TIMER, {}, device_id=device_id - ) + await intent.async_handle(hass, "test", intent.INTENT_UNPAUSE_TIMER, {}) async def test_timer_not_found(hass: HomeAssistant) -> None: @@ -1101,13 +1094,14 @@ async def test_timer_status_with_names(hass: HomeAssistant, init_components) -> await started_event.wait() # No constraints returns all timers - result = await intent.async_handle( - hass, "test", intent.INTENT_TIMER_STATUS, {}, device_id=device_id - ) - assert result.response_type == intent.IntentResponseType.ACTION_DONE - timers = 
result.speech_slots.get("timers", []) - assert len(timers) == 4 - assert {t.get(ATTR_NAME) for t in timers} == {"pizza", "cookies", "chicken"} + for handle_device_id in (device_id, None): + result = await intent.async_handle( + hass, "test", intent.INTENT_TIMER_STATUS, {}, device_id=handle_device_id + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + timers = result.speech_slots.get("timers", []) + assert len(timers) == 4 + assert {t.get(ATTR_NAME) for t in timers} == {"pizza", "cookies", "chicken"} # Get status of cookie timer result = await intent.async_handle( @@ -1430,18 +1424,10 @@ async def test_start_timer_with_conversation_command( timer_name = "test timer" test_command = "turn on the lights" agent_id = "test_agent" - finished_event = asyncio.Event() - @callback - def handle_timer(event_type: TimerEventType, timer: TimerInfo) -> None: - if event_type == TimerEventType.FINISHED: - assert timer.conversation_command == test_command - assert timer.conversation_agent_id == agent_id - finished_event.set() + mock_handle_timer = MagicMock() + async_register_timer_handler(hass, device_id, mock_handle_timer) - async_register_timer_handler(hass, device_id, handle_timer) - - # Device id is required if no conversation command timer_manager = TimerManager(hass) with pytest.raises(ValueError): timer_manager.start_timer( @@ -1468,9 +1454,11 @@ async def test_start_timer_with_conversation_command( assert result.response_type == intent.IntentResponseType.ACTION_DONE - async with asyncio.timeout(1): - await finished_event.wait() + # No timer events for delayed commands + mock_handle_timer.assert_not_called() + # Wait for process service call to finish + await hass.async_block_till_done() mock_converse.assert_called_once() assert mock_converse.call_args.args[1] == test_command diff --git a/tests/components/intent_script/test_init.py b/tests/components/intent_script/test_init.py index 5f4c7b97b63..86f3a7aba46 100644 --- 
a/tests/components/intent_script/test_init.py +++ b/tests/components/intent_script/test_init.py @@ -3,11 +3,11 @@ from unittest.mock import patch from homeassistant import config as hass_config -from homeassistant.bootstrap import async_setup_component from homeassistant.components.intent_script import DOMAIN from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant from homeassistant.helpers import intent +from homeassistant.setup import async_setup_component from tests.common import async_mock_service, get_fixture_path diff --git a/tests/components/ios/test_init.py b/tests/components/ios/test_init.py index afefec1530c..ddf5835a1be 100644 --- a/tests/components/ios/test_init.py +++ b/tests/components/ios/test_init.py @@ -19,7 +19,7 @@ def mock_load_json(): @pytest.fixture(autouse=True) -def mock_dependencies(hass): +def mock_dependencies(hass: HomeAssistant) -> None: """Mock dependencies loaded.""" mock_component(hass, "zeroconf") mock_component(hass, "device_tracker") diff --git a/tests/components/iotawatt/conftest.py b/tests/components/iotawatt/conftest.py index f3a60e69021..9380154b53e 100644 --- a/tests/components/iotawatt/conftest.py +++ b/tests/components/iotawatt/conftest.py @@ -1,16 +1,18 @@ """Test fixtures for IoTaWatt.""" -from unittest.mock import AsyncMock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch import pytest from homeassistant.components.iotawatt import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture -def entry(hass): +def entry(hass: HomeAssistant) -> MockConfigEntry: """Mock config entry added to HA.""" entry = MockConfigEntry(domain=DOMAIN, data={"host": "1.2.3.4"}) entry.add_to_hass(hass) @@ -18,7 +20,7 @@ def entry(hass): @pytest.fixture -def mock_iotawatt(entry): +def mock_iotawatt(entry: MockConfigEntry) -> Generator[MagicMock]: """Mock iotawatt.""" with 
patch("homeassistant.components.iotawatt.coordinator.Iotawatt") as mock: instance = mock.return_value diff --git a/tests/components/iotawatt/test_init.py b/tests/components/iotawatt/test_init.py index 8b707780eb4..de3a2f9f829 100644 --- a/tests/components/iotawatt/test_init.py +++ b/tests/components/iotawatt/test_init.py @@ -1,5 +1,7 @@ """Test init.""" +from unittest.mock import MagicMock + import httpx from homeassistant.config_entries import ConfigEntryState @@ -8,8 +10,12 @@ from homeassistant.setup import async_setup_component from . import INPUT_SENSOR +from tests.common import MockConfigEntry -async def test_setup_unload(hass: HomeAssistant, mock_iotawatt, entry) -> None: + +async def test_setup_unload( + hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry +) -> None: """Test we can setup and unload an entry.""" mock_iotawatt.getSensors.return_value["sensors"]["my_sensor_key"] = INPUT_SENSOR assert await async_setup_component(hass, "iotawatt", {}) @@ -18,7 +24,7 @@ async def test_setup_unload(hass: HomeAssistant, mock_iotawatt, entry) -> None: async def test_setup_connection_failed( - hass: HomeAssistant, mock_iotawatt, entry + hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry ) -> None: """Test connection error during startup.""" mock_iotawatt.connect.side_effect = httpx.ConnectError("") @@ -27,7 +33,9 @@ async def test_setup_connection_failed( assert entry.state is ConfigEntryState.SETUP_RETRY -async def test_setup_auth_failed(hass: HomeAssistant, mock_iotawatt, entry) -> None: +async def test_setup_auth_failed( + hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry +) -> None: """Test auth error during startup.""" mock_iotawatt.connect.return_value = False assert await async_setup_component(hass, "iotawatt", {}) diff --git a/tests/components/iotawatt/test_sensor.py b/tests/components/iotawatt/test_sensor.py index ecf2f97c67a..eb1a240a82f 100644 --- a/tests/components/iotawatt/test_sensor.py +++ 
b/tests/components/iotawatt/test_sensor.py @@ -1,6 +1,7 @@ """Test setting up sensors.""" from datetime import timedelta +from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -25,7 +26,7 @@ from tests.common import async_fire_time_changed async def test_sensor_type_input( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt + hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt: MagicMock ) -> None: """Test input sensors work.""" assert await async_setup_component(hass, "iotawatt", {}) @@ -60,7 +61,7 @@ async def test_sensor_type_input( async def test_sensor_type_output( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt + hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt: MagicMock ) -> None: """Tests the sensor type of Output.""" mock_iotawatt.getSensors.return_value["sensors"]["my_watthour_sensor_key"] = ( diff --git a/tests/components/iotty/__init__.py b/tests/components/iotty/__init__.py new file mode 100644 index 00000000000..705b8218c8b --- /dev/null +++ b/tests/components/iotty/__init__.py @@ -0,0 +1 @@ +"""Tests for iotty.""" diff --git a/tests/components/iotty/conftest.py b/tests/components/iotty/conftest.py new file mode 100644 index 00000000000..9f858879cb9 --- /dev/null +++ b/tests/components/iotty/conftest.py @@ -0,0 +1,180 @@ +"""Fixtures for iotty integration tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from aiohttp import ClientSession +from iottycloud.device import Device +from iottycloud.lightswitch import LightSwitch +from iottycloud.verbs import LS_DEVICE_TYPE_UID, RESULT, STATUS, STATUS_OFF, STATUS_ON +import pytest + +from homeassistant import setup +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from 
tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker, mock_aiohttp_client + +CLIENT_ID = "client_id" +CLIENT_SECRET = "client_secret" +REDIRECT_URI = "https://example.com/auth/external/callback" + +test_devices = [ + Device("TestDevice0", "TEST_SERIAL_0", LS_DEVICE_TYPE_UID, "[TEST] Device Name 0"), + Device("TestDevice1", "TEST_SERIAL_1", LS_DEVICE_TYPE_UID, "[TEST] Device Name 1"), +] + + +ls_0 = LightSwitch( + "TestLS", "TEST_SERIAL_0", LS_DEVICE_TYPE_UID, "[TEST] Light switch 0" +) +ls_1 = LightSwitch( + "TestLS1", "TEST_SERIAL_1", LS_DEVICE_TYPE_UID, "[TEST] Light switch 1" +) +ls_2 = LightSwitch( + "TestLS2", "TEST_SERIAL_2", LS_DEVICE_TYPE_UID, "[TEST] Light switch 2" +) + +test_ls = [ls_0, ls_1] + +test_ls_one_removed = [ls_0] + +test_ls_one_added = [ + ls_0, + ls_1, + ls_2, +] + + +@pytest.fixture +async def local_oauth_impl(hass: HomeAssistant): + """Local implementation.""" + assert await setup.async_setup_component(hass, "auth", {}) + return config_entry_oauth2_flow.LocalOAuth2Implementation( + hass, DOMAIN, "client_id", "client_secret", "authorize_url", "https://token.url" + ) + + +@pytest.fixture +def aiohttp_client_session() -> None: + """AIOHTTP client session.""" + return ClientSession + + +@pytest.fixture +def mock_aioclient() -> Generator[AiohttpClientMocker]: + """Fixture to mock aioclient calls.""" + with mock_aiohttp_client() as mock_session: + yield mock_session + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="IOTTY00001", + domain=DOMAIN, + data={ + "auth_implementation": DOMAIN, + "token": { + "refresh_token": "REFRESH_TOKEN", + "access_token": "ACCESS_TOKEN_1", + "expires_in": 10, + "expires_at": 0, + "token_type": "bearer", + "random_other_data": "should_stay", + }, + CONF_HOST: "127.0.0.1", + CONF_MAC: "AA:BB:CC:DD:EE:FF", + CONF_PORT: 9123, + }, + unique_id="IOTTY00001", + ) + + 
+@pytest.fixture +def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock]: + """Mock async_forward_entry_setup.""" + with patch( + "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups" + ) as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.iotty.async_setup_entry", return_value=True + ) as mock_setup: + yield mock_setup + + +@pytest.fixture +def mock_iotty() -> Generator[MagicMock]: + """Mock IottyProxy.""" + with patch( + "homeassistant.components.iotty.api.IottyProxy", autospec=True + ) as iotty_mock: + yield iotty_mock + + +@pytest.fixture +def mock_coordinator() -> Generator[MagicMock]: + """Mock IottyDataUpdateCoordinator.""" + with patch( + "homeassistant.components.iotty.coordinator.IottyDataUpdateCoordinator", + autospec=True, + ) as coordinator_mock: + yield coordinator_mock + + +@pytest.fixture +def mock_get_devices_nodevices() -> Generator[AsyncMock]: + """Mock for get_devices, returning two objects.""" + + with patch("iottycloud.cloudapi.CloudApi.get_devices") as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_get_devices_twolightswitches() -> Generator[AsyncMock]: + """Mock for get_devices, returning two objects.""" + + with patch( + "iottycloud.cloudapi.CloudApi.get_devices", return_value=test_ls + ) as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_command_fn() -> Generator[AsyncMock]: + """Mock for command.""" + + with patch("iottycloud.cloudapi.CloudApi.command", return_value=None) as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_get_status_filled_off() -> Generator[AsyncMock]: + """Mock setting up a get_status.""" + + retval = {RESULT: {STATUS: STATUS_OFF}} + with patch( + "iottycloud.cloudapi.CloudApi.get_status", return_value=retval + ) as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_get_status_filled() -> Generator[AsyncMock]: + """Mock 
setting up a get_status.""" + + retval = {RESULT: {STATUS: STATUS_ON}} + with patch( + "iottycloud.cloudapi.CloudApi.get_status", return_value=retval + ) as mock_fn: + yield mock_fn diff --git a/tests/components/iotty/snapshots/test_switch.ambr b/tests/components/iotty/snapshots/test_switch.ambr new file mode 100644 index 00000000000..8ec22ed162a --- /dev/null +++ b/tests/components/iotty/snapshots/test_switch.ambr @@ -0,0 +1,126 @@ +# serializer version: 1 +# name: test_api_not_ok_entities_stay_the_same_as_before + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_api_throws_response_entities_stay_the_same_as_before + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_devices_creaction_ok[device] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'iotty', + 'TestLS', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'iotty', + 'model': None, + 'model_id': None, + 'name': '[TEST] Light switch 0 (TEST_SERIAL_0)', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices_creaction_ok[entity-ids] + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_devices_creaction_ok[entity] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_light_switch_0_test_serial_0', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'iotty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'TestLS', + 'unit_of_measurement': None, + }) +# --- +# name: test_devices_creaction_ok[state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': '[TEST] Light switch 0 (TEST_SERIAL_0)', + }), + 'context': , + 'entity_id': 'switch.test_light_switch_0_test_serial_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_devices_deletion_ok + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_devices_deletion_ok.1 + list([ + 'switch.test_light_switch_0_test_serial_0', + ]) +# --- +# name: test_devices_insertion_ok + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_devices_insertion_ok.1 + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + 'switch.test_light_switch_2_test_serial_2', + ]) +# --- +# name: test_setup_entry_ok_nodevices + list([ + ]) +# --- diff --git a/tests/components/iotty/test_api.py b/tests/components/iotty/test_api.py new file mode 100644 index 00000000000..6bb396f5d4d --- /dev/null +++ b/tests/components/iotty/test_api.py @@ -0,0 +1,82 @@ +"""Unit tests for iottycloud API.""" + +from unittest.mock import patch + +from aiohttp import ClientSession +import pytest + +from homeassistant.components.iotty import api +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + + 
+async def test_api_create_fail( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test API creation with no session.""" + + with pytest.raises(ValueError, match="websession"): + api.IottyProxy(hass, None, None) + + with pytest.raises(ValueError, match="oauth_session"): + api.IottyProxy(hass, aioclient_mock, None) + + +async def test_api_create_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + aiohttp_client_session: None, + local_oauth_impl: ClientSession, +) -> None: + """Test API creation. We're checking that we can create an IottyProxy without raising.""" + + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data["auth_implementation"] is not None + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + api.IottyProxy(hass, aiohttp_client_session, local_oauth_impl) + + +@patch( + "homeassistant.helpers.config_entry_oauth2_flow.OAuth2Session.valid_token", False +) +async def test_api_getaccesstoken_tokennotvalid_reloadtoken( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_aioclient: None, + aiohttp_client_session: ClientSession, +) -> None: + """Test getting access token. + + If a request with an invalid token is made, a request for a new token is done, + and the resulting token is used for future calls. 
+ """ + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + new_token = "ACCESS_TOKEN_1" + + mock_aioclient.post( + "https://token.url", json={"access_token": new_token, "expires_in": 100} + ) + + mock_aioclient.post("https://example.com", status=201) + + mock_config_entry.add_to_hass(hass) + oauth2_session = config_entry_oauth2_flow.OAuth2Session( + hass, mock_config_entry, local_oauth_impl + ) + + iotty = api.IottyProxy(hass, aiohttp_client_session, oauth2_session) + + tok = await iotty.async_get_access_token() + assert tok == new_token diff --git a/tests/components/iotty/test_config_flow.py b/tests/components/iotty/test_config_flow.py new file mode 100644 index 00000000000..83fa16ece56 --- /dev/null +++ b/tests/components/iotty/test_config_flow.py @@ -0,0 +1,102 @@ +"""Test the iotty config flow.""" + +from http import HTTPStatus +from unittest.mock import AsyncMock, MagicMock + +import multidict +import pytest + +from homeassistant import config_entries +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.iotty.application_credentials import OAUTH2_TOKEN +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.setup import async_setup_component + +from .conftest import CLIENT_ID, CLIENT_SECRET, REDIRECT_URI + +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + + +@pytest.fixture +async def setup_credentials(hass: HomeAssistant) -> None: + """Fixture to setup application credentials component.""" + await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential(CLIENT_ID, CLIENT_SECRET), + ) + + 
+@pytest.fixture +def current_request_with_host(current_request: MagicMock) -> None: + """Mock current request with a host header.""" + new_headers = multidict.CIMultiDict(current_request.get.return_value.headers) + new_headers[config_entry_oauth2_flow.HEADER_FRONTEND_BASE] = "https://example.com" + current_request.get.return_value = current_request.get.return_value.clone( + headers=new_headers + ) + + +async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: + """Test config flow base case with no credentials registered.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result.get("type") == FlowResultType.ABORT + assert result.get("reason") == "missing_credentials" + + +@pytest.mark.usefixtures("current_request_with_host", "setup_credentials") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_setup_entry: AsyncMock, +) -> None: + """Check full flow.""" + + await async_import_client_credential( + hass, DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET) + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER, "entry_id": DOMAIN} + ) + + assert result.get("type") == FlowResultType.EXTERNAL_STEP + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT_URI, + }, + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == HTTPStatus.OK + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert 
len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/iotty/test_init.py b/tests/components/iotty/test_init.py new file mode 100644 index 00000000000..ee8168fdf2f --- /dev/null +++ b/tests/components/iotty/test_init.py @@ -0,0 +1,73 @@ +"""Tests for the iotty integration.""" + +from unittest.mock import MagicMock + +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from tests.common import MockConfigEntry + + +async def test_load_unload_coordinator_called( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_coordinator: MagicMock, + local_oauth_impl, +) -> None: + """Test the configuration entry loading/unloading.""" + + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data["auth_implementation"] is not None + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + await hass.async_block_till_done() + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + mock_coordinator.assert_called_once() + + assert mock_config_entry.state is ConfigEntryState.LOADED + method_call = mock_coordinator.method_calls[0] + name, _, _ = method_call + assert name == "().async_config_entry_first_refresh" + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_load_unload_iottyproxy_called( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iotty: MagicMock, + local_oauth_impl, + mock_config_entries_async_forward_entry_setup, +) -> None: + """Test the configuration entry loading/unloading.""" + + mock_config_entry.add_to_hass(hass) + assert 
mock_config_entry.data["auth_implementation"] is not None + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + mock_iotty.assert_called_once() + + assert mock_config_entry.state is ConfigEntryState.LOADED + method_call = mock_iotty.method_calls[0] + name, _, _ = method_call + assert name == "().get_devices" + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/iotty/test_switch.py b/tests/components/iotty/test_switch.py new file mode 100644 index 00000000000..235a897c305 --- /dev/null +++ b/tests/components/iotty/test_switch.py @@ -0,0 +1,300 @@ +"""Unit tests the Hass SWITCH component.""" + +from aiohttp import ClientSession +from freezegun.api import FrozenDateTimeFactory +from iottycloud.verbs import RESULT, STATUS, STATUS_OFF, STATUS_ON +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.components.iotty.coordinator import UPDATE_INTERVAL +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import ( + config_entry_oauth2_flow, + device_registry as dr, + entity_registry as er, +) + +from .conftest import test_ls_one_added, test_ls_one_removed + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_turn_on_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled_off, + mock_command_fn, +) -> None: + """Issue a turnon command.""" + + 
entity_id = "switch.test_light_switch_0_test_serial_0" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATUS_OFF + + mock_get_status_filled_off.return_value = {RESULT: {STATUS: STATUS_ON}} + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == STATUS_ON + + +async def test_turn_off_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + mock_command_fn, +) -> None: + """Issue a turnoff command.""" + + entity_id = "switch.test_light_switch_0_test_serial_0" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATUS_ON + + mock_get_status_filled.return_value = {RESULT: {STATUS: STATUS_OFF}} + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == STATUS_OFF + + +async def test_setup_entry_ok_nodevices( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_status_filled, + snapshot: SnapshotAssertion, + mock_get_devices_nodevices, +) -> None: + """Correctly setup, with no iotty Devices to add to Hass.""" + + 
mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert hass.states.async_entity_ids_count() == 0 + assert hass.states.async_entity_ids() == snapshot + + +async def test_devices_creaction_ok( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, +) -> None: + """Test iotty switch creation.""" + + entity_id = "switch.test_light_switch_0_test_serial_0" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state == snapshot(name="state") + + assert (entry := entity_registry.async_get(entity_id)) + assert entry == snapshot(name="entity") + + assert entry.device_id + assert (device_entry := device_registry.async_get(entry.device_id)) + assert device_entry == snapshot(name="device") + + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == snapshot(name="entity-ids") + + +async def test_devices_deletion_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test iotty switch deletion.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 
+ assert hass.states.async_entity_ids() == snapshot + + mock_get_devices_twolightswitches.return_value = test_ls_one_removed + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should have one device + assert hass.states.async_entity_ids_count() == 1 + assert hass.states.async_entity_ids() == snapshot + + +async def test_devices_insertion_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test iotty switch insertion.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == snapshot + + mock_get_devices_twolightswitches.return_value = test_ls_one_added + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should have three devices + assert hass.states.async_entity_ids_count() == 3 + assert hass.states.async_entity_ids() == snapshot + + +async def test_api_not_ok_entities_stay_the_same_as_before( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test case of incorrect response from iotty API on getting device status.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert 
hass.states.async_entity_ids_count() == 2 + entity_ids = hass.states.async_entity_ids() + assert entity_ids == snapshot + + mock_get_status_filled.return_value = {RESULT: "Not a valid restul"} + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should still have have two devices + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == entity_ids + + +async def test_api_throws_response_entities_stay_the_same_as_before( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test case of incorrect response from iotty API on getting device status.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 + entity_ids = hass.states.async_entity_ids() + assert entity_ids == snapshot + + mock_get_devices_twolightswitches.return_value = test_ls_one_added + mock_get_status_filled.side_effect = Exception("Something went wrong") + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should still have have two devices + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == entity_ids diff --git a/tests/components/ipma/__init__.py b/tests/components/ipma/__init__.py index 799120e3966..ab5998c922f 100644 --- a/tests/components/ipma/__init__.py +++ b/tests/components/ipma/__init__.py @@ -108,6 +108,7 @@ class MockLocation: location=Forecast_Location(0, "", 0, 0, 0, "", (0, 0)), ), ] + raise ValueError(f"Unknown forecast period: {period}") name = "HomeTown" station = "HomeTown 
Station" diff --git a/tests/components/ipma/conftest.py b/tests/components/ipma/conftest.py index 7f3e82a8819..8f2a017dcb8 100644 --- a/tests/components/ipma/conftest.py +++ b/tests/components/ipma/conftest.py @@ -14,7 +14,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def config_entry(hass): +def config_entry(hass: HomeAssistant) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/ipma/snapshots/test_weather.ambr b/tests/components/ipma/snapshots/test_weather.ambr index 1142cb7cfe5..80f385546d1 100644 --- a/tests/components/ipma/snapshots/test_weather.ambr +++ b/tests/components/ipma/snapshots/test_weather.ambr @@ -1,119 +1,4 @@ # serializer version: 1 -# name: test_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': '100.0', - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }) -# --- -# name: test_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }) -# --- -# name: test_forecast_service[forecast] - dict({ - 'weather.hometown': dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': '100.0', - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[forecast].1 - dict({ - 
'weather.hometown': dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': 100.0, - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.hometown': dict({ diff --git a/tests/components/ipma/test_config_flow.py b/tests/components/ipma/test_config_flow.py index 38bb1dbf126..2a4c3517b2a 100644 --- a/tests/components/ipma/test_config_flow.py +++ b/tests/components/ipma/test_config_flow.py @@ -1,10 +1,10 @@ """Tests for IPMA config flow.""" +from collections.abc import Generator from unittest.mock import patch from pyipma import IPMAException import pytest -from typing_extensions import Generator from homeassistant.components.ipma.const import DOMAIN from 
homeassistant.config_entries import SOURCE_USER @@ -14,6 +14,8 @@ from homeassistant.data_entry_flow import FlowResultType from . import MockLocation +from tests.common import MockConfigEntry + @pytest.fixture(name="ipma_setup", autouse=True) def ipma_setup_fixture() -> Generator[None]: @@ -93,7 +95,9 @@ async def test_config_flow_failures(hass: HomeAssistant) -> None: } -async def test_flow_entry_already_exists(hass: HomeAssistant, init_integration) -> None: +async def test_flow_entry_already_exists( + hass: HomeAssistant, init_integration: MockConfigEntry +) -> None: """Test user input for config_entry that already exists. Test when the form should show when user puts existing location diff --git a/tests/components/ipma/test_diagnostics.py b/tests/components/ipma/test_diagnostics.py index b7d421a2ee5..26e54454947 100644 --- a/tests/components/ipma/test_diagnostics.py +++ b/tests/components/ipma/test_diagnostics.py @@ -4,6 +4,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -11,7 +12,7 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - init_integration, + init_integration: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" diff --git a/tests/components/ipma/test_weather.py b/tests/components/ipma/test_weather.py index b7ef1347ca5..997eb582083 100644 --- a/tests/components/ipma/test_weather.py +++ b/tests/components/ipma/test_weather.py @@ -4,6 +4,7 @@ import datetime from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory +from pyipma.observation import Observation import pytest from syrupy.assertion import SnapshotAssertion @@ -43,7 +44,7 @@ TEST_CONFIG_HOURLY = { class MockBadLocation(MockLocation): """Mock 
Location with unresponsive api.""" - async def observation(self, api): + async def observation(self, api) -> Observation | None: """Mock Observation.""" return None diff --git a/tests/components/ipp/conftest.py b/tests/components/ipp/conftest.py index 5e39a16f3b1..9a47cc3c355 100644 --- a/tests/components/ipp/conftest.py +++ b/tests/components/ipp/conftest.py @@ -1,11 +1,11 @@ """Fixtures for IPP integration tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from pyipp import Printer import pytest -from typing_extensions import Generator from homeassistant.components.ipp.const import CONF_BASE_PATH, DOMAIN from homeassistant.const import ( diff --git a/tests/components/iqvia/conftest.py b/tests/components/iqvia/conftest.py index 6fb14ca4d28..0d23b825c5a 100644 --- a/tests/components/iqvia/conftest.py +++ b/tests/components/iqvia/conftest.py @@ -1,18 +1,23 @@ """Define test fixtures for IQVIA.""" -import json +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import patch import pytest from homeassistant.components.iqvia.const import CONF_ZIP_CODE, DOMAIN +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -25,7 +30,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(hass): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_ZIP_CODE: "12345", @@ -33,59 +38,59 @@ def config_fixture(hass): 
@pytest.fixture(name="data_allergy_forecast", scope="package") -def data_allergy_forecast_fixture(): +def data_allergy_forecast_fixture() -> JsonObjectType: """Define allergy forecast data.""" - return json.loads(load_fixture("allergy_forecast_data.json", "iqvia")) + return load_json_object_fixture("allergy_forecast_data.json", "iqvia") @pytest.fixture(name="data_allergy_index", scope="package") -def data_allergy_index_fixture(): +def data_allergy_index_fixture() -> JsonObjectType: """Define allergy index data.""" - return json.loads(load_fixture("allergy_index_data.json", "iqvia")) + return load_json_object_fixture("allergy_index_data.json", "iqvia") @pytest.fixture(name="data_allergy_outlook", scope="package") -def data_allergy_outlook_fixture(): +def data_allergy_outlook_fixture() -> JsonObjectType: """Define allergy outlook data.""" - return json.loads(load_fixture("allergy_outlook_data.json", "iqvia")) + return load_json_object_fixture("allergy_outlook_data.json", "iqvia") @pytest.fixture(name="data_asthma_forecast", scope="package") -def data_asthma_forecast_fixture(): +def data_asthma_forecast_fixture() -> JsonObjectType: """Define asthma forecast data.""" - return json.loads(load_fixture("asthma_forecast_data.json", "iqvia")) + return load_json_object_fixture("asthma_forecast_data.json", "iqvia") @pytest.fixture(name="data_asthma_index", scope="package") -def data_asthma_index_fixture(): +def data_asthma_index_fixture() -> JsonObjectType: """Define asthma index data.""" - return json.loads(load_fixture("asthma_index_data.json", "iqvia")) + return load_json_object_fixture("asthma_index_data.json", "iqvia") @pytest.fixture(name="data_disease_forecast", scope="package") -def data_disease_forecast_fixture(): +def data_disease_forecast_fixture() -> JsonObjectType: """Define disease forecast data.""" - return json.loads(load_fixture("disease_forecast_data.json", "iqvia")) + return load_json_object_fixture("disease_forecast_data.json", "iqvia") 
@pytest.fixture(name="data_disease_index", scope="package") -def data_disease_index_fixture(): +def data_disease_index_fixture() -> JsonObjectType: """Define disease index data.""" - return json.loads(load_fixture("disease_index_data.json", "iqvia")) + return load_json_object_fixture("disease_index_data.json", "iqvia") @pytest.fixture(name="setup_iqvia") async def setup_iqvia_fixture( - hass, - config, - data_allergy_forecast, - data_allergy_index, - data_allergy_outlook, - data_asthma_forecast, - data_asthma_index, - data_disease_forecast, - data_disease_index, -): + hass: HomeAssistant, + config: dict[str, Any], + data_allergy_forecast: JsonObjectType, + data_allergy_index: JsonObjectType, + data_allergy_outlook: JsonObjectType, + data_asthma_forecast: JsonObjectType, + data_asthma_index: JsonObjectType, + data_disease_forecast: JsonObjectType, + data_disease_index: JsonObjectType, +) -> AsyncGenerator[None]: """Define a fixture to set up IQVIA.""" with ( patch( diff --git a/tests/components/iqvia/test_config_flow.py b/tests/components/iqvia/test_config_flow.py index 17c977a6b4c..22f473a3fb5 100644 --- a/tests/components/iqvia/test_config_flow.py +++ b/tests/components/iqvia/test_config_flow.py @@ -1,12 +1,17 @@ """Define tests for the IQVIA config flow.""" +from typing import Any + +import pytest + from homeassistant.components.iqvia import CONF_ZIP_CODE, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -async def test_duplicate_error(hass: HomeAssistant, config, config_entry) -> None: +@pytest.mark.usefixtures("config_entry") +async def test_duplicate_error(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test that errors are shown when duplicates are added.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config @@ -33,7 +38,8 @@ async def test_show_form(hass: HomeAssistant) -> None: assert 
result["step_id"] == "user" -async def test_step_user(hass: HomeAssistant, config, setup_iqvia) -> None: +@pytest.mark.usefixtures("setup_iqvia") +async def test_step_user(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test that the user step works (without MFA).""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config diff --git a/tests/components/iqvia/test_diagnostics.py b/tests/components/iqvia/test_diagnostics.py index 7c445c9b3e4..9d5639c311c 100644 --- a/tests/components/iqvia/test_diagnostics.py +++ b/tests/components/iqvia/test_diagnostics.py @@ -1,23 +1,24 @@ """Test IQVIA diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, - config_entry, + config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, - setup_iqvia, + setup_iqvia: None, # Needs to be injected after config_entry snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/iron_os/__init__.py b/tests/components/iron_os/__init__.py new file mode 100644 index 00000000000..4e27f2c741c --- /dev/null +++ b/tests/components/iron_os/__init__.py @@ -0,0 +1 @@ +"""Tests for the Pinecil integration.""" diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py new file mode 100644 index 00000000000..f489d7b7bb5 --- /dev/null +++ b/tests/components/iron_os/conftest.py @@ -0,0 +1,141 @@ +"""Fixtures for Pinecil tests.""" + 
+from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from bleak.backends.device import BLEDevice +from habluetooth import BluetoothServiceInfoBleak +from pynecil import DeviceInfoResponse, LiveDataResponse, OperatingMode, PowerSource +import pytest + +from homeassistant.components.iron_os import DOMAIN +from homeassistant.const import CONF_ADDRESS + +from tests.common import MockConfigEntry +from tests.components.bluetooth import generate_advertisement_data, generate_ble_device + +USER_INPUT = {CONF_ADDRESS: "c0:ff:ee:c0:ff:ee"} +DEFAULT_NAME = "Pinecil-C0FFEEE" +PINECIL_SERVICE_INFO = BluetoothServiceInfoBleak( + name="Pinecil-C0FFEEE", + address="c0:ff:ee:c0:ff:ee", + device=generate_ble_device( + address="c0:ff:ee:c0:ff:ee", + name="Pinecil-C0FFEEE", + ), + rssi=-61, + manufacturer_data={}, + service_data={}, + service_uuids=["9eae1000-9d0d-48c5-aa55-33e27f9bc533"], + source="local", + advertisement=generate_advertisement_data( + manufacturer_data={}, + service_uuids=["9eae1000-9d0d-48c5-aa55-33e27f9bc533"], + ), + connectable=True, + time=0, + tx_power=None, +) + +UNKNOWN_SERVICE_INFO = BluetoothServiceInfoBleak( + name="", + address="c0:ff:ee:c0:ff:ee", + device=generate_ble_device( + address="c0:ff:ee:c0:ff:ee", + name="", + ), + rssi=-61, + manufacturer_data={}, + service_data={}, + service_uuids=[], + source="local", + advertisement=generate_advertisement_data( + manufacturer_data={}, + service_uuids=[], + ), + connectable=True, + time=0, + tx_power=None, +) + + +@pytest.fixture(autouse=True) +def mock_bluetooth(enable_bluetooth: None) -> None: + """Auto mock bluetooth.""" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.iron_os.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="discovery") +def mock_async_discovered_service_info() -> 
Generator[MagicMock]: + """Mock service discovery.""" + with patch( + "homeassistant.components.iron_os.config_flow.async_discovered_service_info", + return_value=[PINECIL_SERVICE_INFO, UNKNOWN_SERVICE_INFO], + ) as discovery: + yield discovery + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Mock Pinecil configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=DEFAULT_NAME, + data={}, + unique_id="c0:ff:ee:c0:ff:ee", + entry_id="1234567890", + ) + + +@pytest.fixture(name="ble_device") +def mock_ble_device() -> Generator[MagicMock]: + """Mock BLEDevice.""" + with patch( + "homeassistant.components.bluetooth.async_ble_device_from_address", + return_value=BLEDevice( + address="c0:ff:ee:c0:ff:ee", name=DEFAULT_NAME, rssi=-50, details={} + ), + ) as ble_device: + yield ble_device + + +@pytest.fixture +def mock_pynecil() -> Generator[AsyncMock]: + """Mock Pynecil library.""" + with patch( + "homeassistant.components.iron_os.Pynecil", autospec=True + ) as mock_client: + client = mock_client.return_value + + client.get_device_info.return_value = DeviceInfoResponse( + build="v2.22", + device_id="c0ffeeC0", + address="c0:ff:ee:c0:ff:ee", + device_sn="0000c0ffeec0ffee", + name=DEFAULT_NAME, + ) + client.get_live_data.return_value = LiveDataResponse( + live_temp=298, + setpoint_temp=300, + dc_voltage=20.6, + handle_temp=36.3, + pwm_level=41, + power_src=PowerSource.PD, + tip_resistance=6.2, + uptime=1671, + movement_time=10000, + max_tip_temp_ability=460, + tip_voltage=2212, + hall_sensor=0, + operating_mode=OperatingMode.SOLDERING, + estimated_power=24.8, + ) + yield client diff --git a/tests/components/iron_os/snapshots/test_number.ambr b/tests/components/iron_os/snapshots/test_number.ambr new file mode 100644 index 00000000000..2f5ee62e37e --- /dev/null +++ b/tests/components/iron_os/snapshots/test_number.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_state[number.pinecil_setpoint_temperature-entry] 
+ EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 450, + 'min': 10, + 'mode': , + 'step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.pinecil_setpoint_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Setpoint temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_setpoint_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_setpoint_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Setpoint temperature', + 'max': 450, + 'min': 10, + 'mode': , + 'step': 5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_setpoint_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '300', + }) +# --- diff --git a/tests/components/iron_os/snapshots/test_sensor.ambr b/tests/components/iron_os/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..64cb951dacc --- /dev/null +++ b/tests/components/iron_os/snapshots/test_sensor.ambr @@ -0,0 +1,683 @@ +# serializer version: 1 +# name: test_sensors[sensor.pinecil_dc_input_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_dc_input_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC input voltage', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_dc_input_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Pinecil DC input voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_dc_input_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.6', + }) +# --- +# name: test_sensors[sensor.pinecil_estimated_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pinecil_estimated_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Estimated power', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_estimated_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_estimated_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Pinecil Estimated power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_estimated_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24.8', + }) +# --- +# name: test_sensors[sensor.pinecil_hall_effect_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_hall_effect_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hall effect strength', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_hall_sensor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.pinecil_hall_effect_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Hall effect strength', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_hall_effect_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.pinecil_handle_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pinecil_handle_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Handle temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_handle_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_handle_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'temperature', + 'friendly_name': 'Pinecil Handle temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_handle_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '36.3', + }) +# --- +# name: test_sensors[sensor.pinecil_last_movement_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_last_movement_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last movement time', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_movement_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_last_movement_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Pinecil Last movement time', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_last_movement_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10000', + }) +# --- +# name: test_sensors[sensor.pinecil_max_tip_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_max_tip_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Max tip temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_max_tip_temp_ability', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_max_tip_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Max tip temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_max_tip_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '460', + }) +# --- +# name: test_sensors[sensor.pinecil_operating_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'idle', + 'soldering', + 'boost', + 'sleeping', + 'settings', + 'debug', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pinecil_operating_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Operating mode', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_operating_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.pinecil_operating_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Pinecil Operating mode', + 'options': list([ + 'idle', + 'soldering', + 'boost', + 'sleeping', + 'settings', + 'debug', + ]), + }), + 'context': , + 'entity_id': 'sensor.pinecil_operating_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'soldering', + }) +# --- +# name: 
test_sensors[sensor.pinecil_power_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_power_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power level', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_power_pwm_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.pinecil_power_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Pinecil Power level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.pinecil_power_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '41', + }) +# --- +# name: test_sensors[sensor.pinecil_power_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'dc', + 'qc', + 'pd_vbus', + 'pd', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_power_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power source', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 
'c0:ff:ee:c0:ff:ee_power_source', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.pinecil_power_source-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Pinecil Power source', + 'options': list([ + 'dc', + 'qc', + 'pd_vbus', + 'pd', + ]), + }), + 'context': , + 'entity_id': 'sensor.pinecil_power_source', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'pd', + }) +# --- +# name: test_sensors[sensor.pinecil_raw_tip_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_raw_tip_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Raw tip voltage', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_raw_tip_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Pinecil Raw tip voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_raw_tip_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2212', + }) +# --- +# name: test_sensors[sensor.pinecil_tip_resistance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.pinecil_tip_resistance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tip resistance', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_resistance', + 'unit_of_measurement': 'Ω', + }) +# --- +# name: test_sensors[sensor.pinecil_tip_resistance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Tip resistance', + 'unit_of_measurement': 'Ω', + }), + 'context': , + 'entity_id': 'sensor.pinecil_tip_resistance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.2', + }) +# --- +# name: test_sensors[sensor.pinecil_tip_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pinecil_tip_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tip temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_live_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_tip_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Tip temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_tip_temperature', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '298', + }) +# --- +# name: test_sensors[sensor.pinecil_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_uptime', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Pinecil Uptime', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1671', + }) +# --- diff --git a/tests/components/iron_os/test_config_flow.py b/tests/components/iron_os/test_config_flow.py new file mode 100644 index 00000000000..231ec6cc3d6 --- /dev/null +++ b/tests/components/iron_os/test_config_flow.py @@ -0,0 +1,66 @@ +"""Tests for the Pinecil config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock + +from homeassistant.components.iron_os import DOMAIN +from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import DEFAULT_NAME, PINECIL_SERVICE_INFO, USER_INPUT + + +async def test_form( + hass: HomeAssistant, mock_setup_entry: AsyncMock, discovery: MagicMock +) -> None: + """Test the user config flow.""" + result 
= await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == {} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_no_device_discovered( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + discovery: MagicMock, +) -> None: + """Test setup with no device discoveries.""" + discovery.return_value = [] + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices_found" + + +async def test_async_step_bluetooth(hass: HomeAssistant) -> None: + """Test discovery via bluetooth..""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_BLUETOOTH}, + data=PINECIL_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "bluetooth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == {} + assert result["result"].unique_id == "c0:ff:ee:c0:ff:ee" diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py new file mode 100644 index 00000000000..fb0a782ea36 --- /dev/null +++ b/tests/components/iron_os/test_init.py @@ -0,0 +1,26 @@ +"""Test init of IronOS integration.""" + +from unittest.mock import AsyncMock + +from pynecil import CommunicationError +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + 
+@pytest.mark.usefixtures("ble_device") +async def test_setup_config_entry_not_ready( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test config entry not ready.""" + mock_pynecil.get_device_info.side_effect = CommunicationError + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py new file mode 100644 index 00000000000..781492987ee --- /dev/null +++ b/tests/components/iron_os/test_number.py @@ -0,0 +1,104 @@ +"""Tests for the IronOS number platform.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +from pynecil import CharSetting, CommunicationError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +async def sensor_only() -> AsyncGenerator[None]: + """Enable only the number platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.NUMBER], + ): + yield + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" +) +async def test_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test the IronOS number platform states.""" + config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_set_value( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the IronOS number platform set value service.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 300}, + target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, + blocking=True, + ) + assert len(mock_pynecil.write.mock_calls) == 1 + mock_pynecil.write.assert_called_once_with(CharSetting.SETPOINT_TEMP, 300) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_set_value_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the IronOS number platform set value service with exception.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_pynecil.write.side_effect = CommunicationError + + with pytest.raises( + ServiceValidationError, + match="Failed to submit setting to device, try again later", + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 300}, + target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, + blocking=True, + ) diff --git a/tests/components/iron_os/test_sensor.py b/tests/components/iron_os/test_sensor.py new file mode 100644 index 
00000000000..2f79487a7fd --- /dev/null +++ b/tests/components/iron_os/test_sensor.py @@ -0,0 +1,73 @@ +"""Tests for the Pinecil Sensors.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pynecil import CommunicationError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.iron_os.coordinator import SCAN_INTERVAL +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.fixture(autouse=True) +async def sensor_only() -> AsyncGenerator[None]: + """Enable only the sensor platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.SENSOR], + ): + yield + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_pynecil: AsyncMock, + ble_device: MagicMock, +) -> None: + """Test the Pinecil sensor platform.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors_unavailable( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_pynecil: AsyncMock, + ble_device: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the sensors when device disconnects.""" + config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_pynecil.get_live_data.side_effect = CommunicationError + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + for entity_entry in entity_entries: + assert hass.states.get(entity_entry.entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/islamic_prayer_times/conftest.py b/tests/components/islamic_prayer_times/conftest.py index ae9b1f45eb9..ae0b6741fdf 100644 --- a/tests/components/islamic_prayer_times/conftest.py +++ b/tests/components/islamic_prayer_times/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the islamic_prayer_times tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/islamic_prayer_times/test_config_flow.py b/tests/components/islamic_prayer_times/test_config_flow.py index cb37a6b147d..695be636a84 100644 --- a/tests/components/islamic_prayer_times/test_config_flow.py +++ b/tests/components/islamic_prayer_times/test_config_flow.py @@ -3,7 +3,6 @@ import pytest from homeassistant import config_entries -from homeassistant.components import islamic_prayer_times from homeassistant.components.islamic_prayer_times.const import ( CONF_CALC_METHOD, CONF_LAT_ADJ_METHOD, @@ -24,7 +23,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_flow_works(hass: HomeAssistant) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( - islamic_prayer_times.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -76,7 +75,7 @@ async def 
test_integration_already_configured(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - islamic_prayer_times.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/islamic_prayer_times/test_init.py b/tests/components/islamic_prayer_times/test_init.py index 025a202e6da..7961b79676b 100644 --- a/tests/components/islamic_prayer_times/test_init.py +++ b/tests/components/islamic_prayer_times/test_init.py @@ -6,8 +6,7 @@ from unittest.mock import patch from freezegun import freeze_time import pytest -from homeassistant.components import islamic_prayer_times -from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD +from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD, DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE @@ -30,7 +29,7 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: """Test that Islamic Prayer Times is configured successfully.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -48,7 +47,7 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: async def test_unload_entry(hass: HomeAssistant) -> None: """Test removing Islamic Prayer Times.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -66,7 +65,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: async def test_options_listener(hass: HomeAssistant) -> None: """Ensure updating options triggers a coordinator refresh.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = 
MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) with ( @@ -110,13 +109,13 @@ async def test_migrate_unique_id( old_unique_id: str, ) -> None: """Test unique id migration.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) entity: er.RegistryEntry = entity_registry.async_get_or_create( suggested_object_id=object_id, domain=SENSOR_DOMAIN, - platform=islamic_prayer_times.DOMAIN, + platform=DOMAIN, unique_id=old_unique_id, config_entry=entry, ) @@ -140,7 +139,7 @@ async def test_migrate_unique_id( async def test_migration_from_1_1_to_1_2(hass: HomeAssistant) -> None: """Test migrating from version 1.1 to 1.2.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -164,7 +163,7 @@ async def test_migration_from_1_1_to_1_2(hass: HomeAssistant) -> None: async def test_update_scheduling(hass: HomeAssistant) -> None: """Test that integration schedules update immediately after Islamic midnight.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) with ( diff --git a/tests/components/israel_rail/__init__.py b/tests/components/israel_rail/__init__.py new file mode 100644 index 00000000000..23cf9f5a821 --- /dev/null +++ b/tests/components/israel_rail/__init__.py @@ -0,0 +1,28 @@ +"""Tests for the israel_rail component.""" + +from datetime import timedelta + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.israel_rail.const import DEFAULT_SCAN_INTERVAL +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def init_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Set up the israel rail integration in Home Assistant.""" + + config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +async def goto_future(hass: HomeAssistant, freezer: FrozenDateTimeFactory): + """Move to future.""" + freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() diff --git a/tests/components/israel_rail/conftest.py b/tests/components/israel_rail/conftest.py new file mode 100644 index 00000000000..07a101d40c7 --- /dev/null +++ b/tests/components/israel_rail/conftest.py @@ -0,0 +1,137 @@ +"""Configuration for Israel rail tests.""" + +from collections.abc import Generator +from datetime import datetime +from unittest.mock import AsyncMock, patch +from zoneinfo import ZoneInfo + +from israelrailapi.api import TrainRoute +import pytest + +from homeassistant.components.israel_rail import CONF_DESTINATION, CONF_START, DOMAIN + +from tests.common import MockConfigEntry + +VALID_CONFIG = { + CONF_START: "באר יעקב", + CONF_DESTINATION: "אשקלון", +} + +SOURCE_DEST = "באר יעקב אשקלון" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.israel_rail.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=VALID_CONFIG, + unique_id=SOURCE_DEST, + ) + + +@pytest.fixture +def mock_israelrail() -> AsyncMock: + """Build a fixture for the Israel rail API.""" + with ( + patch( + "homeassistant.components.israel_rail.TrainSchedule", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.israel_rail.config_flow.TrainSchedule", + new=mock_client, + ), + ): + client = mock_client.return_value + client.query.return_value = TRAINS + + yield client + + +def get_time(hour: int, minute: int) -> str: + """Return a time in 
isoformat.""" + return datetime(2021, 10, 10, hour, minute, 10, tzinfo=ZoneInfo("UTC")).isoformat() + + +def get_train_route( + train_number: str = "1234", + departure_time: str = "2021-10-10T10:10:10", + arrival_time: str = "2021-10-10T10:10:10", + origin_platform: str = "1", + dest_platform: str = "2", + origin_station: str = "3500", + destination_station: str = "3700", +) -> TrainRoute: + """Build a TrainRoute of the israelrail API.""" + return TrainRoute( + [ + { + "orignStation": origin_station, + "destinationStation": destination_station, + "departureTime": departure_time, + "arrivalTime": arrival_time, + "originPlatform": origin_platform, + "destPlatform": dest_platform, + "trainNumber": train_number, + } + ] + ) + + +TRAINS = [ + get_train_route( + train_number="1234", + departure_time=get_time(10, 10), + arrival_time=get_time(10, 30), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1235", + departure_time=get_time(10, 20), + arrival_time=get_time(10, 40), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1236", + departure_time=get_time(10, 30), + arrival_time=get_time(10, 50), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1237", + departure_time=get_time(10, 40), + arrival_time=get_time(11, 00), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1238", + departure_time=get_time(10, 50), + arrival_time=get_time(11, 10), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), +] diff --git a/tests/components/israel_rail/snapshots/test_sensor.ambr b/tests/components/israel_rail/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..f851f1cd726 
--- /dev/null +++ b/tests/components/israel_rail/snapshots/test_sensor.ambr @@ -0,0 +1,286 @@ +# serializer version: 1 +# name: test_valid_config[sensor.mock_title_departure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_departure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure0', + 'unique_id': 'באר יעקב אשקלון_departure', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure', + }), + 'context': , + 'entity_id': 'sensor.mock_title_departure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:10:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_departure_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure +1', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'departure1', + 'unique_id': 'באר יעקב אשקלון_departure1', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure +1', + }), + 'context': , + 'entity_id': 'sensor.mock_title_departure_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:20:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_departure_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure +2', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure2', + 'unique_id': 'באר יעקב אשקלון_departure2', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure +2', + }), + 'context': , + 'entity_id': 'sensor.mock_title_departure_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:30:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_platform-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_platform', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Platform', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'platform', + 'unique_id': 'באר יעקב אשקלון_platform', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_platform-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Platform', + }), + 'context': , + 'entity_id': 'sensor.mock_title_platform', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_valid_config[sensor.mock_title_train_number-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_train_number', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Train number', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'train_number', + 'unique_id': 'באר יעקב אשקלון_train_number', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_train_number-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Train number', + }), + 'context': , + 'entity_id': 'sensor.mock_title_train_number', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '1234', + }) +# --- +# name: test_valid_config[sensor.mock_title_trains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_trains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Trains', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'trains', + 'unique_id': 'באר יעקב אשקלון_trains', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_trains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Trains', + }), + 'context': , + 'entity_id': 'sensor.mock_title_trains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- diff --git a/tests/components/israel_rail/test_config_flow.py b/tests/components/israel_rail/test_config_flow.py new file mode 100644 index 00000000000..a27d9b3420b --- /dev/null +++ b/tests/components/israel_rail/test_config_flow.py @@ -0,0 +1,87 @@ +"""Define tests for the israel rail config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.israel_rail import CONF_DESTINATION, CONF_START, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import VALID_CONFIG + +from tests.common import MockConfigEntry + + +async def test_create_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_israelrail: AsyncMock +) -> None: + """Test that the user step 
works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "באר יעקב אשקלון" + assert result["data"] == { + CONF_START: "באר יעקב", + CONF_DESTINATION: "אשקלון", + } + + +async def test_flow_fails( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test that the user step fails.""" + mock_israelrail.query.side_effect = Exception("error") + failed_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data=VALID_CONFIG, + ) + + assert failed_result["errors"] == {"base": "unknown"} + assert failed_result["type"] is FlowResultType.FORM + + mock_israelrail.query.side_effect = None + + result = await hass.config_entries.flow.async_configure( + failed_result["flow_id"], + VALID_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "באר יעקב אשקלון" + assert result["data"] == { + CONF_START: "באר יעקב", + CONF_DESTINATION: "אשקלון", + } + + +async def test_flow_already_configured( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test that the user step fails when the entry is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result_aborted = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result_aborted["type"] is FlowResultType.ABORT + assert result_aborted["reason"] == "already_configured" diff --git a/tests/components/israel_rail/test_init.py 
b/tests/components/israel_rail/test_init.py new file mode 100644 index 00000000000..c4dd4e5721e --- /dev/null +++ b/tests/components/israel_rail/test_init.py @@ -0,0 +1,22 @@ +"""Test init of israel_rail integration.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import init_integration + +from tests.common import MockConfigEntry + + +async def test_invalid_config( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_israelrail: AsyncMock, +) -> None: + """Ensure nothing is created when config is wrong.""" + mock_israelrail.query.side_effect = Exception("error") + await init_integration(hass, mock_config_entry) + assert not hass.states.async_entity_ids("sensor") + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/israel_rail/test_sensor.py b/tests/components/israel_rail/test_sensor.py new file mode 100644 index 00000000000..d044dfe1d7c --- /dev/null +++ b/tests/components/israel_rail/test_sensor.py @@ -0,0 +1,69 @@ +"""Tests for the israel_rail sensor.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import goto_future, init_integration +from .conftest import TRAINS, get_time + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_valid_config( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Ensure everything starts correctly.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_train( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure the train data is updated.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + expected_time = get_time(10, 10) + assert departure_sensor.state == expected_time + + mock_israelrail.query.return_value = TRAINS[1:] + + await goto_future(hass, freezer) + + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + expected_time = get_time(10, 20) + assert departure_sensor.state == expected_time + + +async def test_fail_query( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure the integration handles query failures.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + mock_israelrail.query.side_effect = Exception("error") + await goto_future(hass, freezer) + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + assert departure_sensor.state == STATE_UNAVAILABLE diff --git a/tests/components/ista_ecotrend/conftest.py 
b/tests/components/ista_ecotrend/conftest.py index 2218ef05ba7..7edf2e4717b 100644 --- a/tests/components/ista_ecotrend/conftest.py +++ b/tests/components/ista_ecotrend/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the ista EcoTrend tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.ista_ecotrend.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD @@ -166,3 +166,52 @@ def get_consumption_data(obj_uuid: str | None = None) -> dict[str, Any]: }, ], } + + +def extend_statistics(obj_uuid: str | None = None) -> dict[str, Any]: + """Extend statistics data with new values.""" + stats = get_consumption_data(obj_uuid) + + stats["costs"].insert( + 0, + { + "date": {"month": 6, "year": 2024}, + "costsByEnergyType": [ + { + "type": "heating", + "value": 9000, + }, + { + "type": "warmwater", + "value": 9000, + }, + { + "type": "water", + "value": 9000, + }, + ], + }, + ) + stats["consumptions"].insert( + 0, + { + "date": {"month": 6, "year": 2024}, + "readings": [ + { + "type": "heating", + "value": "9000", + "additionalValue": "9000,0", + }, + { + "type": "warmwater", + "value": "9999,0", + "additionalValue": "90000,0", + }, + { + "type": "water", + "value": "9000,0", + }, + ], + }, + ) + return stats diff --git a/tests/components/ista_ecotrend/snapshots/test_init.ambr b/tests/components/ista_ecotrend/snapshots/test_init.ambr index a9d13510b54..c84d55c059c 100644 --- a/tests/components/ista_ecotrend/snapshots/test_init.ambr +++ b/tests/components/ista_ecotrend/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'ista SE', 'model': 'ista EcoTrend', + 'model_id': None, 'name': 'Luxemburger Str. 
1', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -51,8 +53,10 @@ }), 'manufacturer': 'ista SE', 'model': 'ista EcoTrend', + 'model_id': None, 'name': 'Bahnhofsstr. 1A', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr index c312f9b6350..b5056019c74 100644 --- a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr +++ b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr @@ -1,70 +1,12 @@ # serializer version: 1 -# name: test_setup.32 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'https://ecotrend.ista.de/', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'ista_ecotrend', - '26e93f1a-c828-11ea-87d0-0242ac130003', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'ista SE', - 'model': 'ista EcoTrend', - 'name': 'Luxemburger Str. 1', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_setup.33 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'https://ecotrend.ista.de/', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'ista_ecotrend', - 'eaf5c5c8-889f-4a3c-b68c-e9a676505762', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'ista SE', - 'model': 'ista EcoTrend', - 'name': 'Bahnhofsstr. 
1A', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- # name: test_setup[sensor.bahnhofsstr_1a_heating-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -92,13 +34,15 @@ 'supported_features': 0, 'translation_key': , 'unique_id': 'eaf5c5c8-889f-4a3c-b68c-e9a676505762_heating', - 'unit_of_measurement': None, + 'unit_of_measurement': 'units', }) # --- # name: test_setup[sensor.bahnhofsstr_1a_heating-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Bahnhofsstr. 1A Heating', + 'state_class': , + 'unit_of_measurement': 'units', }), 'context': , 'entity_id': 'sensor.bahnhofsstr_1a_heating', @@ -491,7 +435,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -519,13 +465,15 @@ 'supported_features': 0, 'translation_key': , 'unique_id': '26e93f1a-c828-11ea-87d0-0242ac130003_heating', - 'unit_of_measurement': None, + 'unit_of_measurement': 'units', }) # --- # name: test_setup[sensor.luxemburger_str_1_heating-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Luxemburger Str. 
1 Heating', + 'state_class': , + 'unit_of_measurement': 'units', }), 'context': , 'entity_id': 'sensor.luxemburger_str_1_heating', diff --git a/tests/components/ista_ecotrend/snapshots/test_statistics.ambr b/tests/components/ista_ecotrend/snapshots/test_statistics.ambr new file mode 100644 index 00000000000..78ecd6a6b6b --- /dev/null +++ b/tests/components/ista_ecotrend/snapshots/test_statistics.ambr @@ -0,0 +1,609 @@ +# serializer version: 1 +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9083.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 
113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9999.0, + 'sum': 10001.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9014.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_energy_2months] + list([ + dict({ + 'end': 
1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 90000.0, + 'sum': 90118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9011.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9005.0, + }), + ]) +# --- +# name: 
test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9083.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + dict({ + 'end': 1719817200.0, + 
'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9999.0, + 'sum': 10001.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9014.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 
1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 90000.0, + 'sum': 90118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9011.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9005.0, + }), + ]) +# --- diff --git a/tests/components/ista_ecotrend/test_statistics.py b/tests/components/ista_ecotrend/test_statistics.py new file mode 100644 index 00000000000..6b2f98affe9 --- /dev/null +++ b/tests/components/ista_ecotrend/test_statistics.py @@ -0,0 +1,82 @@ +"""Tests for the ista EcoTrend Statistics import.""" + +import datetime +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + 
+from homeassistant.components.recorder.statistics import statistics_during_period +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import extend_statistics + +from tests.common import MockConfigEntry +from tests.components.recorder.common import async_wait_recording_done + + +@pytest.mark.usefixtures("recorder_mock", "entity_registry_enabled_by_default") +async def test_statistics_import( + hass: HomeAssistant, + ista_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_ista: MagicMock, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test setup of ista EcoTrend sensor platform.""" + + ista_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(ista_config_entry.entry_id) + await hass.async_block_till_done() + + assert ista_config_entry.state is ConfigEntryState.LOADED + entities = er.async_entries_for_config_entry( + entity_registry, ista_config_entry.entry_id + ) + await async_wait_recording_done(hass) + + # Test that consumption statistics for 2 months have been added + for entity in entities: + statistic_id = f"ista_ecotrend:{entity.entity_id.removeprefix("sensor.")}" + stats = await hass.async_add_executor_job( + statistics_during_period, + hass, + datetime.datetime.fromtimestamp(0, tz=datetime.UTC), + None, + {statistic_id}, + "month", + None, + {"state", "sum"}, + ) + assert stats[statistic_id] == snapshot(name=f"{statistic_id}_2months") + assert len(stats[statistic_id]) == 2 + + # Add another monthly consumption and forward + # 1 day and test if the new values have been + # appended to the statistics + mock_ista.get_consumption_data = extend_statistics + + freezer.tick(datetime.timedelta(days=1)) + await async_wait_recording_done(hass) + freezer.tick(datetime.timedelta(days=1)) + await async_wait_recording_done(hass) + + for entity in entities: + 
statistic_id = f"ista_ecotrend:{entity.entity_id.removeprefix("sensor.")}" + stats = await hass.async_add_executor_job( + statistics_during_period, + hass, + datetime.datetime.fromtimestamp(0, tz=datetime.UTC), + None, + {statistic_id}, + "month", + None, + {"state", "sum"}, + ) + assert stats[statistic_id] == snapshot(name=f"{statistic_id}_3months") + + assert len(stats[statistic_id]) == 3 diff --git a/tests/components/jellyfin/conftest.py b/tests/components/jellyfin/conftest.py index 40d03212ceb..c3732714177 100644 --- a/tests/components/jellyfin/conftest.py +++ b/tests/components/jellyfin/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch from jellyfin_apiclient_python import JellyfinClient @@ -9,7 +10,6 @@ from jellyfin_apiclient_python.api import API from jellyfin_apiclient_python.configuration import Config from jellyfin_apiclient_python.connection_manager import ConnectionManager import pytest -from typing_extensions import Generator from homeassistant.components.jellyfin.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME diff --git a/tests/components/jellyfin/test_init.py b/tests/components/jellyfin/test_init.py index 51d7af2ae94..1af59737296 100644 --- a/tests/components/jellyfin/test_init.py +++ b/tests/components/jellyfin/test_init.py @@ -68,12 +68,10 @@ async def test_load_unload_config_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id not in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/jewish_calendar/__init__.py 
b/tests/components/jewish_calendar/__init__.py index 60726fc3a3e..440bffc2256 100644 --- a/tests/components/jewish_calendar/__init__.py +++ b/tests/components/jewish_calendar/__init__.py @@ -8,7 +8,7 @@ from freezegun import freeze_time as alter_time # noqa: F401 from homeassistant.components import jewish_calendar import homeassistant.util.dt as dt_util -_LatLng = namedtuple("_LatLng", ["lat", "lng"]) +_LatLng = namedtuple("_LatLng", ["lat", "lng"]) # noqa: PYI024 HDATE_DEFAULT_ALTITUDE = 754 NYC_LATLNG = _LatLng(40.7128, -74.0060) diff --git a/tests/components/jewish_calendar/conftest.py b/tests/components/jewish_calendar/conftest.py index 5e16289f473..97909291f27 100644 --- a/tests/components/jewish_calendar/conftest.py +++ b/tests/components/jewish_calendar/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the jewish_calendar tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.jewish_calendar.const import DEFAULT_NAME, DOMAIN diff --git a/tests/components/jewish_calendar/test_binary_sensor.py b/tests/components/jewish_calendar/test_binary_sensor.py index b60e7698266..8abaaecb77d 100644 --- a/tests/components/jewish_calendar/test_binary_sensor.py +++ b/tests/components/jewish_calendar/test_binary_sensor.py @@ -10,6 +10,7 @@ from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, CONF_HAVDALAH_OFFSET_MINUTES, + DEFAULT_NAME, DOMAIN, ) from homeassistant.const import CONF_LANGUAGE, CONF_PLATFORM, STATE_OFF, STATE_ON @@ -192,6 +193,7 @@ async def test_issur_melacha_sensor( with alter_time(test_time): entry = MockConfigEntry( + title=DEFAULT_NAME, domain=DOMAIN, data={ CONF_LANGUAGE: "english", @@ -264,6 +266,7 @@ async def test_issur_melacha_sensor_update( with alter_time(test_time): entry = MockConfigEntry( + title=DEFAULT_NAME, domain=DOMAIN, data={ CONF_LANGUAGE: "english", diff --git 
a/tests/components/jewish_calendar/test_config_flow.py b/tests/components/jewish_calendar/test_config_flow.py index 3189571a5a7..466d3a1e4f0 100644 --- a/tests/components/jewish_calendar/test_config_flow.py +++ b/tests/components/jewish_calendar/test_config_flow.py @@ -9,6 +9,7 @@ from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, CONF_HAVDALAH_OFFSET_MINUTES, + DEFAULT_CANDLE_LIGHT, DEFAULT_DIASPORA, DEFAULT_LANGUAGE, DOMAIN, @@ -138,3 +139,28 @@ async def test_options(hass: HomeAssistant, mock_config_entry: MockConfigEntry) assert len(entries) == 1 assert entries[0].options[CONF_CANDLE_LIGHT_MINUTES] == 25 assert entries[0].options[CONF_HAVDALAH_OFFSET_MINUTES] == 34 + + +async def test_options_reconfigure( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that updating the options of the Jewish Calendar integration triggers a value update.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert CONF_CANDLE_LIGHT_MINUTES not in mock_config_entry.options + + # Update the CONF_CANDLE_LIGHT_MINUTES option to a new value + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_CANDLE_LIGHT_MINUTES: DEFAULT_CANDLE_LIGHT + 1, + }, + ) + assert result["result"] + + # The value of the "upcoming_shabbat_candle_lighting" sensor should be the new value + assert ( + mock_config_entry.options[CONF_CANDLE_LIGHT_MINUTES] == DEFAULT_CANDLE_LIGHT + 1 + ) diff --git a/tests/components/jewish_calendar/test_sensor.py b/tests/components/jewish_calendar/test_sensor.py index 509e17017d5..cb054751f67 100644 --- a/tests/components/jewish_calendar/test_sensor.py +++ b/tests/components/jewish_calendar/test_sensor.py @@ -10,6 +10,7 @@ from 
homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, CONF_HAVDALAH_OFFSET_MINUTES, + DEFAULT_NAME, DOMAIN, ) from homeassistant.const import CONF_LANGUAGE, CONF_PLATFORM @@ -24,7 +25,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed async def test_jewish_calendar_min_config(hass: HomeAssistant) -> None: """Test minimum jewish calendar configuration.""" - entry = MockConfigEntry(domain=DOMAIN, data={}) + entry = MockConfigEntry(title=DEFAULT_NAME, domain=DOMAIN, data={}) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -33,7 +34,9 @@ async def test_jewish_calendar_min_config(hass: HomeAssistant) -> None: async def test_jewish_calendar_hebrew(hass: HomeAssistant) -> None: """Test jewish calendar sensor with language set to hebrew.""" - entry = MockConfigEntry(domain=DOMAIN, data={"language": "hebrew"}) + entry = MockConfigEntry( + title=DEFAULT_NAME, domain=DOMAIN, data={"language": "hebrew"} + ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -201,6 +204,7 @@ TEST_IDS = [ TEST_PARAMS, ids=TEST_IDS, ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_jewish_calendar_sensor( hass: HomeAssistant, now, @@ -223,6 +227,7 @@ async def test_jewish_calendar_sensor( with alter_time(test_time): entry = MockConfigEntry( + title=DEFAULT_NAME, domain=DOMAIN, data={ CONF_LANGUAGE: language, @@ -541,6 +546,7 @@ SHABBAT_TEST_IDS = [ SHABBAT_PARAMS, ids=SHABBAT_TEST_IDS, ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_shabbat_times_sensor( hass: HomeAssistant, language, @@ -563,6 +569,7 @@ async def test_shabbat_times_sensor( with alter_time(test_time): entry = MockConfigEntry( + title=DEFAULT_NAME, domain=DOMAIN, data={ CONF_LANGUAGE: language, @@ -617,12 +624,13 @@ OMER_TEST_IDS = [ @pytest.mark.parametrize(("test_time", 
"result"), OMER_PARAMS, ids=OMER_TEST_IDS) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_omer_sensor(hass: HomeAssistant, test_time, result) -> None: """Test Omer Count sensor output.""" test_time = test_time.replace(tzinfo=dt_util.get_time_zone(hass.config.time_zone)) with alter_time(test_time): - entry = MockConfigEntry(domain=DOMAIN) + entry = MockConfigEntry(title=DEFAULT_NAME, domain=DOMAIN) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -651,12 +659,13 @@ DAFYOMI_TEST_IDS = [ @pytest.mark.parametrize(("test_time", "result"), DAFYOMI_PARAMS, ids=DAFYOMI_TEST_IDS) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_dafyomi_sensor(hass: HomeAssistant, test_time, result) -> None: """Test Daf Yomi sensor output.""" test_time = test_time.replace(tzinfo=dt_util.get_time_zone(hass.config.time_zone)) with alter_time(test_time): - entry = MockConfigEntry(domain=DOMAIN) + entry = MockConfigEntry(title=DEFAULT_NAME, domain=DOMAIN) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/jvc_projector/conftest.py b/tests/components/jvc_projector/conftest.py index dd012d3f355..3115cbfe252 100644 --- a/tests/components/jvc_projector/conftest.py +++ b/tests/components/jvc_projector/conftest.py @@ -1,9 +1,9 @@ """Fixtures for JVC Projector integration.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.jvc_projector.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT diff --git a/tests/components/kaleidescape/conftest.py b/tests/components/kaleidescape/conftest.py index 5cd2a8ebb18..e5aeedc3895 100644 --- a/tests/components/kaleidescape/conftest.py +++ b/tests/components/kaleidescape/conftest.py @@ -1,11 +1,11 @@ 
"""Fixtures for Kaleidescape integration.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from kaleidescape import Dispatcher from kaleidescape.device import Automation, Movie, Power, System import pytest -from typing_extensions import Generator from homeassistant.components.kaleidescape.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/keymitt_ble/__init__.py b/tests/components/keymitt_ble/__init__.py index 1e717b805c5..6fa608ad3b4 100644 --- a/tests/components/keymitt_ble/__init__.py +++ b/tests/components/keymitt_ble/__init__.py @@ -53,7 +53,7 @@ SERVICE_INFO = BluetoothServiceInfoBleak( class MockMicroBotApiClient: """Mock MicroBotApiClient.""" - def __init__(self, device, token): + def __init__(self, device, token) -> None: """Mock init.""" async def connect(self, init): @@ -70,7 +70,7 @@ class MockMicroBotApiClient: class MockMicroBotApiClientFail: """Mock MicroBotApiClient.""" - def __init__(self, device, token): + def __init__(self, device, token) -> None: """Mock init.""" async def connect(self, init): diff --git a/tests/components/kira/test_init.py b/tests/components/kira/test_init.py index e57519667ce..8e6c70c83a4 100644 --- a/tests/components/kira/test_init.py +++ b/tests/components/kira/test_init.py @@ -1,6 +1,7 @@ """The tests for Kira.""" import os +from pathlib import Path import shutil import tempfile from unittest.mock import patch @@ -76,10 +77,9 @@ async def test_kira_creates_codes(work_dir) -> None: assert os.path.exists(code_path), "Kira component didn't create codes file" -async def test_load_codes(work_dir) -> None: +async def test_load_codes(hass: HomeAssistant, work_dir) -> None: """Kira should ignore invalid codes.""" code_path = os.path.join(work_dir, "codes.yaml") - with open(code_path, "w", encoding="utf8") as code_file: - code_file.write(KIRA_CODES) + await hass.async_add_executor_job(Path(code_path).write_text, KIRA_CODES) res = kira.load_codes(code_path) 
assert len(res) == 1, "Expected exactly 1 valid Kira code" diff --git a/tests/components/kitchen_sink/snapshots/test_switch.ambr b/tests/components/kitchen_sink/snapshots/test_switch.ambr index 1cd903a59d6..fe4311ad711 100644 --- a/tests/components/kitchen_sink/snapshots/test_switch.ambr +++ b/tests/components/kitchen_sink/snapshots/test_switch.ambr @@ -67,8 +67,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Outlet 1', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -97,8 +99,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Power strip with 2 sockets', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -173,8 +177,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Outlet 2', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -203,8 +209,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Power strip with 2 sockets', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/kitchen_sink/test_config_flow.py b/tests/components/kitchen_sink/test_config_flow.py index e530ed0e6f3..5f163d1342e 100644 --- a/tests/components/kitchen_sink/test_config_flow.py +++ b/tests/components/kitchen_sink/test_config_flow.py @@ -1,13 +1,28 @@ """Test the Everything but the Kitchen Sink config flow.""" +from collections.abc import Generator from unittest.mock import patch +import pytest + from homeassistant import config_entries, setup from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry + + 
+@pytest.fixture +def no_platforms() -> Generator[None]: + """Don't enable any platforms.""" + with patch( + "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", + [], + ): + yield + async def test_import(hass: HomeAssistant) -> None: """Test that we can import a config entry.""" @@ -66,3 +81,26 @@ async def test_reauth(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" + + +@pytest.mark.usefixtures("no_platforms") +async def test_options_flow(hass: HomeAssistant) -> None: + """Test config flow options.""" + config_entry = MockConfigEntry(domain=DOMAIN) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "options_1" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"section_1": {"bool": True, "int": 15}}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert config_entry.options == {"section_1": {"bool": True, "int": 15}} + + await hass.async_block_till_done() diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index 1547a10bd2b..0575141bb3b 100644 --- a/tests/components/kitchen_sink/test_init.py +++ b/tests/components/kitchen_sink/test_init.py @@ -7,7 +7,7 @@ from unittest.mock import ANY import pytest from homeassistant.components.kitchen_sink import DOMAIN -from homeassistant.components.recorder import Recorder, get_instance +from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -24,14 +24,13 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator @pytest.fixture -def mock_history(hass): 
+def mock_history(hass: HomeAssistant) -> None: """Mock history component loaded.""" hass.config.components.add("history") -async def test_demo_statistics( - recorder_mock: Recorder, mock_history, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock", "mock_history") +async def test_demo_statistics(hass: HomeAssistant) -> None: """Test that the kitchen sink component makes some statistics available.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() @@ -63,9 +62,8 @@ async def test_demo_statistics( } in statistic_ids -async def test_demo_statistics_growth( - recorder_mock: Recorder, mock_history, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock", "mock_history") +async def test_demo_statistics_growth(hass: HomeAssistant) -> None: """Test that the kitchen sink sum statistics adds to the previous state.""" hass.config.units = US_CUSTOMARY_SYSTEM @@ -104,8 +102,8 @@ async def test_demo_statistics_growth( @pytest.mark.freeze_time("2023-10-21") +@pytest.mark.usefixtures("mock_history") async def test_issues_created( - mock_history, hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, diff --git a/tests/components/kitchen_sink/test_notify.py b/tests/components/kitchen_sink/test_notify.py index df025087b6b..12e19ffaa49 100644 --- a/tests/components/kitchen_sink/test_notify.py +++ b/tests/components/kitchen_sink/test_notify.py @@ -1,10 +1,10 @@ """The tests for the demo button component.""" +from collections.abc import AsyncGenerator from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.notify import ( diff --git a/tests/components/kitchen_sink/test_switch.py b/tests/components/kitchen_sink/test_switch.py index c744ba2be44..d006908e264 100644 --- 
a/tests/components/kitchen_sink/test_switch.py +++ b/tests/components/kitchen_sink/test_switch.py @@ -1,5 +1,6 @@ """The tests for the demo switch component.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -20,7 +21,7 @@ SWITCH_ENTITY_IDS = ["switch.outlet_1", "switch.outlet_2"] @pytest.fixture -async def switch_only() -> None: +def switch_only() -> Generator[None]: """Enable only the switch platform.""" with patch( "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", @@ -30,7 +31,7 @@ async def switch_only() -> None: @pytest.fixture(autouse=True) -async def setup_comp(hass, switch_only): +async def setup_comp(hass: HomeAssistant, switch_only: None) -> None: """Set up demo component.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() @@ -55,7 +56,7 @@ async def test_state( @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) -async def test_turn_on(hass: HomeAssistant, switch_entity_id) -> None: +async def test_turn_on(hass: HomeAssistant, switch_entity_id: str) -> None: """Test switch turn on method.""" await hass.services.async_call( SWITCH_DOMAIN, @@ -79,7 +80,7 @@ async def test_turn_on(hass: HomeAssistant, switch_entity_id) -> None: @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) -async def test_turn_off(hass: HomeAssistant, switch_entity_id) -> None: +async def test_turn_off(hass: HomeAssistant, switch_entity_id: str) -> None: """Test switch turn off method.""" await hass.services.async_call( SWITCH_DOMAIN, diff --git a/tests/components/kmtronic/conftest.py b/tests/components/kmtronic/conftest.py index 5dc349508e3..11abd2a4d7b 100644 --- a/tests/components/kmtronic/conftest.py +++ b/tests/components/kmtronic/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for kmtronic tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator 
@pytest.fixture diff --git a/tests/components/knocki/conftest.py b/tests/components/knocki/conftest.py index e1bc2e29cde..2fae89c730d 100644 --- a/tests/components/knocki/conftest.py +++ b/tests/components/knocki/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Knocki tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from knocki import TokenResponse, Trigger import pytest -from typing_extensions import Generator from homeassistant.components.knocki.const import DOMAIN from homeassistant.const import CONF_TOKEN diff --git a/tests/components/knocki/fixtures/more_triggers.json b/tests/components/knocki/fixtures/more_triggers.json new file mode 100644 index 00000000000..dbe4823e3d5 --- /dev/null +++ b/tests/components/knocki/fixtures/more_triggers.json @@ -0,0 +1,30 @@ +[ + { + "device": "KNC1-W-00000214", + "gesture": "d060b870-15ba-42c9-a932-2d2951087152", + "details": { + "description": "Eeee", + "name": "Aaaa", + "id": 31 + }, + "type": "homeassistant", + "user": "7a4d5bf9-01b1-413b-bb4d-77728e931dcc", + "updatedAt": 1716378013721, + "createdAt": 1716378013721, + "id": "1a050b25-7fed-4e0e-b5af-792b8b4650de" + }, + { + "device": "KNC1-W-00000214", + "gesture": "d060b870-15ba-42c9-a932-2d2951087152", + "details": { + "description": "Eeee", + "name": "Bbbb", + "id": 32 + }, + "type": "homeassistant", + "user": "7a4d5bf9-01b1-413b-bb4d-77728e931dcc", + "updatedAt": 1716378013721, + "createdAt": 1716378013721, + "id": "1a050b25-7fed-4e0e-b5af-792b8b4650de" + } +] diff --git a/tests/components/knocki/test_config_flow.py b/tests/components/knocki/test_config_flow.py index baf43c3ad30..188175035da 100644 --- a/tests/components/knocki/test_config_flow.py +++ b/tests/components/knocki/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from knocki import KnockiConnectionError +from knocki import KnockiConnectionError, KnockiInvalidAuthError import pytest from homeassistant.components.knocki.const import 
DOMAIN @@ -72,7 +72,11 @@ async def test_duplcate_entry( @pytest.mark.parametrize(("field"), ["login", "link"]) @pytest.mark.parametrize( ("exception", "error"), - [(KnockiConnectionError, "cannot_connect"), (Exception, "unknown")], + [ + (KnockiConnectionError, "cannot_connect"), + (KnockiInvalidAuthError, "invalid_auth"), + (Exception, "unknown"), + ], ) async def test_exceptions( hass: HomeAssistant, diff --git a/tests/components/knocki/test_event.py b/tests/components/knocki/test_event.py index a53e2811854..4f639e08773 100644 --- a/tests/components/knocki/test_event.py +++ b/tests/components/knocki/test_event.py @@ -1,19 +1,20 @@ """Tests for the Knocki event platform.""" -from collections.abc import Callable +from collections.abc import Awaitable, Callable from unittest.mock import AsyncMock from knocki import Event, EventType, Trigger, TriggerDetails import pytest from syrupy import SnapshotAssertion +from homeassistant.components.knocki.const import DOMAIN from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, load_json_array_fixture, snapshot_platform async def test_entities( @@ -73,3 +74,54 @@ async def test_subscription( await hass.async_block_till_done() assert mock_knocki_client.register_listener.return_value.called + + +async def test_adding_runtime_entities( + hass: HomeAssistant, + mock_knocki_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test we can create devices on runtime.""" + mock_knocki_client.get_triggers.return_value = [] + + await setup_integration(hass, mock_config_entry) + + assert not hass.states.get("event.knc1_w_00000214_aaaa") + + add_trigger_function: Callable[[Event], None] = ( + mock_knocki_client.register_listener.call_args_list[0][0][1] + ) + trigger = Trigger.from_dict(load_json_array_fixture("triggers.json", DOMAIN)[0]) + + add_trigger_function(Event(EventType.CREATED, trigger)) + + assert hass.states.get("event.knc1_w_00000214_aaaa") is not None + + +async def test_removing_runtime_entities( + hass: HomeAssistant, + mock_knocki_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test we can create devices on runtime.""" + mock_knocki_client.get_triggers.return_value = [ + Trigger.from_dict(trigger) + for trigger in load_json_array_fixture("more_triggers.json", DOMAIN) + ] + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("event.knc1_w_00000214_aaaa") is not None + assert hass.states.get("event.knc1_w_00000214_bbbb") is not None + + remove_trigger_function: Callable[[Event], Awaitable[None]] = ( + mock_knocki_client.register_listener.call_args_list[1][0][1] + ) + trigger = Trigger.from_dict(load_json_array_fixture("triggers.json", DOMAIN)[0]) + + mock_knocki_client.get_triggers.return_value = [trigger] + + await remove_trigger_function(Event(EventType.DELETED, trigger)) + + assert hass.states.get("event.knc1_w_00000214_aaaa") is not 
None + assert hass.states.get("event.knc1_w_00000214_bbbb") is None diff --git a/tests/components/knx/README.md b/tests/components/knx/README.md index 930b9e71c28..8778feb2251 100644 --- a/tests/components/knx/README.md +++ b/tests/components/knx/README.md @@ -24,9 +24,10 @@ All outgoing telegrams are pushed to an assertion queue. Assert them in order th Asserts that no telegram was sent (assertion queue is empty). - `knx.assert_telegram_count(count: int)` Asserts that `count` telegrams were sent. -- `knx.assert_read(group_address: str)` +- `knx.assert_read(group_address: str, response: int | tuple[int, ...] | None = None)` Asserts that a GroupValueRead telegram was sent to `group_address`. The telegram will be removed from the assertion queue. + Optionally inject incoming GroupValueResponse telegram after reception to clear the value reader waiting task. This can also be done manually with `knx.receive_response`. - `knx.assert_response(group_address: str, payload: int | tuple[int, ...])` Asserts that a GroupValueResponse telegram with `payload` was sent to `group_address`. The telegram will be removed from the assertion queue. 
diff --git a/tests/components/knx/__init__.py b/tests/components/knx/__init__.py index eaa84714dc5..76ae91a193d 100644 --- a/tests/components/knx/__init__.py +++ b/tests/components/knx/__init__.py @@ -1 +1,7 @@ """Tests for the KNX integration.""" + +from collections.abc import Awaitable, Callable + +from homeassistant.helpers import entity_registry as er + +KnxEntityGenerator = Callable[..., Awaitable[er.RegistryEntry]] diff --git a/tests/components/knx/conftest.py b/tests/components/knx/conftest.py index cd7146b565b..19f2bc4d845 100644 --- a/tests/components/knx/conftest.py +++ b/tests/components/knx/conftest.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -import json from typing import Any from unittest.mock import DEFAULT, AsyncMock, Mock, patch @@ -30,13 +29,22 @@ from homeassistant.components.knx.const import ( DOMAIN as KNX_DOMAIN, ) from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY +from homeassistant.components.knx.storage.config_store import ( + STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, +) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_fixture +from . import KnxEntityGenerator -FIXTURE_PROJECT_DATA = json.loads(load_fixture("project.json", KNX_DOMAIN)) +from tests.common import MockConfigEntry, load_json_object_fixture +from tests.typing import WebSocketGenerator + +FIXTURE_PROJECT_DATA = load_json_object_fixture("project.json", KNX_DOMAIN) +FIXTURE_CONFIG_STORAGE_DATA = load_json_object_fixture("config_store.json", KNX_DOMAIN) class KNXTestKit: @@ -75,7 +83,7 @@ class KNXTestKit: self.xknx.rate_limit = 0 # set XknxConnectionState.CONNECTED to avoid `unavailable` entities at startup # and start StateUpdater. This would be awaited on normal startup too. 
- await self.xknx.connection_manager.connection_state_changed( + self.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.CONNECTED, connection_type=XknxConnectionType.TUNNEL_TCP, ) @@ -85,6 +93,7 @@ class KNXTestKit: mock = Mock() mock.start = AsyncMock(side_effect=patch_xknx_start) mock.stop = AsyncMock() + mock.gateway_info = AsyncMock() return mock def fish_xknx(*args, **kwargs): @@ -143,8 +152,6 @@ class KNXTestKit: ) -> None: """Assert outgoing telegram. One by one in timely order.""" await self.xknx.telegrams.join() - await self.hass.async_block_till_done() - await self.hass.async_block_till_done() try: telegram = self._outgoing_telegrams.get_nowait() except asyncio.QueueEmpty as err: @@ -166,9 +173,16 @@ class KNXTestKit: telegram.payload.value.value == payload # type: ignore[attr-defined] ), f"Payload mismatch in {telegram} - Expected: {payload}" - async def assert_read(self, group_address: str) -> None: - """Assert outgoing GroupValueRead telegram. One by one in timely order.""" + async def assert_read( + self, group_address: str, response: int | tuple[int, ...] | None = None + ) -> None: + """Assert outgoing GroupValueRead telegram. One by one in timely order. + + Optionally inject incoming GroupValueResponse telegram after reception. + """ await self.assert_telegram(group_address, None, GroupValueRead) + if response is not None: + await self.receive_response(group_address, response) async def assert_response( self, group_address: str, payload: int | tuple[int, ...] 
@@ -232,6 +246,7 @@ class KNXTestKit: GroupValueResponse(payload_value), source=source, ) + await asyncio.sleep(0) # advance loop to allow StateUpdater to process async def receive_write( self, @@ -280,3 +295,53 @@ def load_knxproj(hass_storage: dict[str, Any]) -> None: "version": 1, "data": FIXTURE_PROJECT_DATA, } + + +@pytest.fixture +def load_config_store(hass_storage: dict[str, Any]) -> None: + """Mock KNX config store data.""" + hass_storage[KNX_CONFIG_STORAGE_KEY] = FIXTURE_CONFIG_STORAGE_DATA + + +@pytest.fixture +async def create_ui_entity( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], +) -> KnxEntityGenerator: + """Return a helper to create a KNX entities via WS. + + The KNX integration must be set up before using the helper. + """ + ws_client = await hass_ws_client(hass) + + async def _create_ui_entity( + platform: Platform, + knx_data: dict[str, Any], + entity_data: dict[str, Any] | None = None, + ) -> er.RegistryEntry: + """Create a KNX entity from WS with given configuration.""" + if entity_data is None: + entity_data = {"name": "Test"} + + await ws_client.send_json_auto_id( + { + "type": "knx/create_entity", + "platform": platform, + "data": { + "entity": entity_data, + "knx": knx_data, + }, + } + ) + res = await ws_client.receive_json() + assert res["success"], res + assert res["result"]["success"] is True, res["result"] + entity_id = res["result"]["entity_id"] + + entity = entity_registry.async_get(entity_id) + assert entity + return entity + + return _create_ui_entity diff --git a/tests/components/knx/fixtures/config_store.json b/tests/components/knx/fixtures/config_store.json new file mode 100644 index 00000000000..971b692ade1 --- /dev/null +++ b/tests/components/knx/fixtures/config_store.json @@ -0,0 +1,29 @@ +{ + "version": 1, + "minor_version": 1, + "key": "knx/config_store.json", + "data": { + "entities": { + "switch": { + 
"knx_es_9d97829f47f1a2a3176a7c5b4216070c": { + "entity": { + "entity_category": null, + "name": "test", + "device_info": "knx_vdev_4c80a564f5fe5da701ed293966d6384d" + }, + "knx": { + "ga_switch": { + "write": "1/1/45", + "state": "1/0/45", + "passive": [] + }, + "invert": false, + "sync_state": true, + "respond_to_read": false + } + } + }, + "light": {} + } + } +} diff --git a/tests/components/knx/test_binary_sensor.py b/tests/components/knx/test_binary_sensor.py index b9216aa149a..dbb8d2ee832 100644 --- a/tests/components/knx/test_binary_sensor.py +++ b/tests/components/knx/test_binary_sensor.py @@ -2,6 +2,8 @@ from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components.knx.const import CONF_STATE_ADDRESS, CONF_SYNC_STATE from homeassistant.components.knx.schema import BinarySensorSchema from homeassistant.const import ( @@ -13,7 +15,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit @@ -123,31 +124,30 @@ async def test_binary_sensor_ignore_internal_state( # receive initial ON telegram await knx.receive_write("1/1/1", True) await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() assert len(events) == 2 # receive second ON telegram - ignore_internal_state shall force state_changed event await knx.receive_write("1/1/1", True) await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() assert len(events) == 3 # receive first OFF telegram await knx.receive_write("1/1/1", False) await knx.receive_write("2/2/2", False) - await hass.async_block_till_done() assert len(events) == 5 # receive second OFF telegram - ignore_internal_state shall force state_changed event await knx.receive_write("1/1/1", False) await knx.receive_write("2/2/2", False) - await hass.async_block_till_done() assert len(events) == 6 -async 
def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_binary_sensor_counter( + hass: HomeAssistant, + knx: KNXTestKit, + freezer: FrozenDateTimeFactory, +) -> None: """Test KNX binary_sensor with context timeout.""" - async_fire_time_changed(hass, dt_util.utcnow()) context_timeout = 1 await knx.setup_integration( @@ -166,21 +166,18 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No # receive initial ON telegram await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() # no change yet - still in 1 sec context (additional async_block_till_done needed for time change) assert len(events) == 0 state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF assert state.attributes.get("counter") == 0 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) - await hass.async_block_till_done() + freezer.tick(timedelta(seconds=context_timeout)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() # state changed twice after context timeout - once to ON with counter 1 and once to counter 0 state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 - # additional async_block_till_done needed event capture - await hass.async_block_till_done() assert len(events) == 2 event = events.pop(0).data assert event.get("new_state").attributes.get("counter") == 1 @@ -196,9 +193,9 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) + freezer.tick(timedelta(seconds=context_timeout)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() - await hass.async_block_till_done() state = 
hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 @@ -211,10 +208,12 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No assert event.get("old_state").attributes.get("counter") == 2 -async def test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_binary_sensor_reset( + hass: HomeAssistant, + knx: KNXTestKit, + freezer: FrozenDateTimeFactory, +) -> None: """Test KNX binary_sensor with reset_after function.""" - async_fire_time_changed(hass, dt_util.utcnow()) - await knx.setup_integration( { BinarySensorSchema.PLATFORM: [ @@ -230,11 +229,10 @@ async def test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None # receive ON telegram await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1)) - await hass.async_block_till_done() + freezer.tick(timedelta(seconds=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() # state reset after after timeout state = hass.states.get("binary_sensor.test") @@ -265,7 +263,6 @@ async def test_binary_sensor_restore_and_respond(hass: HomeAssistant, knx) -> No await knx.assert_telegram_count(0) await knx.receive_write(_ADDRESS, False) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF @@ -296,6 +293,5 @@ async def test_binary_sensor_restore_invert(hass: HomeAssistant, knx) -> None: # inverted is on, make sure the state is off after it await knx.receive_write(_ADDRESS, True) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF diff --git a/tests/components/knx/test_button.py b/tests/components/knx/test_button.py index 613208d5595..a05752eced1 100644 --- a/tests/components/knx/test_button.py +++ 
b/tests/components/knx/test_button.py @@ -3,20 +3,22 @@ from datetime import timedelta import logging +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx.const import CONF_PAYLOAD_LENGTH, DOMAIN, KNX_ADDRESS from homeassistant.components.knx.schema import ButtonSchema from homeassistant.const import CONF_NAME, CONF_PAYLOAD, CONF_TYPE from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit from tests.common import async_capture_events, async_fire_time_changed -async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_button_simple( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test KNX button with default payload.""" await knx.setup_integration( { @@ -38,7 +40,8 @@ async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: # received telegrams on button GA are ignored by the entity old_state = hass.states.get("button.test") - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) + freezer.tick(timedelta(seconds=3)) + async_fire_time_changed(hass) await knx.receive_write("1/2/3", False) await knx.receive_write("1/2/3", True) new_state = hass.states.get("button.test") diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index 9c431386b43..ec0498dc447 100644 --- a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -2,7 +2,7 @@ import pytest -from homeassistant.components.climate import PRESET_ECO, PRESET_SLEEP, HVACMode +from homeassistant.components.climate import HVACMode from homeassistant.components.knx.schema import ClimateSchema from homeassistant.const import CONF_NAME, STATE_IDLE from homeassistant.core import HomeAssistant @@ -80,12 +80,6 @@ async def test_climate_on_off( ) } ) - - await hass.async_block_till_done() - # read heat/cool state - if heat_cool_ga: - await 
knx.assert_read("1/2/11") - await knx.receive_response("1/2/11", 0) # cool # read temperature state await knx.assert_read("1/2/3") await knx.receive_response("1/2/3", RAW_FLOAT_20_0) @@ -95,6 +89,10 @@ async def test_climate_on_off( # read on/off state await knx.assert_read("1/2/9") await knx.receive_response("1/2/9", 1) + # read heat/cool state + if heat_cool_ga: + await knx.assert_read("1/2/11") + await knx.receive_response("1/2/11", 0) # cool # turn off await hass.services.async_call( @@ -171,18 +169,15 @@ async def test_climate_hvac_mode( ) } ) - - await hass.async_block_till_done() # read states state updater - await knx.assert_read("1/2/7") - await knx.assert_read("1/2/3") - # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_20_0) # StateUpdater semaphore allows 2 concurrent requests - # read target temperature state + await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_20_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) # turn hvac mode to off - set_hvac_mode() doesn't send to on_off if dedicated hvac mode is available await hass.services.async_call( @@ -236,6 +231,90 @@ async def test_climate_hvac_mode( assert hass.states.get("climate.test").state == "cool" +async def test_climate_heat_cool_read_only( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX climate hvac mode.""" + heat_cool_state_ga = "3/3/3" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_HEAT_COOL_STATE_ADDRESS: heat_cool_state_ga, + } + } + ) + # read states state updater + # StateUpdater semaphore allows 
2 concurrent requests + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_20_0) + await knx.receive_response("1/2/5", RAW_FLOAT_20_0) + await knx.assert_read(heat_cool_state_ga) + await knx.receive_response(heat_cool_state_ga, True) # heat + + state = hass.states.get("climate.test") + assert state.state == "heat" + assert state.attributes["hvac_modes"] == ["heat"] + assert state.attributes["hvac_action"] == "heating" + + await knx.receive_write(heat_cool_state_ga, False) # cool + state = hass.states.get("climate.test") + assert state.state == "cool" + assert state.attributes["hvac_modes"] == ["cool"] + assert state.attributes["hvac_action"] == "cooling" + + +async def test_climate_heat_cool_read_only_on_off( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX climate hvac mode.""" + on_off_ga = "2/2/2" + heat_cool_state_ga = "3/3/3" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_ON_OFF_ADDRESS: on_off_ga, + ClimateSchema.CONF_HEAT_COOL_STATE_ADDRESS: heat_cool_state_ga, + } + } + ) + # read states state updater + # StateUpdater semaphore allows 2 concurrent requests + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_20_0) + await knx.receive_response("1/2/5", RAW_FLOAT_20_0) + await knx.assert_read(heat_cool_state_ga) + await knx.receive_response(heat_cool_state_ga, True) # heat + + state = hass.states.get("climate.test") + assert state.state == "off" + assert set(state.attributes["hvac_modes"]) == {"off", "heat"} + assert state.attributes["hvac_action"] == "off" + + await knx.receive_write(heat_cool_state_ga, False) # cool + state = 
hass.states.get("climate.test") + assert state.state == "off" + assert set(state.attributes["hvac_modes"]) == {"off", "cool"} + assert state.attributes["hvac_action"] == "off" + + await knx.receive_write(on_off_ga, True) + state = hass.states.get("climate.test") + assert state.state == "cool" + assert set(state.attributes["hvac_modes"]) == {"off", "cool"} + assert state.attributes["hvac_action"] == "cooling" + + async def test_climate_preset_mode( hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry ) -> None: @@ -252,50 +331,42 @@ async def test_climate_preset_mode( } } ) - events = async_capture_events(hass, "state_changed") - await hass.async_block_till_done() - # read states state updater - await knx.assert_read("1/2/7") - await knx.assert_read("1/2/3") # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) # StateUpdater semaphore allows 2 concurrent requests - # read target temperature state + await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) - events.clear() + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) # comfort + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="comfort") # set preset mode await hass.services.async_call( "climate", "set_preset_mode", - {"entity_id": "climate.test", "preset_mode": PRESET_ECO}, + {"entity_id": "climate.test", "preset_mode": "building_protection"}, blocking=True, ) await knx.assert_write("1/2/6", (0x04,)) - assert len(events) == 1 - events.pop() + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="building_protection") # set preset mode await hass.services.async_call( "climate", "set_preset_mode", - {"entity_id": "climate.test", "preset_mode": PRESET_SLEEP}, + {"entity_id": "climate.test", "preset_mode": "economy"}, blocking=True, ) await 
knx.assert_write("1/2/6", (0x03,)) - assert len(events) == 1 - events.pop() + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="economy") assert len(knx.xknx.devices) == 2 assert len(knx.xknx.devices[0].device_updated_cbs) == 2 assert len(knx.xknx.devices[1].device_updated_cbs) == 2 # test removing also removes hooks entity_registry.async_remove("climate.test") - await hass.async_block_till_done() - # If we remove the entity the underlying devices should disappear too assert len(knx.xknx.devices) == 0 @@ -315,18 +386,15 @@ async def test_update_entity(hass: HomeAssistant, knx: KNXTestKit) -> None: } ) assert await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - await hass.async_block_till_done() # read states state updater - await knx.assert_read("1/2/7") await knx.assert_read("1/2/3") - # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - # StateUpdater semaphore allows 2 concurrent requests await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) # verify update entity retriggers group value reads to the bus await hass.services.async_call( @@ -354,8 +422,6 @@ async def test_command_value_idle_mode(hass: HomeAssistant, knx: KNXTestKit) -> } } ) - - await hass.async_block_till_done() # read states state updater await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index f12a57f97ba..78751c7e641 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -1,7 +1,7 @@ """Test the KNX config flow.""" from contextlib import contextmanager -from unittest.mock import Mock, patch +from unittest.mock 
import MagicMock, Mock, patch import pytest from xknx.exceptions.exception import CommunicationError, InvalidSecureConfiguration @@ -76,10 +76,10 @@ def patch_file_upload(return_value=FIXTURE_KEYRING, side_effect=None): """Patch file upload. Yields the Keyring instance (return_value).""" with ( patch( - "homeassistant.components.knx.helpers.keyring.process_uploaded_file" + "homeassistant.components.knx.storage.keyring.process_uploaded_file" ) as file_upload_mock, patch( - "homeassistant.components.knx.helpers.keyring.sync_load_keyring", + "homeassistant.components.knx.storage.keyring.sync_load_keyring", return_value=return_value, side_effect=side_effect, ), @@ -126,7 +126,7 @@ def _gateway_descriptor( class GatewayScannerMock: """Mock GatewayScanner.""" - def __init__(self, gateways=None): + def __init__(self, gateways=None) -> None: """Initialize GatewayScannerMock.""" # Key is a HPAI instance in xknx, but not used in HA anyway. self.found_gateways = ( @@ -184,7 +184,6 @@ async def test_routing_setup( CONF_KNX_INDIVIDUAL_ADDRESS: "1.1.110", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Routing as 1.1.110" assert result3["data"] == { @@ -259,7 +258,6 @@ async def test_routing_setup_advanced( CONF_KNX_LOCAL_IP: "192.168.1.112", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Routing as 1.1.110" assert result3["data"] == { @@ -350,7 +348,6 @@ async def test_routing_secure_manual_setup( CONF_KNX_ROUTING_SYNC_LATENCY_TOLERANCE: 2000, }, ) - await hass.async_block_till_done() assert secure_routing_manual["type"] is FlowResultType.CREATE_ENTRY assert secure_routing_manual["title"] == "Secure Routing as 0.0.123" assert secure_routing_manual["data"] == { @@ -419,7 +416,6 @@ async def test_routing_secure_keyfile( CONF_KNX_KNXKEY_PASSWORD: "password", }, ) - await hass.async_block_till_done() assert 
routing_secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert routing_secure_knxkeys["title"] == "Secure Routing as 0.0.123" assert routing_secure_knxkeys["data"] == { @@ -514,7 +510,7 @@ async def test_routing_secure_keyfile( return_value=GatewayScannerMock(), ) async def test_tunneling_setup_manual( - _gateway_scanner_mock, + gateway_scanner_mock: MagicMock, hass: HomeAssistant, knx_setup, user_input, @@ -552,7 +548,6 @@ async def test_tunneling_setup_manual( result2["flow_id"], user_input, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == title assert result3["data"] == config_entry_data @@ -564,7 +559,7 @@ async def test_tunneling_setup_manual( return_value=GatewayScannerMock(), ) async def test_tunneling_setup_manual_request_description_error( - _gateway_scanner_mock, + gateway_scanner_mock: MagicMock, hass: HomeAssistant, knx_setup, ) -> None: @@ -681,7 +676,6 @@ async def test_tunneling_setup_manual_request_description_error( CONF_PORT: 3671, }, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Tunneling TCP @ 192.168.0.1" assert result["data"] == { @@ -706,7 +700,10 @@ async def test_tunneling_setup_manual_request_description_error( return_value=_gateway_descriptor("192.168.0.2", 3675), ) async def test_tunneling_setup_for_local_ip( - _request_description_mock, _gateway_scanner_mock, hass: HomeAssistant, knx_setup + request_description_mock: MagicMock, + gateway_scanner_mock: MagicMock, + hass: HomeAssistant, + knx_setup, ) -> None: """Test tunneling if only one gateway is found.""" result = await hass.config_entries.flow.async_init( @@ -772,7 +769,6 @@ async def test_tunneling_setup_for_local_ip( CONF_KNX_LOCAL_IP: "192.168.1.112", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Tunneling UDP @ 192.168.0.2" assert result3["data"] == { @@ 
-821,7 +817,6 @@ async def test_tunneling_setup_for_multiple_found_gateways( tunnel_flow["flow_id"], {CONF_KNX_GATEWAY: str(gateway)}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { **DEFAULT_ENTRY_DATA, @@ -905,7 +900,6 @@ async def test_form_with_automatic_connection_handling( CONF_KNX_CONNECTION_TYPE: CONF_KNX_AUTOMATIC, }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == CONF_KNX_AUTOMATIC.capitalize() assert result2["data"] == { @@ -971,7 +965,7 @@ async def _get_menu_step_secure_tunnel(hass: HomeAssistant) -> FlowResult: ), ) async def test_get_secure_menu_step_manual_tunnelling( - _request_description_mock, + request_description_mock: MagicMock, hass: HomeAssistant, ) -> None: """Test flow reaches secure_tunnellinn menu step from manual tunnelling configuration.""" @@ -1040,7 +1034,6 @@ async def test_configure_secure_tunnel_manual(hass: HomeAssistant, knx_setup) -> CONF_KNX_SECURE_DEVICE_AUTHENTICATION: "device_auth", }, ) - await hass.async_block_till_done() assert secure_tunnel_manual["type"] is FlowResultType.CREATE_ENTRY assert secure_tunnel_manual["data"] == { **DEFAULT_ENTRY_DATA, @@ -1086,7 +1079,6 @@ async def test_configure_secure_knxkeys(hass: HomeAssistant, knx_setup) -> None: {CONF_KNX_TUNNEL_ENDPOINT_IA: CONF_KNX_AUTOMATIC}, ) - await hass.async_block_till_done() assert secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert secure_knxkeys["data"] == { **DEFAULT_ENTRY_DATA, @@ -1201,7 +1193,6 @@ async def test_options_flow_connection_type( CONF_KNX_GATEWAY: str(gateway), }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert not result3["data"] assert mock_config_entry.data == { @@ -1307,7 +1298,6 @@ async def test_options_flow_secure_manual_to_keyfile( {CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.1"}, ) - await hass.async_block_till_done() assert 
secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert mock_config_entry.data == { **DEFAULT_ENTRY_DATA, @@ -1352,7 +1342,6 @@ async def test_options_communication_settings( CONF_KNX_TELEGRAM_LOG_SIZE: 3000, }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert not result2.get("data") assert mock_config_entry.data == { @@ -1405,7 +1394,6 @@ async def test_options_update_keyfile(hass: HomeAssistant, knx_setup) -> None: CONF_KNX_KNXKEY_PASSWORD: "password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert not result2.get("data") assert mock_config_entry.data == { @@ -1463,7 +1451,6 @@ async def test_options_keyfile_upload(hass: HomeAssistant, knx_setup) -> None: CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.1", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert not result3.get("data") assert mock_config_entry.data == { diff --git a/tests/components/knx/test_config_store.py b/tests/components/knx/test_config_store.py new file mode 100644 index 00000000000..116f4b5d839 --- /dev/null +++ b/tests/components/knx/test_config_store.py @@ -0,0 +1,412 @@ +"""Test KNX config store.""" + +from typing import Any + +import pytest + +from homeassistant.components.knx.storage.config_store import ( + STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, +) +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import KnxEntityGenerator +from .conftest import KNXTestKit + +from tests.typing import WebSocketGenerator + + +async def test_create_entity( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity creation.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_name = "Test no device" + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": test_name}, + ) + + # Test if entity is correctly stored in registry + await client.send_json_auto_id({"type": "knx/get_entity_entries"}) + res = await client.receive_json() + assert res["success"], res + assert res["result"] == [ + test_entity.extended_dict, + ] + # Test if entity is correctly stored in config store + test_storage_data = next( + iter( + hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"]["switch"].values() + ) + ) + assert test_storage_data == { + "entity": { + "name": test_name, + "device_info": None, + "entity_category": None, + }, + "knx": { + "ga_switch": {"write": "1/2/3", "state": None, "passive": []}, + "invert": False, + "respond_to_read": False, + "sync_state": True, + }, + } + + +async def test_create_entity_error( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test unsuccessful entity creation.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + # create entity with invalid platform + await client.send_json_auto_id( + { + "type": "knx/create_entity", + "platform": "invalid_platform", + "data": { + "entity": {"name": "Test invalid platform"}, + "knx": {"ga_switch": {"write": "1/2/3"}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert not res["result"]["success"] + assert res["result"]["errors"][0]["path"] == ["platform"] + assert 
res["result"]["error_base"].startswith("expected Platform or one of") + + # create entity with unsupported platform + await client.send_json_auto_id( + { + "type": "knx/create_entity", + "platform": Platform.TTS, # "tts" is not a supported platform (and is unlikely to ever be) + "data": { + "entity": {"name": "Test invalid platform"}, + "knx": {"ga_switch": {"write": "1/2/3"}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert not res["result"]["success"] + assert res["result"]["errors"][0]["path"] == ["platform"] + assert res["result"]["error_base"].startswith("value must be one of") + + +async def test_update_entity( + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity update.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": "Test"}, + ) + test_entity_id = test_entity.entity_id + + # update entity + new_name = "Updated name" + new_ga_switch_write = "4/5/6" + await client.send_json_auto_id( + { + "type": "knx/update_entity", + "platform": Platform.SWITCH, + "entity_id": test_entity_id, + "data": { + "entity": {"name": new_name}, + "knx": {"ga_switch": {"write": new_ga_switch_write}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["success"] + + entity = entity_registry.async_get(test_entity_id) + assert entity + assert entity.original_name == new_name + + assert ( + hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"]["switch"][ + test_entity.unique_id + ]["knx"]["ga_switch"]["write"] + == new_ga_switch_write + ) + + +async def test_update_entity_error( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + 
create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity update.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": "Test"}, + ) + + # update unsupported platform + new_name = "Updated name" + new_ga_switch_write = "4/5/6" + await client.send_json_auto_id( + { + "type": "knx/update_entity", + "platform": Platform.TTS, + "entity_id": test_entity.entity_id, + "data": { + "entity": {"name": new_name}, + "knx": {"ga_switch": {"write": new_ga_switch_write}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert not res["result"]["success"] + assert res["result"]["errors"][0]["path"] == ["platform"] + assert res["result"]["error_base"].startswith("value must be one of") + + # entity not found + await client.send_json_auto_id( + { + "type": "knx/update_entity", + "platform": Platform.SWITCH, + "entity_id": "non_existing_entity_id", + "data": { + "entity": {"name": new_name}, + "knx": {"ga_switch": {"write": new_ga_switch_write}}, + }, + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith("Entity not found:") + + # entity not in storage + await client.send_json_auto_id( + { + "type": "knx/update_entity", + "platform": Platform.SWITCH, + # `sensor` isn't yet supported, but we only have sensor entities automatically + # created with no configuration - it doesn't ,atter for the test though + "entity_id": "sensor.knx_interface_individual_address", + "data": { + "entity": {"name": new_name}, + "knx": {"ga_switch": {"write": new_ga_switch_write}}, + }, + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith("Entity not found in storage") 
+ + +async def test_delete_entity( + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity deletion.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": "Test"}, + ) + test_entity_id = test_entity.entity_id + + # delete entity + await client.send_json_auto_id( + { + "type": "knx/delete_entity", + "entity_id": test_entity_id, + } + ) + res = await client.receive_json() + assert res["success"], res + + assert not entity_registry.async_get(test_entity_id) + assert not hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") + + +async def test_delete_entity_error( + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], +) -> None: + """Test unsuccessful entity deletion.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + # delete unknown entity + await client.send_json_auto_id( + { + "type": "knx/delete_entity", + "entity_id": "switch.non_existing_entity", + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith("Entity not found") + + # delete entity not in config store + test_entity_id = "sensor.knx_interface_individual_address" + assert entity_registry.async_get(test_entity_id) + await client.send_json_auto_id( + { + "type": "knx/delete_entity", + "entity_id": test_entity_id, + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith("Entity not found") + + +async def 
test_get_entity_config( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity config retrieval.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": "Test"}, + ) + + await client.send_json_auto_id( + { + "type": "knx/get_entity_config", + "entity_id": test_entity.entity_id, + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["platform"] == Platform.SWITCH + assert res["result"]["data"] == { + "entity": { + "name": "Test", + "device_info": None, + "entity_category": None, + }, + "knx": { + "ga_switch": {"write": "1/2/3", "passive": [], "state": None}, + "respond_to_read": False, + "invert": False, + "sync_state": True, + }, + } + + +@pytest.mark.parametrize( + ("test_entity_id", "error_message_start"), + [ + ("switch.non_existing_entity", "Entity not found"), + ("sensor.knx_interface_individual_address", "Entity data not found"), + ], +) +async def test_get_entity_config_error( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + test_entity_id: str, + error_message_start: str, +) -> None: + """Test entity config retrieval errors.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "knx/get_entity_config", + "entity_id": test_entity_id, + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith(error_message_start) + + +async def test_validate_entity( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test entity validation.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await 
client.send_json_auto_id( + { + "type": "knx/validate_entity", + "platform": Platform.SWITCH, + "data": { + "entity": {"name": "test_name"}, + "knx": {"ga_switch": {"write": "1/2/3"}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["success"] is True + + # invalid data + await client.send_json_auto_id( + { + "type": "knx/validate_entity", + "platform": Platform.SWITCH, + "data": { + "entity": {"name": "test_name"}, + "knx": {"ga_switch": {}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["success"] is False + assert res["result"]["errors"][0]["path"] == ["data", "knx", "ga_switch", "write"] + assert res["result"]["errors"][0]["error_message"] == "required key not provided" + assert res["result"]["error_base"].startswith("required key not provided") diff --git a/tests/components/knx/test_datetime.py b/tests/components/knx/test_datetime.py index c8c6bd4f346..4b66769a8a3 100644 --- a/tests/components/knx/test_datetime.py +++ b/tests/components/knx/test_datetime.py @@ -34,7 +34,8 @@ async def test_datetime(hass: HomeAssistant, knx: KNXTestKit) -> None: ) await knx.assert_write( test_address, - (0x78, 0x01, 0x01, 0x73, 0x04, 0x05, 0x20, 0x80), + # service call in UTC, telegram in local time + (0x78, 0x01, 0x01, 0x13, 0x04, 0x05, 0x24, 0x00), ) state = hass.states.get("datetime.test") assert state.state == "2020-01-02T03:04:05+00:00" @@ -74,7 +75,7 @@ async def test_date_restore_and_respond(hass: HomeAssistant, knx: KNXTestKit) -> await knx.receive_read(test_address) await knx.assert_response( test_address, - (0x7A, 0x03, 0x03, 0x84, 0x04, 0x05, 0x20, 0x80), + (0x7A, 0x03, 0x03, 0x04, 0x04, 0x05, 0x24, 0x00), ) # don't respond to passive address diff --git a/tests/components/knx/test_device.py b/tests/components/knx/test_device.py new file mode 100644 index 00000000000..330fd854a50 --- /dev/null +++ b/tests/components/knx/test_device.py @@ -0,0 +1,77 @@ +"""Test 
KNX devices.""" + +from typing import Any + +from homeassistant.components.knx.const import DOMAIN +from homeassistant.components.knx.storage.config_store import ( + STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from .conftest import KNXTestKit + +from tests.typing import WebSocketGenerator + + +async def test_create_device( + hass: HomeAssistant, + knx: KNXTestKit, + device_registry: dr.DeviceRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test device creation.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "knx/create_device", + "name": "Test Device", + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["name"] == "Test Device" + assert res["result"]["manufacturer"] == "KNX" + assert res["result"]["identifiers"] + assert res["result"]["config_entries"][0] == knx.mock_config_entry.entry_id + + device_identifier = res["result"]["identifiers"][0][1] + assert device_registry.async_get_device({(DOMAIN, device_identifier)}) + device_id = res["result"]["id"] + assert device_registry.async_get(device_id) + + +async def test_remove_device( + hass: HomeAssistant, + knx: KNXTestKit, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + load_config_store: None, + hass_storage: dict[str, Any], +) -> None: + """Test device removal.""" + assert await async_setup_component(hass, "config", {}) + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await knx.assert_read("1/0/45", response=True) + + assert hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") + test_device = device_registry.async_get_device( + {(DOMAIN, "knx_vdev_4c80a564f5fe5da701ed293966d6384d")} + ) + 
device_id = test_device.id + device_entities = entity_registry.entities.get_entries_for_device_id(device_id) + assert len(device_entities) == 1 + + response = await client.remove_device(device_id, knx.mock_config_entry.entry_id) + assert response["success"] + assert not device_registry.async_get_device( + {(DOMAIN, "knx_vdev_4c80a564f5fe5da701ed293966d6384d")} + ) + assert not entity_registry.entities.get_entries_for_device_id(device_id) + assert not hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") diff --git a/tests/components/knx/test_device_trigger.py b/tests/components/knx/test_device_trigger.py index 136dddefaab..e5f776a9404 100644 --- a/tests/components/knx/test_device_trigger.py +++ b/tests/components/knx/test_device_trigger.py @@ -18,18 +18,12 @@ from homeassistant.setup import async_setup_component from .conftest import KNXTestKit -from tests.common import async_get_device_automations, async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import async_get_device_automations async def test_if_fires_on_telegram( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: @@ -98,31 +92,31 @@ async def test_if_fires_on_telegram( # "specific" shall ignore destination address await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(calls) == 2 - test_call = calls.pop() + assert len(service_calls) == 2 + test_call = service_calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = 
calls.pop() + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall ignore GroupValueRead await knx.receive_read("1/2/4") - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 async def test_default_if_fires_on_telegram( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: @@ -179,34 +173,34 @@ async def test_default_if_fires_on_telegram( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(calls) == 2 - test_call = calls.pop() + assert len(service_calls) == 2 + test_call = service_calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = calls.pop() + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall catch GroupValueRead as it is not set explicitly await knx.receive_read("1/2/4") - assert len(calls) == 2 - test_call = calls.pop() + assert len(service_calls) == 2 + test_call = service_calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = calls.pop() + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 async def test_remove_device_trigger( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: 
@@ -241,8 +235,8 @@ async def test_remove_device_trigger( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" await hass.services.async_call( automation.DOMAIN, @@ -250,8 +244,10 @@ async def test_remove_device_trigger( {ATTR_ENTITY_ID: f"automation.{automation_name}"}, blocking=True, ) + assert len(service_calls) == 1 + await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_get_triggers( @@ -395,7 +391,6 @@ async def test_invalid_device_trigger( ] }, ) - await hass.async_block_till_done() assert ( "Unnamed automation failed to setup triggers and has been disabled: " "extra keys not allowed @ data['invalid']. Got None" diff --git a/tests/components/knx/test_events.py b/tests/components/knx/test_events.py index ddb9d50240c..2228781ba89 100644 --- a/tests/components/knx/test_events.py +++ b/tests/components/knx/test_events.py @@ -31,7 +31,6 @@ async def test_knx_event( events = async_capture_events(hass, "knx_event") async def test_event_data(address, payload, value=None): - await hass.async_block_till_done() assert len(events) == 1 event = events.pop() assert event.data["data"] == payload @@ -69,7 +68,6 @@ async def test_knx_event( ) # no event received - await hass.async_block_till_done() assert len(events) == 0 # receive telegrams for group addresses matching the filter @@ -101,7 +99,6 @@ async def test_knx_event( await knx.receive_write("0/5/0", True) await knx.receive_write("1/7/0", True) await knx.receive_write("2/6/6", True) - await hass.async_block_till_done() assert len(events) == 0 # receive telegrams with wrong payload length diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index e0b4c78e322..c4d0acf0ce2 100644 --- a/tests/components/knx/test_expose.py +++ 
b/tests/components/knx/test_expose.py @@ -1,9 +1,9 @@ """Test KNX expose.""" from datetime import timedelta -import time -from unittest.mock import patch +from freezegun import freeze_time +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx import CONF_KNX_EXPOSE, DOMAIN, KNX_ADDRESS @@ -15,11 +15,10 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit -from tests.common import async_fire_time_changed_exact +from tests.common import async_fire_time_changed async def test_binary_expose(hass: HomeAssistant, knx: KNXTestKit) -> None: @@ -207,7 +206,9 @@ async def test_expose_string(hass: HomeAssistant, knx: KNXTestKit) -> None: ) -async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_expose_cooldown( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test an expose with cooldown.""" cooldown_time = 2 entity_id = "fake.entity" @@ -235,9 +236,8 @@ async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: await hass.async_block_till_done() await knx.assert_no_telegram() # Wait for cooldown to pass - async_fire_time_changed_exact( - hass, dt_util.utcnow() + timedelta(seconds=cooldown_time) - ) + freezer.tick(timedelta(seconds=cooldown_time)) + async_fire_time_changed(hass) await hass.async_block_till_done() await knx.assert_write("1/1/8", (3,)) @@ -327,25 +327,32 @@ async def test_expose_conversion_exception( ) -@patch("time.localtime") +@freeze_time("2022-1-7 9:13:14") +@pytest.mark.parametrize( + ("time_type", "raw"), + [ + ("time", (0xA9, 0x0D, 0x0E)), # localtime includes day of week + ("date", (0x07, 0x01, 0x16)), + ("datetime", (0x7A, 0x1, 0x7, 0xA9, 0xD, 0xE, 0x20, 0xC0)), + ], +) async def test_expose_with_date( - localtime, hass: HomeAssistant, knx: KNXTestKit + hass: HomeAssistant, knx: 
KNXTestKit, time_type: str, raw: tuple[int, ...] ) -> None: """Test an expose with a date.""" - localtime.return_value = time.struct_time([2022, 1, 7, 9, 13, 14, 6, 0, 0]) await knx.setup_integration( { CONF_KNX_EXPOSE: { - CONF_TYPE: "datetime", + CONF_TYPE: time_type, KNX_ADDRESS: "1/1/8", } } ) - await knx.assert_write("1/1/8", (0x7A, 0x1, 0x7, 0xE9, 0xD, 0xE, 0x20, 0x80)) + await knx.assert_write("1/1/8", raw) await knx.receive_read("1/1/8") - await knx.assert_response("1/1/8", (0x7A, 0x1, 0x7, 0xE9, 0xD, 0xE, 0x20, 0x80)) + await knx.assert_response("1/1/8", raw) entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 diff --git a/tests/components/knx/test_init.py b/tests/components/knx/test_init.py index a317a6a298c..48cc46ef1ee 100644 --- a/tests/components/knx/test_init.py +++ b/tests/components/knx/test_init.py @@ -284,7 +284,6 @@ async def test_async_remove_entry( assert await hass.config_entries.async_remove(config_entry.entry_id) assert unlink_mock.call_count == 3 rmdir_mock.assert_called_once() - await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/knx/test_interface_device.py b/tests/components/knx/test_interface_device.py index 6cf5d8026b9..79114d4ffd5 100644 --- a/tests/components/knx/test_interface_device.py +++ b/tests/components/knx/test_interface_device.py @@ -1,23 +1,28 @@ -"""Test KNX scene.""" +"""Test KNX interface device.""" from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from xknx.core import XknxConnectionState, XknxConnectionType from xknx.telegram import IndividualAddress from homeassistant.components.knx.sensor import SCAN_INTERVAL from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util +from homeassistant.helpers import 
device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component from .conftest import KNXTestKit from tests.common import async_capture_events, async_fire_time_changed +from tests.typing import WebSocketGenerator async def test_diagnostic_entities( - hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test diagnostic entities.""" await knx.setup_integration({}) @@ -48,7 +53,8 @@ async def test_diagnostic_entities( knx.xknx.connection_manager.cemi_count_outgoing_error = 2 events = async_capture_events(hass, "state_changed") - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() assert len(events) == 3 # 5 polled sensors - 2 disabled @@ -64,25 +70,19 @@ async def test_diagnostic_entities( ): assert hass.states.get(entity_id).state == test_state - await knx.xknx.connection_manager.connection_state_changed( + knx.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.DISCONNECTED ) await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() assert len(events) == 4 # 3 not always_available + 3 force_update - 2 disabled events.clear() knx.xknx.current_address = IndividualAddress("1.1.1") - await knx.xknx.connection_manager.connection_state_changed( + knx.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.CONNECTED, connection_type=XknxConnectionType.TUNNEL_UDP, ) await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() assert len(events) == 6 # all diagnostic sensors - counters are reset on connect for entity_id, test_state in ( @@ -109,5 +109,29 @@ async def 
test_removed_entity( "sensor.knx_interface_connection_established", disabled_by=er.RegistryEntryDisabler.USER, ) - await hass.async_block_till_done() unregister_mock.assert_called_once() + + +async def test_remove_interface_device( + hass: HomeAssistant, + knx: KNXTestKit, + device_registry: dr.DeviceRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test device removal.""" + assert await async_setup_component(hass, "config", {}) + await knx.setup_integration({}) + client = await hass_ws_client(hass) + knx_devices = device_registry.devices.get_devices_for_config_entry_id( + knx.mock_config_entry.entry_id + ) + assert len(knx_devices) == 1 + assert knx_devices[0].name == "KNX Interface" + device_id = knx_devices[0].id + # interface device can't be removed + res = await client.remove_device(device_id, knx.mock_config_entry.entry_id) + assert not res["success"] + assert ( + res["error"]["message"] + == "Failed to remove device entry, rejected by integration" + ) diff --git a/tests/components/knx/test_knx_selectors.py b/tests/components/knx/test_knx_selectors.py new file mode 100644 index 00000000000..7b2f09af84b --- /dev/null +++ b/tests/components/knx/test_knx_selectors.py @@ -0,0 +1,128 @@ +"""Test KNX selectors.""" + +from typing import Any + +import pytest +import voluptuous as vol + +from homeassistant.components.knx.const import ColorTempModes +from homeassistant.components.knx.storage.knx_selector import GASelector + +INVALID = "invalid" + + +@pytest.mark.parametrize( + ("selector_config", "data", "expected"), + [ + ( + {}, + {}, + {"write": None, "state": None, "passive": []}, + ), + ( + {}, + {"write": "1/2/3"}, + {"write": "1/2/3", "state": None, "passive": []}, + ), + ( + {}, + {"state": "1/2/3"}, + {"write": None, "state": "1/2/3", "passive": []}, + ), + ( + {}, + {"passive": ["1/2/3"]}, + {"write": None, "state": None, "passive": ["1/2/3"]}, + ), + ( + {}, + {"write": "1", "state": 2, "passive": ["1/2/3"]}, + {"write": "1", "state": 2, 
"passive": ["1/2/3"]}, + ), + ( + {"write": False}, + {"write": "1/2/3"}, + {"state": None, "passive": []}, + ), + ( + {"write": False}, + {"state": "1/2/3"}, + {"state": "1/2/3", "passive": []}, + ), + ( + {"write": False}, + {"passive": ["1/2/3"]}, + {"state": None, "passive": ["1/2/3"]}, + ), + ( + {"passive": False}, + {"passive": ["1/2/3"]}, + {"write": None, "state": None}, + ), + ( + {"passive": False}, + {"write": "1/2/3"}, + {"write": "1/2/3", "state": None}, + ), + # required keys + ( + {"write_required": True}, + {}, + INVALID, + ), + ( + {"state_required": True}, + {}, + INVALID, + ), + ( + {"write_required": True}, + {"write": "1/2/3"}, + {"write": "1/2/3", "state": None, "passive": []}, + ), + ( + {"state_required": True}, + {"state": "1/2/3"}, + {"write": None, "state": "1/2/3", "passive": []}, + ), + ( + {"write_required": True}, + {"state": "1/2/3"}, + INVALID, + ), + ( + {"state_required": True}, + {"write": "1/2/3"}, + INVALID, + ), + # dpt key + ( + {"dpt": ColorTempModes}, + {"write": "1/2/3"}, + INVALID, + ), + ( + {"dpt": ColorTempModes}, + {"write": "1/2/3", "dpt": "7.600"}, + {"write": "1/2/3", "state": None, "passive": [], "dpt": "7.600"}, + ), + ( + {"dpt": ColorTempModes}, + {"write": "1/2/3", "state": None, "passive": [], "dpt": "invalid"}, + INVALID, + ), + ], +) +def test_ga_selector( + selector_config: dict[str, Any], + data: dict[str, Any], + expected: str | dict[str, Any], +) -> None: + """Test GASelector.""" + selector = GASelector(**selector_config) + if expected == INVALID: + with pytest.raises(vol.Invalid): + selector(data) + else: + result = selector(data) + assert result == expected diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index a14d1bb32ae..e2e4a673a0d 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -4,10 +4,12 @@ from __future__ import annotations from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory +import 
pytest from xknx.core import XknxConnectionState from xknx.devices.light import Light as XknxLight -from homeassistant.components.knx.const import CONF_STATE_ADDRESS, KNX_ADDRESS +from homeassistant.components.knx.const import CONF_STATE_ADDRESS, KNX_ADDRESS, Platform from homeassistant.components.knx.schema import LightSchema from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -19,8 +21,8 @@ from homeassistant.components.light import ( ) from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util +from . import KnxEntityGenerator from .conftest import KNXTestKit from tests.common import async_fire_time_changed @@ -91,9 +93,7 @@ async def test_light_brightness(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # StateUpdater initialize state await knx.assert_read(test_brightness_state) - await knx.xknx.connection_manager.connection_state_changed( - XknxConnectionState.CONNECTED - ) + knx.xknx.connection_manager.connection_state_changed(XknxConnectionState.CONNECTED) # turn on light via brightness await hass.services.async_call( "light", @@ -644,7 +644,9 @@ async def test_light_rgb_individual(hass: HomeAssistant, knx: KNXTestKit) -> Non await knx.assert_write(test_blue, (45,)) -async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_light_rgbw_individual( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test KNX light with rgbw color in individual GAs.""" test_red = "1/1/3" test_red_state = "1/1/4" @@ -764,9 +766,8 @@ async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> No await knx.receive_write(test_green, (0,)) # # individual color debounce takes 0.2 seconds if not all 4 addresses received knx.assert_state("light.test", STATE_ON) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT) - ) + 
freezer.tick(timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() knx.assert_state("light.test", STATE_OFF) # turn ON from KNX @@ -1151,3 +1152,69 @@ async def test_light_rgbw_brightness(hass: HomeAssistant, knx: KNXTestKit) -> No knx.assert_state( "light.test", STATE_ON, brightness=50, rgbw_color=(100, 200, 55, 12) ) + + +async def test_light_ui_create( + hass: HomeAssistant, + knx: KNXTestKit, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test creating a switch.""" + await knx.setup_integration({}) + await create_ui_entity( + platform=Platform.LIGHT, + entity_data={"name": "test"}, + knx_data={ + "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, + "_light_color_mode_schema": "default", + "sync_state": True, + }, + ) + # created entity sends read-request to KNX bus + await knx.assert_read("2/2/2") + await knx.receive_response("2/2/2", True) + state = hass.states.get("light.test") + assert state.state is STATE_ON + + +@pytest.mark.parametrize( + ("color_temp_mode", "raw_ct"), + [ + ("7.600", (0x10, 0x68)), + ("9", (0x46, 0x69)), + ("5.001", (0x74,)), + ], +) +async def test_light_ui_color_temp( + hass: HomeAssistant, + knx: KNXTestKit, + create_ui_entity: KnxEntityGenerator, + color_temp_mode: str, + raw_ct: tuple[int, ...], +) -> None: + """Test creating a switch.""" + await knx.setup_integration({}) + await create_ui_entity( + platform=Platform.LIGHT, + entity_data={"name": "test"}, + knx_data={ + "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, + "ga_color_temp": { + "write": "3/3/3", + "dpt": color_temp_mode, + }, + "_light_color_mode_schema": "default", + "sync_state": True, + }, + ) + await knx.assert_read("2/2/2", True) + await hass.services.async_call( + "light", + "turn_on", + {"entity_id": "light.test", ATTR_COLOR_TEMP_KELVIN: 4200}, + blocking=True, + ) + await knx.assert_write("3/3/3", raw_ct) + state = hass.states.get("light.test") + assert state.state is STATE_ON + 
assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == pytest.approx(4200, abs=1) diff --git a/tests/components/knx/test_notify.py b/tests/components/knx/test_notify.py index 94f2d579fc8..b481675140b 100644 --- a/tests/components/knx/test_notify.py +++ b/tests/components/knx/test_notify.py @@ -21,17 +21,13 @@ async def test_legacy_notify_service_simple( } } ) - await hass.async_block_till_done() - await hass.services.async_call( "notify", "notify", {"target": "test", "message": "I love KNX"}, blocking=True ) - await knx.assert_write( "1/0/0", (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), ) - await hass.services.async_call( "notify", "notify", @@ -41,7 +37,6 @@ async def test_legacy_notify_service_simple( }, blocking=True, ) - await knx.assert_write( "1/0/0", (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), @@ -68,12 +63,9 @@ async def test_legacy_notify_service_multiple_sends_to_all_with_different_encodi ] } ) - await hass.async_block_till_done() - await hass.services.async_call( "notify", "notify", {"message": "Gänsefüßchen"}, blocking=True ) - await knx.assert_write( "1/0/0", # "G?nsef??chen" @@ -95,7 +87,6 @@ async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: } } ) - await hass.services.async_call( notify.DOMAIN, notify.SERVICE_SEND_MESSAGE, diff --git a/tests/components/knx/test_sensor.py b/tests/components/knx/test_sensor.py index 22d9993b58f..41ffcfcb5c7 100644 --- a/tests/components/knx/test_sensor.py +++ b/tests/components/knx/test_sensor.py @@ -68,25 +68,21 @@ async def test_always_callback(hass: HomeAssistant, knx: KNXTestKit) -> None: # receive initial telegram await knx.receive_write("1/1/1", (0x42,)) await knx.receive_write("2/2/2", (0x42,)) - await hass.async_block_till_done() assert len(events) == 2 # receive second telegram with identical payload # always_callback shall force state_changed event await knx.receive_write("1/1/1", (0x42,)) await knx.receive_write("2/2/2", (0x42,)) - await 
hass.async_block_till_done() assert len(events) == 3 # receive telegram with different payload await knx.receive_write("1/1/1", (0xFA,)) await knx.receive_write("2/2/2", (0xFA,)) - await hass.async_block_till_done() assert len(events) == 5 # receive telegram with second payload again # always_callback shall force state_changed event await knx.receive_write("1/1/1", (0xFA,)) await knx.receive_write("2/2/2", (0xFA,)) - await hass.async_block_till_done() assert len(events) == 6 diff --git a/tests/components/knx/test_services.py b/tests/components/knx/test_services.py index 7f748af5ceb..f70389dbc92 100644 --- a/tests/components/knx/test_services.py +++ b/tests/components/knx/test_services.py @@ -154,7 +154,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: # no event registered await knx.receive_write(test_address, True) - await hass.async_block_till_done() assert len(events) == 0 # register event with `type` @@ -165,7 +164,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: blocking=True, ) await knx.receive_write(test_address, (0x04, 0xD2)) - await hass.async_block_till_done() assert len(events) == 1 typed_event = events.pop() assert typed_event.data["data"] == (0x04, 0xD2) @@ -179,7 +177,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: blocking=True, ) await knx.receive_write(test_address, True) - await hass.async_block_till_done() assert len(events) == 0 # register event without `type` @@ -188,7 +185,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: ) await knx.receive_write(test_address, True) await knx.receive_write(test_address, False) - await hass.async_block_till_done() assert len(events) == 2 untyped_event_2 = events.pop() assert untyped_event_2.data["data"] is False diff --git a/tests/components/knx/test_switch.py b/tests/components/knx/test_switch.py index 8dce4cf9c27..bc0a6b27675 100644 --- a/tests/components/knx/test_switch.py +++ 
b/tests/components/knx/test_switch.py @@ -6,9 +6,10 @@ from homeassistant.components.knx.const import ( KNX_ADDRESS, ) from homeassistant.components.knx.schema import SwitchSchema -from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON +from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant, State +from . import KnxEntityGenerator from .conftest import KNXTestKit from tests.common import mock_restore_cache @@ -146,3 +147,27 @@ async def test_switch_restore_and_respond(hass: HomeAssistant, knx) -> None: # respond to new state await knx.receive_read(_ADDRESS) await knx.assert_response(_ADDRESS, False) + + +async def test_switch_ui_create( + hass: HomeAssistant, + knx: KNXTestKit, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test creating a switch.""" + await knx.setup_integration({}) + await create_ui_entity( + platform=Platform.SWITCH, + entity_data={"name": "test"}, + knx_data={ + "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, + "respond_to_read": True, + "sync_state": True, + "invert": False, + }, + ) + # created entity sends read-request to KNX bus + await knx.assert_read("2/2/2") + await knx.receive_response("2/2/2", True) + state = hass.states.get("switch.test") + assert state.state is STATE_ON diff --git a/tests/components/knx/test_trigger.py b/tests/components/knx/test_trigger.py index d957082de18..73e8b10840e 100644 --- a/tests/components/knx/test_trigger.py +++ b/tests/components/knx/test_trigger.py @@ -11,18 +11,10 @@ from homeassistant.setup import async_setup_component from .conftest import KNXTestKit -from tests.common import async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - async def test_telegram_trigger( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], knx: KNXTestKit, ) -> None: """Test 
telegram triggers firing.""" @@ -73,24 +65,24 @@ async def test_telegram_trigger( # "specific" shall ignore destination address await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(calls) == 2 - test_call = calls.pop() + assert len(service_calls) == 2 + test_call = service_calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = calls.pop() + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall ignore GroupValueRead await knx.receive_read("1/2/4") - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 @@ -105,7 +97,7 @@ async def test_telegram_trigger( ) async def test_telegram_trigger_dpt_option( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], knx: KNXTestKit, payload: tuple[int, ...], type_option: dict[str, bool], @@ -138,16 +130,16 @@ async def test_telegram_trigger_dpt_option( ) await knx.receive_write("0/0/1", payload) - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["trigger"]["value"] == expected_value assert test_call.data["trigger"]["unit"] == expected_unit await knx.receive_read("0/0/1") - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["trigger"]["value"] is None assert 
test_call.data["trigger"]["unit"] is None @@ -192,7 +184,7 @@ async def test_telegram_trigger_dpt_option( ) async def test_telegram_trigger_options( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], knx: KNXTestKit, group_value_options: dict[str, bool], direction_options: dict[str, bool], @@ -225,28 +217,28 @@ async def test_telegram_trigger_options( if group_value_options.get("group_value_write", True) and direction_options.get( "incoming", True ): - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(calls) == 0 + assert len(service_calls) == 0 await knx.receive_response("0/0/1", 1) if group_value_options["group_value_response"] and direction_options.get( "incoming", True ): - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(calls) == 0 + assert len(service_calls) == 0 await knx.receive_read("0/0/1") if group_value_options["group_value_read"] and direction_options.get( "incoming", True ): - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(calls) == 0 + assert len(service_calls) == 0 await hass.services.async_call( "knx", @@ -254,20 +246,22 @@ async def test_telegram_trigger_options( {"address": "0/0/1", "payload": True}, blocking=True, ) + assert len(service_calls) == 1 + await knx.assert_write("0/0/1", True) if ( group_value_options.get("group_value_write", True) and direction_options["outgoing"] ): - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 2 + assert service_calls.pop().data["catch_all"] == 
"telegram - 0/0/1" else: - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_remove_telegram_trigger( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], knx: KNXTestKit, ) -> None: """Test for removed callback when telegram trigger not used.""" @@ -296,8 +290,8 @@ async def test_remove_telegram_trigger( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" await hass.services.async_call( automation.DOMAIN, @@ -305,8 +299,10 @@ async def test_remove_telegram_trigger( {ATTR_ENTITY_ID: f"automation.{automation_name}"}, blocking=True, ) + assert len(service_calls) == 1 + await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_invalid_trigger( @@ -338,7 +334,6 @@ async def test_invalid_trigger( ] }, ) - await hass.async_block_till_done() assert ( "Unnamed automation failed to setup triggers and has been disabled: " "extra keys not allowed @ data['invalid']. 
Got None" diff --git a/tests/components/knx/test_weather.py b/tests/components/knx/test_weather.py index 0adcc309252..5ba38d6cdf8 100644 --- a/tests/components/knx/test_weather.py +++ b/tests/components/knx/test_weather.py @@ -45,12 +45,12 @@ async def test_weather(hass: HomeAssistant, knx: KNXTestKit) -> None: # brightness await knx.assert_read("1/1/6") - await knx.receive_response("1/1/6", (0x7C, 0x5E)) await knx.assert_read("1/1/8") + await knx.receive_response("1/1/6", (0x7C, 0x5E)) await knx.receive_response("1/1/8", (0x7C, 0x5E)) + await knx.assert_read("1/1/5") await knx.assert_read("1/1/7") await knx.receive_response("1/1/7", (0x7C, 0x5E)) - await knx.assert_read("1/1/5") await knx.receive_response("1/1/5", (0x7C, 0x5E)) # wind speed @@ -64,10 +64,10 @@ async def test_weather(hass: HomeAssistant, knx: KNXTestKit) -> None: # alarms await knx.assert_read("1/1/2") await knx.receive_response("1/1/2", False) - await knx.assert_read("1/1/3") - await knx.receive_response("1/1/3", False) await knx.assert_read("1/1/1") + await knx.assert_read("1/1/3") await knx.receive_response("1/1/1", False) + await knx.receive_response("1/1/3", False) # day night await knx.assert_read("1/1/12") diff --git a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index ca60905b0ba..309ea111709 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -4,6 +4,7 @@ from typing import Any from unittest.mock import patch from homeassistant.components.knx import DOMAIN, KNX_ADDRESS, SwitchSchema +from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant @@ -87,6 +88,7 @@ async def test_knx_project_file_process( assert res["success"], res assert hass.data[DOMAIN].project.loaded + assert hass_storage[KNX_PROJECT_STORAGE_KEY]["data"] == _parse_result async def test_knx_project_file_process_error( @@ -126,19 
+128,20 @@ async def test_knx_project_file_remove( knx: KNXTestKit, hass_ws_client: WebSocketGenerator, load_knxproj: None, + hass_storage: dict[str, Any], ) -> None: """Test knx/project_file_remove command.""" await knx.setup_integration({}) + assert hass_storage[KNX_PROJECT_STORAGE_KEY] client = await hass_ws_client(hass) assert hass.data[DOMAIN].project.loaded await client.send_json({"id": 6, "type": "knx/project_file_remove"}) - with patch("homeassistant.helpers.storage.Store.async_remove") as remove_mock: - res = await client.receive_json() - remove_mock.assert_called_once_with() + res = await client.receive_json() assert res["success"], res assert not hass.data[DOMAIN].project.loaded + assert not hass_storage.get(KNX_PROJECT_STORAGE_KEY) async def test_knx_get_project( @@ -343,7 +346,7 @@ async def test_knx_subscribe_telegrams_command_project( assert res["event"]["destination"] == "0/1/1" assert res["event"]["destination_name"] == "percent" assert res["event"]["payload"] == 1 - assert res["event"]["value"] == "Error decoding value" + assert res["event"]["value"] is None assert res["event"]["telegramtype"] == "GroupValueWrite" assert res["event"]["source"] == "1.1.6" assert ( diff --git a/tests/components/kodi/test_config_flow.py b/tests/components/kodi/test_config_flow.py index d570654be93..ad99067ac7a 100644 --- a/tests/components/kodi/test_config_flow.py +++ b/tests/components/kodi/test_config_flow.py @@ -30,7 +30,7 @@ from tests.common import MockConfigEntry @pytest.fixture -async def user_flow(hass): +async def user_flow(hass: HomeAssistant) -> str: """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -41,7 +41,7 @@ async def user_flow(hass): return result["flow_id"] -async def test_user_flow(hass: HomeAssistant, user_flow) -> None: +async def test_user_flow(hass: HomeAssistant, user_flow: str) -> None: """Test a successful user 
initiated flow.""" with ( patch( @@ -74,7 +74,7 @@ async def test_user_flow(hass: HomeAssistant, user_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_valid_auth(hass: HomeAssistant, user_flow) -> None: +async def test_form_valid_auth(hass: HomeAssistant, user_flow: str) -> None: """Test we handle valid auth.""" with ( patch( @@ -124,7 +124,7 @@ async def test_form_valid_auth(hass: HomeAssistant, user_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_valid_ws_port(hass: HomeAssistant, user_flow) -> None: +async def test_form_valid_ws_port(hass: HomeAssistant, user_flow: str) -> None: """Test we handle valid websocket port.""" with ( patch( @@ -180,7 +180,7 @@ async def test_form_valid_ws_port(hass: HomeAssistant, user_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_empty_ws_port(hass: HomeAssistant, user_flow) -> None: +async def test_form_empty_ws_port(hass: HomeAssistant, user_flow: str) -> None: """Test we handle an empty websocket port input.""" with ( patch( @@ -226,7 +226,7 @@ async def test_form_empty_ws_port(hass: HomeAssistant, user_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_invalid_auth(hass: HomeAssistant, user_flow) -> None: +async def test_form_invalid_auth(hass: HomeAssistant, user_flow: str) -> None: """Test we handle invalid auth.""" with ( patch( @@ -322,7 +322,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, user_flow) -> None: assert result["errors"] == {} -async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow) -> None: +async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow: str) -> None: """Test we handle cannot connect over HTTP error.""" with ( patch( @@ -341,7 +341,7 @@ async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow) -> None: assert result["errors"] == {"base": "cannot_connect"} -async def test_form_exception_http(hass: HomeAssistant, 
user_flow) -> None: +async def test_form_exception_http(hass: HomeAssistant, user_flow: str) -> None: """Test we handle generic exception over HTTP.""" with ( patch( @@ -360,7 +360,7 @@ async def test_form_exception_http(hass: HomeAssistant, user_flow) -> None: assert result["errors"] == {"base": "unknown"} -async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow) -> None: +async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow: str) -> None: """Test we handle cannot connect over WebSocket error.""" with ( patch( @@ -423,7 +423,7 @@ async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow) -> None: assert result["errors"] == {"base": "cannot_connect"} -async def test_form_exception_ws(hass: HomeAssistant, user_flow) -> None: +async def test_form_exception_ws(hass: HomeAssistant, user_flow: str) -> None: """Test we handle generic exception over WebSocket.""" with ( patch( @@ -560,7 +560,7 @@ async def test_discovery_cannot_connect_ws(hass: HomeAssistant) -> None: assert result["errors"] == {} -async def test_discovery_exception_http(hass: HomeAssistant, user_flow) -> None: +async def test_discovery_exception_http(hass: HomeAssistant) -> None: """Test we handle generic exception during discovery validation.""" with ( patch( diff --git a/tests/components/kodi/test_device_trigger.py b/tests/components/kodi/test_device_trigger.py index d3de349018e..a54641a4234 100644 --- a/tests/components/kodi/test_device_trigger.py +++ b/tests/components/kodi/test_device_trigger.py @@ -12,11 +12,7 @@ from homeassistant.setup import async_setup_component from . 
import init_integration -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -25,13 +21,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - -@pytest.fixture -async def kodi_media_player(hass): +async def kodi_media_player(hass: HomeAssistant) -> str: """Get a kodi media player.""" await init_integration(hass) return f"{MP_DOMAIN}.name" @@ -77,8 +67,8 @@ async def test_get_triggers( async def test_if_fires_on_state_change( hass: HomeAssistant, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], - kodi_media_player, + service_calls: list[ServiceCall], + kodi_media_player: str, ) -> None: """Test for turn_on and turn_off triggers firing.""" entry = entity_registry.async_get(kodi_media_player) @@ -135,8 +125,8 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == f"turn_on - {kodi_media_player} - 0" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == f"turn_on - {kodi_media_player} - 0" await hass.services.async_call( MP_DOMAIN, @@ -146,15 +136,15 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == f"turn_off - {kodi_media_player} - 0" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == f"turn_off - {kodi_media_player} - 0" async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], - kodi_media_player, + service_calls: list[ServiceCall], + kodi_media_player: str, ) -> None: """Test for 
turn_on and turn_off triggers firing.""" entry = entity_registry.async_get(kodi_media_player) @@ -194,5 +184,5 @@ async def test_if_fires_on_state_change_legacy( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == f"turn_on - {kodi_media_player} - 0" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == f"turn_on - {kodi_media_player} - 0" diff --git a/tests/components/kodi/util.py b/tests/components/kodi/util.py index 6217a77903b..e56ba03b7e5 100644 --- a/tests/components/kodi/util.py +++ b/tests/components/kodi/util.py @@ -63,7 +63,7 @@ def get_kodi_connection( class MockConnection: """A mock kodi connection.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Mock the Kodi connection.""" self._connected = connected @@ -92,7 +92,7 @@ class MockConnection: class MockWSConnection: """A mock kodi websocket connection.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Mock the websocket connection.""" self._connected = connected diff --git a/tests/components/konnected/test_panel.py b/tests/components/konnected/test_panel.py index 64cc414cdd3..48ebea64161 100644 --- a/tests/components/konnected/test_panel.py +++ b/tests/components/konnected/test_panel.py @@ -700,4 +700,4 @@ async def test_connect_retry(hass: HomeAssistant, mock_panel) -> None: async_fire_time_changed(hass, utcnow() + timedelta(seconds=21)) await hass.async_block_till_done() await async_update_entity(hass, "switch.konnected_445566_actuator_6") - assert hass.states.get("switch.konnected_445566_actuator_6").state == "off" + assert hass.states.get("switch.konnected_445566_actuator_6").state == "unknown" diff --git a/tests/components/kostal_plenticore/conftest.py b/tests/components/kostal_plenticore/conftest.py index af958f19f3a..acce8ebed7a 100644 --- a/tests/components/kostal_plenticore/conftest.py +++ b/tests/components/kostal_plenticore/conftest.py @@ -2,11 
+2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pykoplenti import MeData, VersionData import pytest -from typing_extensions import Generator from homeassistant.components.kostal_plenticore.coordinator import Plenticore from homeassistant.core import HomeAssistant diff --git a/tests/components/kostal_plenticore/test_config_flow.py b/tests/components/kostal_plenticore/test_config_flow.py index c982e2af818..bd9b9ad278d 100644 --- a/tests/components/kostal_plenticore/test_config_flow.py +++ b/tests/components/kostal_plenticore/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Kostal Plenticore Solar Inverter config flow.""" +from collections.abc import Generator from unittest.mock import ANY, AsyncMock, MagicMock, patch from pykoplenti import ApiClient, AuthenticationException, SettingsData import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.kostal_plenticore.const import DOMAIN diff --git a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index 1c3a9efe2e5..0f358260be7 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -6,7 +6,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.kostal_plenticore.coordinator import Plenticore from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry +from tests.common import ANY, MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -54,6 +54,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": None, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "client": { "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' 
sw_version='01.16.05025'", diff --git a/tests/components/kostal_plenticore/test_helper.py b/tests/components/kostal_plenticore/test_helper.py index a18cf32c5a1..acd33f82a27 100644 --- a/tests/components/kostal_plenticore/test_helper.py +++ b/tests/components/kostal_plenticore/test_helper.py @@ -1,10 +1,10 @@ """Test Kostal Plenticore helper.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pykoplenti import ApiClient, ExtendedApiClient, SettingsData import pytest -from typing_extensions import Generator from homeassistant.components.kostal_plenticore.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/kostal_plenticore/test_number.py b/tests/components/kostal_plenticore/test_number.py index 9d94c6f9951..586129c486d 100644 --- a/tests/components/kostal_plenticore/test_number.py +++ b/tests/components/kostal_plenticore/test_number.py @@ -1,11 +1,11 @@ """Test Kostal Plenticore number.""" +from collections.abc import Generator from datetime import timedelta from unittest.mock import patch from pykoplenti import ApiClient, SettingsData import pytest -from typing_extensions import Generator from homeassistant.components.number import ( ATTR_MAX, diff --git a/tests/components/kulersky/test_light.py b/tests/components/kulersky/test_light.py index 90f40d327e4..a2245e721c5 100644 --- a/tests/components/kulersky/test_light.py +++ b/tests/components/kulersky/test_light.py @@ -1,5 +1,6 @@ """Test the Kuler Sky lights.""" +from collections.abc import AsyncGenerator from unittest.mock import MagicMock, patch import pykulersky @@ -37,13 +38,15 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture -async def mock_entry(hass): +async def mock_entry() -> MockConfigEntry: """Create a mock light entity.""" return MockConfigEntry(domain=DOMAIN) @pytest.fixture -async def mock_light(hass, mock_entry): +async def mock_light( + hass: HomeAssistant, mock_entry: 
MockConfigEntry +) -> AsyncGenerator[MagicMock]: """Create a mock light entity.""" light = MagicMock(spec=pykulersky.Light) @@ -64,7 +67,7 @@ async def mock_light(hass, mock_entry): yield light -async def test_init(hass: HomeAssistant, mock_light) -> None: +async def test_init(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test platform setup.""" state = hass.states.get("light.bedroom") assert state.state == STATE_OFF @@ -87,7 +90,9 @@ async def test_init(hass: HomeAssistant, mock_light) -> None: assert mock_light.disconnect.called -async def test_remove_entry(hass: HomeAssistant, mock_light, mock_entry) -> None: +async def test_remove_entry( + hass: HomeAssistant, mock_light: MagicMock, mock_entry: MockConfigEntry +) -> None: """Test platform setup.""" assert hass.data[DOMAIN][DATA_ADDRESSES] == {"AA:BB:CC:11:22:33"} assert DATA_DISCOVERY_SUBSCRIPTION in hass.data[DOMAIN] @@ -99,7 +104,7 @@ async def test_remove_entry(hass: HomeAssistant, mock_light, mock_entry) -> None async def test_remove_entry_exceptions_caught( - hass: HomeAssistant, mock_light, mock_entry + hass: HomeAssistant, mock_light: MagicMock, mock_entry: MockConfigEntry ) -> None: """Assert that disconnect exceptions are caught.""" mock_light.disconnect.side_effect = pykulersky.PykulerskyException("Mock error") @@ -108,7 +113,7 @@ async def test_remove_entry_exceptions_caught( assert mock_light.disconnect.called -async def test_update_exception(hass: HomeAssistant, mock_light) -> None: +async def test_update_exception(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test platform setup.""" mock_light.get_color.side_effect = pykulersky.PykulerskyException @@ -118,7 +123,7 @@ async def test_update_exception(hass: HomeAssistant, mock_light) -> None: assert state.state == STATE_UNAVAILABLE -async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: +async def test_light_turn_on(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test KulerSkyLight turn_on.""" 
mock_light.get_color.return_value = (255, 255, 255, 255) await hass.services.async_call( @@ -175,7 +180,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: mock_light.set_color.assert_called_with(50, 41, 0, 50) -async def test_light_turn_off(hass: HomeAssistant, mock_light) -> None: +async def test_light_turn_off(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test KulerSkyLight turn_on.""" mock_light.get_color.return_value = (0, 0, 0, 0) await hass.services.async_call( @@ -188,7 +193,7 @@ async def test_light_turn_off(hass: HomeAssistant, mock_light) -> None: mock_light.set_color.assert_called_with(0, 0, 0, 0) -async def test_light_update(hass: HomeAssistant, mock_light) -> None: +async def test_light_update(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test KulerSkyLight update.""" utcnow = dt_util.utcnow() diff --git a/tests/components/lacrosse_view/conftest.py b/tests/components/lacrosse_view/conftest.py index a6294c64210..4f1bfdc5748 100644 --- a/tests/components/lacrosse_view/conftest.py +++ b/tests/components/lacrosse_view/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for LaCrosse View tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/lacrosse_view/test_diagnostics.py b/tests/components/lacrosse_view/test_diagnostics.py index 08cef64a935..dc48f160113 100644 --- a/tests/components/lacrosse_view/test_diagnostics.py +++ b/tests/components/lacrosse_view/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.lacrosse_view import DOMAIN from homeassistant.core import HomeAssistant @@ -32,7 +33,6 @@ async def test_entry_diagnostics( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert ( - await 
get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 6741ac0797c..1a4fbbd4a0c 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -1,6 +1,6 @@ """Lamarzocco session fixtures.""" -from collections.abc import Callable +from collections.abc import Generator import json from unittest.mock import MagicMock, patch @@ -9,7 +9,6 @@ from lmcloud.const import FirmwareType, MachineModel, SteamLevel from lmcloud.lm_machine import LaMarzoccoMachine from lmcloud.models import LaMarzoccoDeviceInfo import pytest -from typing_extensions import Generator from homeassistant.components.lamarzocco.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_NAME, CONF_TOKEN @@ -129,14 +128,6 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: lamarzocco.firmware[FirmwareType.GATEWAY].latest_version = "v3.5-rc3" lamarzocco.firmware[FirmwareType.MACHINE].latest_version = "1.55" - async def websocket_connect_mock( - notify_callback: Callable | None, - ) -> None: - """Mock the websocket connect method.""" - return None - - lamarzocco.websocket_connect = websocket_connect_mock - yield lamarzocco diff --git a/tests/components/lamarzocco/snapshots/test_switch.ambr b/tests/components/lamarzocco/snapshots/test_switch.ambr index 09864be1d5c..4ec22e3123d 100644 --- a/tests/components/lamarzocco/snapshots/test_switch.ambr +++ b/tests/components/lamarzocco/snapshots/test_switch.ambr @@ -10,7 +10,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'switch', - 'entity_category': None, + 'entity_category': , 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', 'has_entity_name': True, 'hidden_by': None, @@ -43,7 +43,7 @@ 'device_id': , 
'disabled_by': None, 'domain': 'switch', - 'entity_category': None, + 'entity_category': , 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', 'has_entity_name': True, 'hidden_by': None, @@ -113,8 +113,10 @@ }), 'manufacturer': 'La Marzocco', 'model': , + 'model_id': None, 'name': 'GS01234', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'GS01234', 'suggested_area': None, 'sw_version': '1.40', diff --git a/tests/components/lamarzocco/snapshots/test_update.ambr b/tests/components/lamarzocco/snapshots/test_update.ambr index 4ab8e35ffd0..f08b9249f50 100644 --- a/tests/components/lamarzocco/snapshots/test_update.ambr +++ b/tests/components/lamarzocco/snapshots/test_update.ambr @@ -10,7 +10,7 @@ 'installed_version': 'v3.1-rc4', 'latest_version': 'v3.5-rc3', 'release_summary': None, - 'release_url': None, + 'release_url': 'https://support-iot.lamarzocco.com/firmware-updates/', 'skipped_version': None, 'supported_features': , 'title': None, @@ -67,7 +67,7 @@ 'installed_version': '1.40', 'latest_version': '1.55', 'release_summary': None, - 'release_url': None, + 'release_url': 'https://support-iot.lamarzocco.com/firmware-updates/', 'skipped_version': None, 'supported_features': , 'title': None, diff --git a/tests/components/lametric/conftest.py b/tests/components/lametric/conftest.py index dd3885b78d9..e8ba727f3db 100644 --- a/tests/components/lametric/conftest.py +++ b/tests/components/lametric/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from demetriek import CloudDevice, Device from pydantic import parse_raw_as # pylint: disable=no-name-in-module import pytest -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/lametric/test_button.py b/tests/components/lametric/test_button.py index a6cdca5b426..04efeaac87f 100644 --- 
a/tests/components/lametric/test_button.py +++ b/tests/components/lametric/test_button.py @@ -49,7 +49,7 @@ async def test_button_app_next( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model == "LM 37X8" + assert device_entry.model_id == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -95,7 +95,7 @@ async def test_button_app_previous( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model == "LM 37X8" + assert device_entry.model_id == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -143,7 +143,7 @@ async def test_button_dismiss_current_notification( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model == "LM 37X8" + assert device_entry.model_id == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -191,7 +191,7 @@ async def test_button_dismiss_all_notifications( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." 
- assert device_entry.model == "LM 37X8" + assert device_entry.model_id == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None diff --git a/tests/components/landisgyr_heat_meter/conftest.py b/tests/components/landisgyr_heat_meter/conftest.py index 22f29b3a4b1..1dad983c909 100644 --- a/tests/components/landisgyr_heat_meter/conftest.py +++ b/tests/components/landisgyr_heat_meter/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for Landis + Gyr Heat Meter tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/landisgyr_heat_meter/test_init.py b/tests/components/landisgyr_heat_meter/test_init.py index c9768ec681f..76a376e441c 100644 --- a/tests/components/landisgyr_heat_meter/test_init.py +++ b/tests/components/landisgyr_heat_meter/test_init.py @@ -1,6 +1,6 @@ """Test the Landis + Gyr Heat Meter init.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch from homeassistant.components.landisgyr_heat_meter.const import ( DOMAIN as LANDISGYR_HEAT_METER_DOMAIN, @@ -17,7 +17,7 @@ API_HEAT_METER_SERVICE = ( @patch(API_HEAT_METER_SERVICE) -async def test_unload_entry(_, hass: HomeAssistant) -> None: +async def test_unload_entry(mock_meter_service: MagicMock, hass: HomeAssistant) -> None: """Test removing config entry.""" mock_entry_data = { "device": "/dev/USB0", @@ -41,7 +41,9 @@ async def test_unload_entry(_, hass: HomeAssistant) -> None: @patch(API_HEAT_METER_SERVICE) async def test_migrate_entry( - _, hass: HomeAssistant, entity_registry: er.EntityRegistry + mock_meter_service: MagicMock, + hass: HomeAssistant, + entity_registry: er.EntityRegistry, ) -> None: """Test successful migration of entry data from version 1 to 2.""" diff --git a/tests/components/lastfm/__init__.py b/tests/components/lastfm/__init__.py index 
9fe946f8dff..e4eb476f62d 100644 --- a/tests/components/lastfm/__init__.py +++ b/tests/components/lastfm/__init__.py @@ -1,5 +1,6 @@ """The tests for lastfm.""" +from typing import Any from unittest.mock import patch from pylast import PyLastError, Track @@ -91,7 +92,7 @@ class MockUser: """Get mock now playing.""" return self._now_playing_result - def get_friends(self) -> list[any]: + def get_friends(self) -> list[Any]: """Get mock friends.""" if len(self._friends) == 0: raise PyLastError("network", "status", "Page not found") diff --git a/tests/components/laundrify/conftest.py b/tests/components/laundrify/conftest.py index 91aeebf81ee..2f6496c06a5 100644 --- a/tests/components/laundrify/conftest.py +++ b/tests/components/laundrify/conftest.py @@ -3,6 +3,7 @@ import json from unittest.mock import patch +from laundrify_aio import LaundrifyAPI, LaundrifyDevice import pytest from .const import VALID_ACCESS_TOKEN, VALID_ACCOUNT_ID @@ -49,7 +50,10 @@ def laundrify_api_fixture(laundrify_exchange_code, laundrify_validate_token): ), patch( "laundrify_aio.LaundrifyAPI.get_machines", - return_value=json.loads(load_fixture("laundrify/machines.json")), + return_value=[ + LaundrifyDevice(machine, LaundrifyAPI) + for machine in json.loads(load_fixture("laundrify/machines.json")) + ], ) as get_machines_mock, ): yield get_machines_mock diff --git a/tests/components/laundrify/fixtures/machines.json b/tests/components/laundrify/fixtures/machines.json index ab1a737cb45..3397212659f 100644 --- a/tests/components/laundrify/fixtures/machines.json +++ b/tests/components/laundrify/fixtures/machines.json @@ -1,8 +1,10 @@ [ { - "_id": "14", + "id": "14", "name": "Demo Waschmaschine", "status": "OFF", + "internalIP": "192.168.0.123", + "model": "SU02", "firmwareVersion": "2.1.0" } ] diff --git a/tests/components/lawn_mower/test_init.py b/tests/components/lawn_mower/test_init.py index e7066ed43c1..16f32da7e04 100644 --- a/tests/components/lawn_mower/test_init.py +++ 
b/tests/components/lawn_mower/test_init.py @@ -1,9 +1,9 @@ """The tests for the lawn mower integration.""" +from collections.abc import Generator from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.components.lawn_mower import ( DOMAIN as LAWN_MOWER_DOMAIN, diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index f24fdbc054f..e29a7076430 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -1,6 +1,8 @@ """Test configuration and mocks for LCN component.""" +from collections.abc import AsyncGenerator import json +from typing import Any from unittest.mock import AsyncMock, patch import pypck @@ -10,13 +12,13 @@ from pypck.module import GroupConnection, ModuleConnection import pytest from homeassistant.components.lcn.const import DOMAIN -from homeassistant.components.lcn.helpers import generate_unique_id +from homeassistant.components.lcn.helpers import AddressType, generate_unique_id from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_mock_service, load_fixture +from tests.common import MockConfigEntry, load_fixture class MockModuleConnection(ModuleConnection): @@ -28,7 +30,7 @@ class MockModuleConnection(ModuleConnection): request_name = AsyncMock(return_value="TestModule") send_command = AsyncMock(return_value=True) - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Construct ModuleConnection instance.""" super().__init__(*args, **kwargs) self.serials_request_handler.serial_known.set() @@ -43,13 +45,13 @@ class MockGroupConnection(GroupConnection): class MockPchkConnectionManager(PchkConnectionManager): """Fake connection handler.""" - async def 
async_connect(self, timeout=30): + async def async_connect(self, timeout: int = 30) -> None: """Mock establishing a connection to PCHK.""" self.authentication_completed_future.set_result(True) self.license_error_future.set_result(True) self.segment_scan_completed_event.set() - async def async_close(self): + async def async_close(self) -> None: """Mock closing a connection to PCHK.""" @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) @@ -61,7 +63,7 @@ class MockPchkConnectionManager(PchkConnectionManager): send_command = AsyncMock() -def create_config_entry(name): +def create_config_entry(name: str) -> MockConfigEntry: """Set up config entries with configuration data.""" fixture_filename = f"lcn/config_entry_{name}.json" entry_data = json.loads(load_fixture(fixture_filename)) @@ -78,26 +80,22 @@ def create_config_entry(name): ) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(name="entry") -def create_config_entry_pchk(): +def create_config_entry_pchk() -> MockConfigEntry: """Return one specific config entry.""" return create_config_entry("pchk") @pytest.fixture(name="entry2") -def create_config_entry_myhome(): +def create_config_entry_myhome() -> MockConfigEntry: """Return one specific config entry.""" return create_config_entry("myhome") @pytest.fixture(name="lcn_connection") -async def init_integration(hass, entry): +async def init_integration( + hass: HomeAssistant, entry: MockConfigEntry +) -> AsyncGenerator[MockPchkConnectionManager]: """Set up the LCN integration in Home Assistant.""" lcn_connection = None @@ -116,7 +114,7 @@ async def init_integration(hass, entry): yield lcn_connection -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Set up the LCN component.""" fixture_filename = "lcn/config.json" config_data = json.loads(load_fixture(fixture_filename)) 
@@ -125,7 +123,9 @@ async def setup_component(hass): await hass.async_block_till_done() -def get_device(hass, entry, address): +def get_device( + hass: HomeAssistant, entry: MockConfigEntry, address: AddressType +) -> dr.DeviceEntry: """Get LCN device for specified address.""" device_registry = dr.async_get(hass) identifiers = {(DOMAIN, generate_unique_id(entry.entry_id, address))} diff --git a/tests/components/lcn/test_device_trigger.py b/tests/components/lcn/test_device_trigger.py index 67bd7568254..6c5ab7d6f4e 100644 --- a/tests/components/lcn/test_device_trigger.py +++ b/tests/components/lcn/test_device_trigger.py @@ -72,7 +72,7 @@ async def test_get_triggers_non_module_device( async def test_if_fires_on_transponder_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for transponder event triggers firing.""" address = (0, 7, False) @@ -111,15 +111,15 @@ async def test_if_fires_on_transponder_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_transponder", "code": "aabbcc", } async def test_if_fires_on_fingerprint_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for fingerprint event triggers firing.""" address = (0, 7, False) @@ -158,15 +158,15 @@ async def test_if_fires_on_fingerprint_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_fingerprint", "code": "aabbcc", } async def test_if_fires_on_codelock_event( - hass: HomeAssistant, calls: list[ServiceCall], 
entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for codelock event triggers firing.""" address = (0, 7, False) @@ -205,15 +205,15 @@ async def test_if_fires_on_codelock_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_codelock", "code": "aabbcc", } async def test_if_fires_on_transmitter_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for transmitter event triggers firing.""" address = (0, 7, False) @@ -258,8 +258,8 @@ async def test_if_fires_on_transmitter_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_transmitter", "code": "aabbcc", "level": 0, @@ -269,7 +269,7 @@ async def test_if_fires_on_transmitter_event( async def test_if_fires_on_send_keys_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for send_keys event triggers firing.""" address = (0, 7, False) @@ -309,8 +309,8 @@ async def test_if_fires_on_send_keys_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_send_keys", "key": "a1", "action": "hit", diff --git a/tests/components/lg_netcast/__init__.py b/tests/components/lg_netcast/__init__.py index ce3e09aeb65..6e608ae207b 100644 --- a/tests/components/lg_netcast/__init__.py +++ 
b/tests/components/lg_netcast/__init__.py @@ -1,7 +1,7 @@ """Tests for LG Netcast TV.""" from unittest.mock import patch -from xml.etree import ElementTree +import xml.etree.ElementTree as ET from pylgnetcast import AccessTokenError, LgNetCastClient, SessionIdError import requests @@ -56,7 +56,7 @@ def _patched_lgnetcast_client( if always_404: return None if invalid_details: - raise ElementTree.ParseError("Mocked Parsed Error") + raise ET.ParseError("Mocked Parsed Error") return { "uuid": UNIQUE_ID if not no_unique_id else None, "model_name": MODEL_NAME, diff --git a/tests/components/lg_netcast/conftest.py b/tests/components/lg_netcast/conftest.py deleted file mode 100644 index eb13d5c8c67..00000000000 --- a/tests/components/lg_netcast/conftest.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Common fixtures and objects for the LG Netcast integration tests.""" - -import pytest - -from homeassistant.core import HomeAssistant, ServiceCall - -from tests.common import async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") diff --git a/tests/components/lg_netcast/test_device_trigger.py b/tests/components/lg_netcast/test_device_trigger.py index 05911acc41d..c8d725afde1 100644 --- a/tests/components/lg_netcast/test_device_trigger.py +++ b/tests/components/lg_netcast/test_device_trigger.py @@ -43,7 +43,9 @@ async def test_get_triggers( async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for turn_on triggers firing.""" await setup_lgnetcast(hass) @@ -96,11 +98,11 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 - assert 
calls[1].data["some"] == ENTITY_ID - assert calls[1].data["id"] == 0 + assert len(service_calls) == 3 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 + assert service_calls[2].data["some"] == ENTITY_ID + assert service_calls[2].data["id"] == 0 async def test_failure_scenarios( diff --git a/tests/components/lg_netcast/test_trigger.py b/tests/components/lg_netcast/test_trigger.py index b0c2a86ec21..d838b931560 100644 --- a/tests/components/lg_netcast/test_trigger.py +++ b/tests/components/lg_netcast/test_trigger.py @@ -18,7 +18,9 @@ from tests.common import MockEntity, MockEntityPlatform async def test_lg_netcast_turn_on_trigger_device_id( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for turn_on trigger by device_id firing.""" await setup_lgnetcast(hass) @@ -56,14 +58,14 @@ async def test_lg_netcast_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 with patch("homeassistant.config.load_yaml_dict", return_value={}): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) - calls.clear() + service_calls.clear() with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -74,11 +76,11 @@ async def test_lg_netcast_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_lg_netcast_turn_on_trigger_entity_id( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for turn_on triggers by entity firing.""" await setup_lgnetcast(hass) @@ -113,9 +115,9 @@ 
async def test_lg_netcast_turn_on_trigger_entity_id( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == ENTITY_ID - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == ENTITY_ID + assert service_calls[1].data["id"] == 0 async def test_wrong_trigger_platform_type( diff --git a/tests/components/lidarr/conftest.py b/tests/components/lidarr/conftest.py index 588acb2b87f..1024aadc403 100644 --- a/tests/components/lidarr/conftest.py +++ b/tests/components/lidarr/conftest.py @@ -2,13 +2,12 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from http import HTTPStatus from aiohttp.client_exceptions import ClientError from aiopyarr.lidarr_client import LidarrClient import pytest -from typing_extensions import Generator from homeassistant.components.lidarr.const import DOMAIN from homeassistant.const import ( diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 505d212a352..432e7673db6 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from contextlib import contextmanager +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch from aiolifx.aiolifx import Light @@ -25,7 +26,7 @@ DEFAULT_ENTRY_TITLE = LABEL class MockMessage: """Mock a lifx message.""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """Init message.""" self.target_addr = SERIAL self.count = 9 @@ -37,7 +38,7 @@ class MockMessage: class MockFailingLifxCommand: """Mock a lifx command that fails.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] @@ -60,7 +61,7 @@ class MockLifxCommand: """Return name.""" return "mock_lifx_command" - def 
__init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] @@ -172,6 +173,19 @@ def _mocked_tile() -> Light: bulb.effect = {"effect": "OFF"} bulb.get_tile_effect = MockLifxCommand(bulb) bulb.set_tile_effect = MockLifxCommand(bulb) + bulb.get64 = MockLifxCommand(bulb) + bulb.get_device_chain = MockLifxCommand(bulb) + return bulb + + +def _mocked_ceiling() -> Light: + bulb = _mocked_bulb() + bulb.product = 176 # LIFX Ceiling + bulb.effect = {"effect": "OFF"} + bulb.get_tile_effect = MockLifxCommand(bulb) + bulb.set_tile_effect = MockLifxCommand(bulb) + bulb.get64 = MockLifxCommand(bulb) + bulb.get_device_chain = MockLifxCommand(bulb) return bulb @@ -199,7 +213,7 @@ def _patch_device(device: Light | None = None, no_device: bool = False): class MockLifxConnecton: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" if no_device: self.device = _mocked_failing_bulb() @@ -227,7 +241,7 @@ def _patch_discovery(device: Light | None = None, no_device: bool = False): class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" if no_device: self.lights = {} @@ -263,7 +277,7 @@ def _patch_config_flow_try_connect( class MockLifxConnection: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" if no_device: self.device = _mocked_failing_bulb() diff --git a/tests/components/lifx/conftest.py b/tests/components/lifx/conftest.py index 093f2309e53..e4a5f303f61 100644 --- a/tests/components/lifx/conftest.py +++ b/tests/components/lifx/conftest.py @@ -1,5 +1,6 @@ """Tests for the lifx integration.""" +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -8,8 +9,6 @@ from 
homeassistant.components.lifx import config_flow, coordinator, util from . import _patch_discovery -from tests.common import mock_device_registry, mock_registry - @pytest.fixture def mock_discovery(): @@ -23,7 +22,7 @@ def mock_effect_conductor(): """Mock the effect conductor.""" class MockConductor: - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock the conductor.""" self.start = AsyncMock() self.stop = AsyncMock() @@ -61,15 +60,3 @@ def lifx_mock_async_get_ipv4_broadcast_addresses(): return_value=["255.255.255.255"], ): yield - - -@pytest.fixture(name="device_reg") -def device_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture(name="entity_reg") -def entity_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_registry(hass) diff --git a/tests/components/lifx/test_config_flow.py b/tests/components/lifx/test_config_flow.py index 59b7090788a..29324d0d19a 100644 --- a/tests/components/lifx/test_config_flow.py +++ b/tests/components/lifx/test_config_flow.py @@ -2,6 +2,7 @@ from ipaddress import ip_address import socket +from typing import Any from unittest.mock import patch import pytest @@ -288,7 +289,7 @@ async def test_manual_dns_error(hass: HomeAssistant) -> None: class MockLifxConnectonDnsError: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" self.device = _mocked_failing_bulb() @@ -574,7 +575,7 @@ async def test_suggested_area( class MockLifxCommandGetGroup: """Mock the get_group method that gets the group name from the bulb.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.lifx_group = kwargs.get("lifx_group") diff --git a/tests/components/lifx/test_init.py b/tests/components/lifx/test_init.py index 42ece68a2c5..66adc54704e 100644 
--- a/tests/components/lifx/test_init.py +++ b/tests/components/lifx/test_init.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import timedelta import socket +from typing import Any from unittest.mock import patch import pytest @@ -37,7 +38,7 @@ async def test_configuring_lifx_causes_discovery(hass: HomeAssistant) -> None: class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" discovered = _mocked_bulb() self.lights = {discovered.mac_addr: discovered} @@ -137,7 +138,7 @@ async def test_dns_error_at_startup(hass: HomeAssistant) -> None: class MockLifxConnectonDnsError: """Mock lifx connection with a dns error.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" self.device = bulb diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 56630053cc0..a642347b4e6 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -1,6 +1,7 @@ """Tests for the lifx integration light platform.""" from datetime import timedelta +from typing import Any from unittest.mock import patch import aiolifx_effects @@ -11,15 +12,19 @@ from homeassistant.components.lifx import DOMAIN from homeassistant.components.lifx.const import ATTR_POWER from homeassistant.components.lifx.light import ATTR_INFRARED, ATTR_ZONES from homeassistant.components.lifx.manager import ( + ATTR_CLOUD_SATURATION_MAX, + ATTR_CLOUD_SATURATION_MIN, ATTR_DIRECTION, ATTR_PALETTE, ATTR_SATURATION_MAX, ATTR_SATURATION_MIN, + ATTR_SKY_TYPE, ATTR_SPEED, ATTR_THEME, SERVICE_EFFECT_COLORLOOP, SERVICE_EFFECT_MORPH, SERVICE_EFFECT_MOVE, + SERVICE_EFFECT_SKY, ) from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -62,6 +67,7 @@ from . 
import ( _mocked_brightness_bulb, _mocked_bulb, _mocked_bulb_new_firmware, + _mocked_ceiling, _mocked_clean_bulb, _mocked_light_strip, _mocked_tile, @@ -691,6 +697,7 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: entity_id = "light.my_bulb" + # FLAME effect test await hass.services.async_call( LIGHT_DOMAIN, "turn_on", @@ -707,11 +714,15 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: "effect": 3, "speed": 3, "palette": [], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() bulb.set_power.reset_mock() + # MORPH effect tests bulb.power_level = 0 await hass.services.async_call( DOMAIN, @@ -750,6 +761,9 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: (8920, 65535, 32768, 3500), (10558, 65535, 32768, 3500), ], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() @@ -808,6 +822,140 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: (43690, 65535, 65535, 3500), (54613, 65535, 65535, 3500), ], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, + } + bulb.get_tile_effect.reset_mock() + bulb.set_tile_effect.reset_mock() + bulb.set_power.reset_mock() + + +@pytest.mark.usefixtures("mock_discovery") +async def test_sky_effect(hass: HomeAssistant) -> None: + """Test the firmware sky effect on a ceiling device.""" + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=SERIAL + ) + config_entry.add_to_hass(hass) + bulb = _mocked_ceiling() + bulb.power_level = 0 + bulb.color = [65535, 65535, 65535, 65535] + with ( + _patch_discovery(device=bulb), + _patch_config_flow_try_connect(device=bulb), + _patch_device(device=bulb), + ): + await async_setup_component(hass, lifx.DOMAIN, {lifx.DOMAIN: {}}) + await 
hass.async_block_till_done() + + entity_id = "light.my_bulb" + + # SKY effect test + bulb.power_level = 0 + await hass.services.async_call( + DOMAIN, + SERVICE_EFFECT_SKY, + { + ATTR_ENTITY_ID: entity_id, + ATTR_PALETTE: [], + ATTR_SKY_TYPE: "Clouds", + ATTR_CLOUD_SATURATION_MAX: 180, + ATTR_CLOUD_SATURATION_MIN: 50, + }, + blocking=True, + ) + + bulb.power_level = 65535 + bulb.effect = { + "effect": "SKY", + "palette": [], + "sky_type": 2, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + assert len(bulb.set_power.calls) == 1 + assert len(bulb.set_tile_effect.calls) == 1 + call_dict = bulb.set_tile_effect.calls[0][1] + call_dict.pop("callb") + assert call_dict == { + "effect": 5, + "speed": 50, + "palette": [], + "sky_type": 2, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + bulb.get_tile_effect.reset_mock() + bulb.set_tile_effect.reset_mock() + bulb.set_power.reset_mock() + + bulb.power_level = 0 + await hass.services.async_call( + DOMAIN, + SERVICE_EFFECT_SKY, + { + ATTR_ENTITY_ID: entity_id, + ATTR_PALETTE: [ + (200, 100, 1, 3500), + (241, 100, 1, 3500), + (189, 100, 8, 3500), + (40, 100, 100, 3500), + (40, 50, 100, 3500), + (0, 0, 100, 6500), + ], + ATTR_SKY_TYPE: "Sunrise", + ATTR_CLOUD_SATURATION_MAX: 180, + ATTR_CLOUD_SATURATION_MIN: 50, + }, + blocking=True, + ) + + bulb.power_level = 65535 + bulb.effect = { + "effect": "SKY", + "palette": [ + (200, 100, 1, 3500), + (241, 100, 1, 3500), + (189, 100, 8, 3500), + (40, 100, 100, 3500), + (40, 50, 100, 3500), + (0, 0, 100, 6500), + ], + "sky_type": 0, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + state = 
hass.states.get(entity_id) + assert state.state == STATE_ON + + assert len(bulb.set_power.calls) == 1 + assert len(bulb.set_tile_effect.calls) == 1 + call_dict = bulb.set_tile_effect.calls[0][1] + call_dict.pop("callb") + assert call_dict == { + "effect": 5, + "speed": 50, + "palette": [ + (36408, 65535, 65535, 3500), + (43872, 65535, 65535, 3500), + (34406, 65535, 5243, 3500), + (7281, 65535, 65535, 3500), + (7281, 32768, 65535, 3500), + (0, 0, 65535, 6500), + ], + "sky_type": 0, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() @@ -1152,7 +1300,7 @@ async def test_config_zoned_light_strip_fails( class MockFailingLifxCommand: """Mock a lifx command that fails on the 2nd try.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.call_count = 0 @@ -1191,7 +1339,7 @@ async def test_legacy_zoned_light_strip( class MockPopulateLifxZonesCommand: """Mock populating the number of zones.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.call_count = 0 @@ -1698,7 +1846,7 @@ async def test_color_bulb_is_actually_off(hass: HomeAssistant) -> None: class MockLifxCommandActuallyOff: """Mock a lifx command that will update our power level state.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] diff --git a/tests/components/lifx/test_migration.py b/tests/components/lifx/test_migration.py index 62018790906..f984acce238 100644 --- a/tests/components/lifx/test_migration.py +++ b/tests/components/lifx/test_migration.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import timedelta +from typing import Any from unittest.mock import patch from homeassistant import setup @@ -11,8 +12,6 @@ from homeassistant.components.lifx import 
DOMAIN, discovery from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.device_registry import DeviceRegistry -from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -31,20 +30,22 @@ from tests.common import MockConfigEntry, async_fire_time_changed async def test_migration_device_online_end_to_end( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test migration from single config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, title="LEGACY", data={}, unique_id=DOMAIN ) config_entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - light_entity_reg = entity_reg.async_get_or_create( + light_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -65,9 +66,9 @@ async def test_migration_device_online_end_to_end( assert migrated_entry is not None - assert device.config_entries == [migrated_entry.entry_id] + assert device.config_entries == {migrated_entry.entry_id} assert light_entity_reg.config_entry_id == migrated_entry.entry_id - assert er.async_entries_for_config_entry(entity_reg, config_entry) == [] + assert er.async_entries_for_config_entry(entity_registry, config_entry) == [] hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() @@ -84,20 +85,22 @@ async def test_migration_device_online_end_to_end( async def 
test_discovery_is_more_frequent_during_migration( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test that discovery is more frequent during migration.""" config_entry = MockConfigEntry( domain=DOMAIN, title="LEGACY", data={}, unique_id=DOMAIN ) config_entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - entity_reg.async_get_or_create( + entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -112,7 +115,7 @@ async def test_discovery_is_more_frequent_during_migration( class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" self.bulb = bulb self.lights = {} @@ -160,7 +163,9 @@ async def test_discovery_is_more_frequent_during_migration( async def test_migration_device_online_end_to_end_after_downgrade( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test migration from single config entry can happen again after a downgrade.""" config_entry = MockConfigEntry( @@ -172,13 +177,13 @@ async def test_migration_device_online_end_to_end_after_downgrade( domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=SERIAL ) already_migrated_config_entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - light_entity_reg = 
entity_reg.async_get_or_create( + light_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -195,9 +200,9 @@ async def test_migration_device_online_end_to_end_after_downgrade( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=20)) await hass.async_block_till_done() - assert device.config_entries == [config_entry.entry_id] + assert device.config_entries == {config_entry.entry_id} assert light_entity_reg.config_entry_id == config_entry.entry_id - assert er.async_entries_for_config_entry(entity_reg, config_entry) == [] + assert er.async_entries_for_config_entry(entity_registry, config_entry) == [] legacy_entry = None for entry in hass.config_entries.async_entries(DOMAIN): @@ -209,7 +214,9 @@ async def test_migration_device_online_end_to_end_after_downgrade( async def test_migration_device_online_end_to_end_ignores_other_devices( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test migration from single config entry.""" legacy_config_entry = MockConfigEntry( @@ -221,18 +228,18 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( domain="other_domain", data={}, unique_id="other_domain" ) other_domain_config_entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=legacy_config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - other_device = device_reg.async_get_or_create( + other_device = device_registry.async_get_or_create( config_entry_id=other_domain_config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "556655665566")}, name=LABEL, ) - light_entity_reg = entity_reg.async_get_or_create( + light_entity_reg = entity_registry.async_get_or_create( config_entry=legacy_config_entry, 
platform=DOMAIN, domain="light", @@ -240,7 +247,7 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( original_name=LABEL, device_id=device.id, ) - ignored_entity_reg = entity_reg.async_get_or_create( + ignored_entity_reg = entity_registry.async_get_or_create( config_entry=other_domain_config_entry, platform=DOMAIN, domain="sensor", @@ -248,7 +255,7 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( original_name=LABEL, device_id=device.id, ) - garbage_entity_reg = entity_reg.async_get_or_create( + garbage_entity_reg = entity_registry.async_get_or_create( config_entry=legacy_config_entry, platform=DOMAIN, domain="sensor", @@ -276,10 +283,16 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( assert new_entry is not None assert legacy_entry is None - assert device.config_entries == [legacy_config_entry.entry_id] + assert device.config_entries == {legacy_config_entry.entry_id} assert light_entity_reg.config_entry_id == legacy_config_entry.entry_id assert ignored_entity_reg.config_entry_id == other_domain_config_entry.entry_id assert garbage_entity_reg.config_entry_id == legacy_config_entry.entry_id - assert er.async_entries_for_config_entry(entity_reg, legacy_config_entry) == [] - assert dr.async_entries_for_config_entry(device_reg, legacy_config_entry) == [] + assert ( + er.async_entries_for_config_entry(entity_registry, legacy_config_entry) + == [] + ) + assert ( + dr.async_entries_for_config_entry(device_registry, legacy_config_entry) + == [] + ) diff --git a/tests/components/light/test_device_action.py b/tests/components/light/test_device_action.py index 8848ce19621..c2ac7087cf0 100644 --- a/tests/components/light/test_device_action.py +++ b/tests/components/light/test_device_action.py @@ -14,7 +14,7 @@ from homeassistant.components.light import ( LightEntityFeature, ) from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant, ServiceCall +from 
homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -32,12 +32,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -471,7 +465,6 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -636,7 +629,6 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/light/test_device_condition.py b/tests/components/light/test_device_condition.py index 11dea49ea60..94e12ffbfa5 100644 --- a/tests/components/light/test_device_condition.py +++ b/tests/components/light/test_device_condition.py @@ -22,7 +22,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -32,12 +31,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def 
test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -186,7 +179,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -252,20 +245,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -273,7 +266,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -318,20 +311,20 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - 
test_event1" async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_light_entities: list[MockLight], ) -> None: """Test for firing if condition is on with delay.""" @@ -385,26 +378,26 @@ async def test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_off event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/light/test_device_trigger.py b/tests/components/light/test_device_trigger.py index ab3babd1b64..4e8414edabc 100644 --- a/tests/components/light/test_device_trigger.py +++ b/tests/components/light/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) DATA_TEMPLATE_ATTRIBUTES = ( @@ -37,12 +36,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock 
service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -189,7 +182,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -258,20 +251,20 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -282,7 +275,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -321,13 +314,14 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 
hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"turn_on device - {entry.entity_id} - on - off - None" + service_calls[0].data["some"] + == f"turn_on device - {entry.entity_id} - on - off - None" ) @@ -336,7 +330,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -376,16 +370,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/light/test_recorder.py b/tests/components/light/test_recorder.py index 49c9a567856..f3f87ff6074 100644 --- a/tests/components/light/test_recorder.py +++ b/tests/components/light/test_recorder.py @@ -9,12 +9,23 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, + ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, + ATTR_EFFECT, ATTR_EFFECT_LIST, + ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MIN_MIREDS, + ATTR_RGB_COLOR, + ATTR_RGBW_COLOR, + ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, + ATTR_XY_COLOR, + DOMAIN, ) from 
homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states @@ -50,7 +61,7 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) await async_wait_recording_done(hass) states = await hass.async_add_executor_job( - get_significant_states, hass, now, None, hass.states.async_entity_ids() + get_significant_states, hass, now, None, hass.states.async_entity_ids(DOMAIN) ) assert len(states) >= 1 for entity_states in states.values(): @@ -62,3 +73,13 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert ATTR_FRIENDLY_NAME in state.attributes assert ATTR_MAX_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_MIN_COLOR_TEMP_KELVIN not in state.attributes + assert ATTR_BRIGHTNESS not in state.attributes + assert ATTR_COLOR_MODE not in state.attributes + assert ATTR_COLOR_TEMP not in state.attributes + assert ATTR_COLOR_TEMP_KELVIN not in state.attributes + assert ATTR_EFFECT not in state.attributes + assert ATTR_HS_COLOR not in state.attributes + assert ATTR_RGB_COLOR not in state.attributes + assert ATTR_RGBW_COLOR not in state.attributes + assert ATTR_RGBWW_COLOR not in state.attributes + assert ATTR_XY_COLOR not in state.attributes diff --git a/tests/components/linear_garage_door/conftest.py b/tests/components/linear_garage_door/conftest.py index 306da23ebf9..4ed7662e5d0 100644 --- a/tests/components/linear_garage_door/conftest.py +++ b/tests/components/linear_garage_door/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Linear Garage Door tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.linear_garage_door import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/linear_garage_door/test_diagnostics.py b/tests/components/linear_garage_door/test_diagnostics.py index 
6bf7415bde5..a00feed43ff 100644 --- a/tests/components/linear_garage_door/test_diagnostics.py +++ b/tests/components/linear_garage_door/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -25,4 +26,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/linkplay/__init__.py b/tests/components/linkplay/__init__.py new file mode 100644 index 00000000000..5962f7fdaba --- /dev/null +++ b/tests/components/linkplay/__init__.py @@ -0,0 +1 @@ +"""Tests for the LinkPlay integration.""" diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py new file mode 100644 index 00000000000..b3d65422e08 --- /dev/null +++ b/tests/components/linkplay/conftest.py @@ -0,0 +1,40 @@ +"""Test configuration and mocks for LinkPlay component.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from linkplay.bridge import LinkPlayBridge, LinkPlayDevice +import pytest + +HOST = "10.0.0.150" +HOST_REENTRY = "10.0.0.66" +UUID = "FF31F09E-5001-FBDE-0546-2DBFFF31F09E" +NAME = "Smart Zone 1_54B9" + + +@pytest.fixture +def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: + """Mock for linkplay_factory_bridge.""" + + with ( + patch( + "homeassistant.components.linkplay.config_flow.linkplay_factory_bridge" + ) as factory, + ): + bridge = AsyncMock(spec=LinkPlayBridge) + bridge.endpoint = HOST + bridge.device = AsyncMock(spec=LinkPlayDevice) + bridge.device.uuid = UUID + bridge.device.name = NAME + factory.return_value = bridge + yield factory + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + 
"homeassistant.components.linkplay.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry diff --git a/tests/components/linkplay/test_config_flow.py b/tests/components/linkplay/test_config_flow.py new file mode 100644 index 00000000000..641f09893c2 --- /dev/null +++ b/tests/components/linkplay/test_config_flow.py @@ -0,0 +1,204 @@ +"""Tests for the LinkPlay config flow.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock + +from homeassistant.components.linkplay.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import HOST, HOST_REENTRY, NAME, UUID + +from tests.common import MockConfigEntry + +ZEROCONF_DISCOVERY = ZeroconfServiceInfo( + ip_address=ip_address(HOST), + ip_addresses=[ip_address(HOST)], + hostname=f"{NAME}.local.", + name=f"{NAME}._linkplay._tcp.local.", + port=59152, + type="_linkplay._tcp.local.", + properties={ + "uuid": f"uuid:{UUID}", + "mac": "00:2F:69:01:84:3A", + "security": "https 2.0", + "upnp": "1.0.0", + "bootid": "1f347886-1dd2-11b2-86ab-aa0cd2803583", + }, +) + +ZEROCONF_DISCOVERY_RE_ENTRY = ZeroconfServiceInfo( + ip_address=ip_address(HOST_REENTRY), + ip_addresses=[ip_address(HOST_REENTRY)], + hostname=f"{NAME}.local.", + name=f"{NAME}._linkplay._tcp.local.", + port=59152, + type="_linkplay._tcp.local.", + properties={ + "uuid": f"uuid:{UUID}", + "mac": "00:2F:69:01:84:3A", + "security": "https 2.0", + "upnp": "1.0.0", + "bootid": "1f347886-1dd2-11b2-86ab-aa0cd2803583", + }, +) + + +async def test_user_flow( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user setup config flow.""" + result = await hass.config_entries.flow.async_init( + 
DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == NAME + assert result["data"] == { + CONF_HOST: HOST, + } + assert result["result"].unique_id == UUID + + +async def test_user_flow_re_entry( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user setup config flow when an entry with the same unique id already exists.""" + + # Create mock entry which already has the same UUID + entry = MockConfigEntry( + data={CONF_HOST: HOST}, + domain=DOMAIN, + title=NAME, + unique_id=UUID, + ) + entry.add_to_hass(hass) + + # Re-create entry with different host + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST_REENTRY}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_zeroconf_flow( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test Zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == NAME + assert result["data"] == { + CONF_HOST: HOST, + } + assert result["result"].unique_id == UUID + + +async def test_zeroconf_flow_re_entry( + hass: HomeAssistant, + mock_linkplay_factory_bridge: 
AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test Zeroconf flow when an entry with the same unique id already exists.""" + + # Create mock entry which already has the same UUID + entry = MockConfigEntry( + data={CONF_HOST: HOST}, + domain=DOMAIN, + title=NAME, + unique_id=UUID, + ) + entry.add_to_hass(hass) + + # Re-create entry with different host + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY_RE_ENTRY, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_flow_errors( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test flow when the device cannot be reached.""" + + # Temporarily store bridge in a separate variable and set factory to return None + bridge = mock_linkplay_factory_bridge.return_value + mock_linkplay_factory_bridge.return_value = None + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + # Make linkplay_factory_bridge return a mock bridge again + mock_linkplay_factory_bridge.return_value = bridge + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == NAME + assert result["data"] == { + CONF_HOST: HOST, + } + assert result["result"].unique_id == UUID diff --git a/tests/components/litejet/test_trigger.py b/tests/components/litejet/test_trigger.py index 216084c26bc..b4374652955 100644 --- 
a/tests/components/litejet/test_trigger.py +++ b/tests/components/litejet/test_trigger.py @@ -14,7 +14,7 @@ import homeassistant.util.dt as dt_util from . import async_init_integration -from tests.common import async_fire_time_changed_exact, async_mock_service +from tests.common import async_fire_time_changed_exact @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -30,12 +30,6 @@ ENTITY_OTHER_SWITCH = "switch.mock_switch_2" ENTITY_OTHER_SWITCH_NUMBER = 2 -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def simulate_press(hass, mock_litejet, number): """Test to simulate a press.""" _LOGGER.info("*** simulate press of %d", number) @@ -101,7 +95,7 @@ async def setup_automation(hass, trigger): async def test_simple( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -111,12 +105,12 @@ async def test_simple( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 async def test_only_release( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -125,11 +119,11 @@ async def test_only_release( await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_held_more_than_short( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) 
-> None: """Test a too short hold.""" await setup_automation( @@ -144,11 +138,11 @@ async def test_held_more_than_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, timedelta(seconds=1)) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_held_more_than_long( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test a hold that is long enough.""" await setup_automation( @@ -161,16 +155,16 @@ async def test_held_more_than_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=3)) - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_held_less_than_short( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test a hold that is short enough.""" await setup_automation( @@ -184,14 +178,14 @@ async def test_held_less_than_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, timedelta(seconds=1)) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 async def test_held_less_than_long( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet 
) -> None: """Test a hold that is too long.""" await setup_automation( @@ -204,15 +198,15 @@ async def test_held_less_than_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=3)) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_held_in_range_short( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a too short hold.""" await setup_automation( @@ -228,11 +222,11 @@ async def test_held_in_range_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, timedelta(seconds=0.5)) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_held_in_range_just_right( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a just right hold.""" await setup_automation( @@ -246,16 +240,16 @@ async def test_held_in_range_just_right( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=2)) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 async def test_held_in_range_long( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: 
list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a too long hold.""" await setup_automation( @@ -269,15 +263,15 @@ async def test_held_in_range_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=4)) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_reload( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test reloading automation.""" await setup_automation( @@ -312,8 +306,8 @@ async def test_reload( await hass.async_block_till_done() await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 1 await simulate_time(hass, mock_litejet, timedelta(seconds=5)) - assert len(calls) == 0 + assert len(service_calls) == 1 await simulate_time(hass, mock_litejet, timedelta(seconds=12.5)) - assert len(calls) == 1 + assert len(service_calls) == 2 diff --git a/tests/components/local_calendar/conftest.py b/tests/components/local_calendar/conftest.py index 6d2c38544a5..8aef73a9d5a 100644 --- a/tests/components/local_calendar/conftest.py +++ b/tests/components/local_calendar/conftest.py @@ -1,6 +1,6 @@ """Fixtures for local calendar.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from http import HTTPStatus from pathlib import Path from typing import Any @@ -9,7 +9,6 @@ import urllib from aiohttp import ClientWebSocketResponse import pytest -from typing_extensions import Generator from homeassistant.components.local_calendar import LocalCalendarStore from homeassistant.components.local_calendar.const import CONF_CALENDAR_NAME, DOMAIN diff --git 
a/tests/components/local_calendar/test_diagnostics.py b/tests/components/local_calendar/test_diagnostics.py index ed12391f8a9..30c857dad98 100644 --- a/tests/components/local_calendar/test_diagnostics.py +++ b/tests/components/local_calendar/test_diagnostics.py @@ -7,7 +7,6 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.auth.models import Credentials from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY, Client @@ -41,12 +40,6 @@ def _get_test_client_generator( return auth_client -@pytest.fixture(autouse=True) -async def setup_diag(hass): - """Set up diagnostics platform.""" - assert await async_setup_component(hass, "diagnostics", {}) - - @freeze_time("2023-03-13 12:05:00-07:00") @pytest.mark.usefixtures("socket_enabled") async def test_empty_calendar( diff --git a/tests/components/local_ip/test_init.py b/tests/components/local_ip/test_init.py index 51e0628a417..7f411ea9cd7 100644 --- a/tests/components/local_ip/test_init.py +++ b/tests/components/local_ip/test_init.py @@ -2,7 +2,7 @@ from __future__ import annotations -from homeassistant.components.local_ip import DOMAIN +from homeassistant.components.local_ip.const import DOMAIN from homeassistant.components.network import MDNS_TARGET_IP, async_get_source_ip from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant diff --git a/tests/components/local_todo/conftest.py b/tests/components/local_todo/conftest.py index 67ef76172b7..ab73dabb474 100644 --- a/tests/components/local_todo/conftest.py +++ b/tests/components/local_todo/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the local_todo tests.""" +from collections.abc import Generator from pathlib import Path from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.local_todo import LocalTodoListStore from 
homeassistant.components.local_todo.const import ( diff --git a/tests/components/local_todo/test_todo.py b/tests/components/local_todo/test_todo.py index e54ee925437..253adebd757 100644 --- a/tests/components/local_todo/test_todo.py +++ b/tests/components/local_todo/test_todo.py @@ -7,7 +7,17 @@ from typing import Any import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from .conftest import TEST_ENTITY @@ -76,17 +86,17 @@ EXPECTED_ADD_ITEM = { ("item_data", "expected_item_data"), [ ({}, EXPECTED_ADD_ITEM), - ({"due_date": "2023-11-17"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17"}), + ({ATTR_DUE_DATE: "2023-11-17"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17"}), ( - {"due_datetime": "2023-11-17T11:30:00+00:00"}, + {ATTR_DUE_DATETIME: "2023-11-17T11:30:00+00:00"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17T05:30:00-06:00"}, ), ( - {"description": "Additional detail"}, + {ATTR_DESCRIPTION: "Additional detail"}, {**EXPECTED_ADD_ITEM, "description": "Additional detail"}, ), - ({"description": ""}, {**EXPECTED_ADD_ITEM, "description": ""}), - ({"description": None}, EXPECTED_ADD_ITEM), + ({ATTR_DESCRIPTION: ""}, {**EXPECTED_ADD_ITEM, "description": ""}), + ({ATTR_DESCRIPTION: None}, EXPECTED_ADD_ITEM), ], ) async def test_add_item( @@ -105,9 +115,9 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "replace batteries", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "replace batteries", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -127,12 +137,12 @@ async def test_add_item( ("item_data", "expected_item_data"), 
[ ({}, {}), - ({"due_date": "2023-11-17"}, {"due": "2023-11-17"}), + ({ATTR_DUE_DATE: "2023-11-17"}, {"due": "2023-11-17"}), ( {"due_datetime": "2023-11-17T11:30:00+00:00"}, {"due": "2023-11-17T05:30:00-06:00"}, ), - ({"description": "Additional detail"}, {"description": "Additional detail"}), + ({ATTR_DESCRIPTION: "Additional detail"}, {"description": "Additional detail"}), ], ) async def test_remove_item( @@ -145,9 +155,9 @@ async def test_remove_item( """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "replace batteries", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "replace batteries", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -165,9 +175,9 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": [items[0]["uid"]]}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: [items[0]["uid"]]}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -188,9 +198,9 @@ async def test_bulk_remove( for i in range(5): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": f"soda #{i}"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: f"soda #{i}"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -204,9 +214,9 @@ async def test_bulk_remove( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": uids}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uids}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -227,19 +237,23 @@ EXPECTED_UPDATE_ITEM = { @pytest.mark.parametrize( ("item_data", "expected_item_data", "expected_state"), [ - ({"status": "completed"}, {**EXPECTED_UPDATE_ITEM, "status": "completed"}, "0"), ( - {"due_date": "2023-11-17"}, + {ATTR_STATUS: "completed"}, + {**EXPECTED_UPDATE_ITEM, "status": "completed"}, + "0", + ), + ( + {ATTR_DUE_DATE: 
"2023-11-17"}, {**EXPECTED_UPDATE_ITEM, "due": "2023-11-17"}, "1", ), ( - {"due_datetime": "2023-11-17T11:30:00+00:00"}, + {ATTR_DUE_DATETIME: "2023-11-17T11:30:00+00:00"}, {**EXPECTED_UPDATE_ITEM, "due": "2023-11-17T05:30:00-06:00"}, "1", ), ( - {"description": "Additional detail"}, + {ATTR_DESCRIPTION: "Additional detail"}, {**EXPECTED_UPDATE_ITEM, "description": "Additional detail"}, "1", ), @@ -258,9 +272,9 @@ async def test_update_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -279,9 +293,9 @@ async def test_update_item( # Update item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -303,7 +317,7 @@ async def test_update_item( ("item_data", "expected_item_data"), [ ( - {"status": "completed"}, + {ATTR_STATUS: "completed"}, { "summary": "soda", "status": "completed", @@ -312,7 +326,7 @@ async def test_update_item( }, ), ( - {"due_date": "2024-01-02"}, + {ATTR_DUE_DATE: "2024-01-02"}, { "summary": "soda", "status": "needs_action", @@ -321,7 +335,7 @@ async def test_update_item( }, ), ( - {"due_date": None}, + {ATTR_DUE_DATE: None}, { "summary": "soda", "status": "needs_action", @@ -329,7 +343,7 @@ async def test_update_item( }, ), ( - {"due_datetime": "2024-01-01 10:30:00"}, + {ATTR_DUE_DATETIME: "2024-01-01 10:30:00"}, { "summary": "soda", "status": "needs_action", @@ -338,7 +352,7 @@ async def test_update_item( }, ), ( - {"due_datetime": None}, + {ATTR_DUE_DATETIME: None}, { "summary": "soda", "status": "needs_action", @@ -346,7 +360,7 @@ async def test_update_item( }, ), ( - {"description": "updated description"}, + {ATTR_DESCRIPTION: "updated 
description"}, { "summary": "soda", "status": "needs_action", @@ -355,7 +369,7 @@ async def test_update_item( }, ), ( - {"description": None}, + {ATTR_DESCRIPTION: None}, {"summary": "soda", "status": "needs_action", "due": "2024-01-01"}, ), ], @@ -381,9 +395,13 @@ async def test_update_existing_field( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda", "description": "Additional detail", "due_date": "2024-01-01"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + { + ATTR_ITEM: "soda", + ATTR_DESCRIPTION: "Additional detail", + ATTR_DUE_DATE: "2024-01-01", + }, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -398,9 +416,9 @@ async def test_update_existing_field( # Perform update await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -424,9 +442,9 @@ async def test_rename( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -444,9 +462,9 @@ async def test_rename( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], "rename": "water"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], ATTR_RENAME: "water"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -501,9 +519,9 @@ async def test_move_item( for i in range(1, 5): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": f"item {i}"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: f"item {i}"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -559,9 +577,9 @@ async def 
test_move_item_previous_unknown( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "item 1"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "item 1"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) items = await ws_get_items() @@ -732,9 +750,9 @@ async def test_susbcribe( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -765,9 +783,9 @@ async def test_susbcribe( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": uid, "rename": "milk"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_RENAME: "milk"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/locative/test_init.py b/tests/components/locative/test_init.py index 305497ebbd6..8fd239ee398 100644 --- a/tests/components/locative/test_init.py +++ b/tests/components/locative/test_init.py @@ -38,7 +38,7 @@ async def locative_client( @pytest.fixture -async def webhook_id(hass, locative_client): +async def webhook_id(hass: HomeAssistant, locative_client: TestClient) -> str: """Initialize the Geofency component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -56,7 +56,7 @@ async def webhook_id(hass, locative_client): return result["result"].data["webhook_id"] -async def test_missing_data(locative_client, webhook_id) -> None: +async def test_missing_data(locative_client: TestClient, webhook_id: str) -> None: """Test missing data.""" url = f"/api/webhook/{webhook_id}" @@ -116,7 +116,9 @@ async def test_missing_data(locative_client, webhook_id) -> None: assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY -async def test_enter_and_exit(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_enter_and_exit( + hass: 
HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test when there is a known zone.""" url = f"/api/webhook/{webhook_id}" @@ -186,7 +188,7 @@ async def test_enter_and_exit(hass: HomeAssistant, locative_client, webhook_id) async def test_exit_after_enter( - hass: HomeAssistant, locative_client, webhook_id + hass: HomeAssistant, locative_client: TestClient, webhook_id: str ) -> None: """Test when an exit message comes after an enter message.""" url = f"/api/webhook/{webhook_id}" @@ -229,7 +231,9 @@ async def test_exit_after_enter( assert state.state == "work" -async def test_exit_first(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_exit_first( + hass: HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test when an exit message is sent first on a new device.""" url = f"/api/webhook/{webhook_id}" @@ -250,7 +254,9 @@ async def test_exit_first(hass: HomeAssistant, locative_client, webhook_id) -> N assert state.state == "not_home" -async def test_two_devices(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_two_devices( + hass: HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test updating two different devices.""" url = f"/api/webhook/{webhook_id}" @@ -294,7 +300,7 @@ async def test_two_devices(hass: HomeAssistant, locative_client, webhook_id) -> reason="The device_tracker component does not support unloading yet." 
) async def test_load_unload_entry( - hass: HomeAssistant, locative_client, webhook_id + hass: HomeAssistant, locative_client: TestClient, webhook_id: str ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" diff --git a/tests/components/lock/conftest.py b/tests/components/lock/conftest.py index f1715687339..fd569b162bc 100644 --- a/tests/components/lock/conftest.py +++ b/tests/components/lock/conftest.py @@ -1,10 +1,10 @@ """Fixtures for the lock entity platform tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, diff --git a/tests/components/lock/test_device_condition.py b/tests/components/lock/test_device_condition.py index 97afe9fb759..74910e1909f 100644 --- a/tests/components/lock/test_device_condition.py +++ b/tests/components/lock/test_device_condition.py @@ -21,11 +21,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -33,12 +29,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -139,7 +129,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, 
entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -291,52 +281,52 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_locked - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_locked - event - test_event1" hass.states.async_set(entry.entity_id, STATE_UNLOCKED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_unlocked - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_unlocked - event - test_event2" hass.states.async_set(entry.entity_id, STATE_UNLOCKING) hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_unlocking - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_unlocking - event - test_event3" hass.states.async_set(entry.entity_id, STATE_LOCKING) hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "is_locking - event - test_event4" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "is_locking - event - test_event4" hass.states.async_set(entry.entity_id, STATE_JAMMED) hass.bus.async_fire("test_event5") await hass.async_block_till_done() - assert len(calls) == 5 - assert calls[4].data["some"] == "is_jammed - event - test_event5" + assert len(service_calls) == 5 + assert service_calls[4].data["some"] == "is_jammed - event - test_event5" hass.states.async_set(entry.entity_id, STATE_OPENING) 
hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 6 - assert calls[5].data["some"] == "is_opening - event - test_event6" + assert len(service_calls) == 6 + assert service_calls[5].data["some"] == "is_opening - event - test_event6" hass.states.async_set(entry.entity_id, STATE_OPEN) hass.bus.async_fire("test_event7") await hass.async_block_till_done() - assert len(calls) == 7 - assert calls[6].data["some"] == "is_open - event - test_event7" + assert len(service_calls) == 7 + assert service_calls[6].data["some"] == "is_open - event - test_event7" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -380,5 +370,5 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_locked - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_locked - event - test_event1" diff --git a/tests/components/lock/test_device_trigger.py b/tests/components/lock/test_device_trigger.py index 3cbfbb1a04c..f64334fa29b 100644 --- a/tests/components/lock/test_device_trigger.py +++ b/tests/components/lock/test_device_trigger.py @@ -29,7 +29,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -38,12 +37,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def 
test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -212,7 +205,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -296,27 +289,27 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"locked - device - {entry.entity_id} - unlocked - locked - None" ) # Fake that the entity is turning off. hass.states.async_set(entry.entity_id, STATE_UNLOCKED) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"unlocked - device - {entry.entity_id} - locked - unlocked - None" ) # Fake that the entity is opens. hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert ( - calls[2].data["some"] + service_calls[2].data["some"] == f"open - device - {entry.entity_id} - unlocked - open - None" ) @@ -325,7 +318,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -371,9 +364,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is turning on. 
hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"locked - device - {entry.entity_id} - unlocked - locked - None" ) @@ -382,7 +375,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -516,64 +509,64 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - unlocked - locked - 0:00:05" ) hass.states.async_set(entry.entity_id, STATE_UNLOCKING) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=16)) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 await hass.async_block_till_done() assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"turn_on device - {entry.entity_id} - locked - unlocking - 0:00:05" ) hass.states.async_set(entry.entity_id, STATE_JAMMED) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=21)) await 
hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 await hass.async_block_till_done() assert ( - calls[2].data["some"] + service_calls[2].data["some"] == f"turn_off device - {entry.entity_id} - unlocking - jammed - 0:00:05" ) hass.states.async_set(entry.entity_id, STATE_LOCKING) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=27)) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 await hass.async_block_till_done() assert ( - calls[3].data["some"] + service_calls[3].data["some"] == f"turn_on device - {entry.entity_id} - jammed - locking - 0:00:05" ) hass.states.async_set(entry.entity_id, STATE_OPENING) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=27)) await hass.async_block_till_done() - assert len(calls) == 5 + assert len(service_calls) == 5 await hass.async_block_till_done() assert ( - calls[4].data["some"] + service_calls[4].data["some"] == f"turn_on device - {entry.entity_id} - locking - opening - 0:00:05" ) diff --git a/tests/components/logbook/common.py b/tests/components/logbook/common.py index 67f12955581..afa8b7fcde5 100644 --- a/tests/components/logbook/common.py +++ b/tests/components/logbook/common.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.components import logbook from homeassistant.components.logbook import processor -from homeassistant.components.logbook.models import LogbookConfig +from homeassistant.components.logbook.models import EventAsRow, LogbookConfig from homeassistant.components.recorder.models import ( process_timestamp_to_utc_isoformat, ulid_to_bytes_or_none, @@ -18,6 +18,8 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.json import JSONEncoder import homeassistant.util.dt as dt_util 
+IDX_TO_NAME = dict(enumerate(EventAsRow._fields)) + class MockRow: """Minimal row mock.""" @@ -48,6 +50,10 @@ class MockRow: self.attributes = None self.context_only = False + def __getitem__(self, idx: int) -> Any: + """Get item.""" + return getattr(self, IDX_TO_NAME[idx]) + @property def time_fired_minute(self): """Minute the event was fired.""" @@ -73,7 +79,7 @@ def mock_humanify(hass_, rows): event_cache, entity_name_cache, include_entity_name=True, - format_time=processor._row_time_fired_isoformat, + timestamp=False, ) context_augmenter = processor.ContextAugmenter(logbook_run) return list( diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 3534192a43e..9dc96410166 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -1,11 +1,9 @@ """The tests for the logbook component.""" import asyncio -import collections from collections.abc import Callable from datetime import datetime, timedelta from http import HTTPStatus -import json from unittest.mock import Mock from freezegun import freeze_time @@ -15,7 +13,7 @@ import voluptuous as vol from homeassistant.components import logbook, recorder from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED -from homeassistant.components.logbook.models import LazyEventPartialState +from homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState from homeassistant.components.logbook.processor import EventProcessor from homeassistant.components.logbook.queries.common import PSEUDO_EVENT_STATE_CHANGED from homeassistant.components.recorder import Recorder @@ -44,7 +42,6 @@ import homeassistant.core as ha from homeassistant.core import Event, HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS -from 
homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -68,12 +65,12 @@ async def hass_(recorder_mock: Recorder, hass: HomeAssistant) -> HomeAssistant: @pytest.fixture -async def set_utc(hass): +async def set_utc(hass: HomeAssistant) -> None: """Set timezone to UTC.""" await hass.config.async_set_time_zone("UTC") -async def test_service_call_create_logbook_entry(hass_) -> None: +async def test_service_call_create_logbook_entry(hass_: HomeAssistant) -> None: """Test if service call create log book entry.""" calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY) @@ -126,8 +123,9 @@ async def test_service_call_create_logbook_entry(hass_) -> None: assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook" +@pytest.mark.usefixtures("recorder_mock") async def test_service_call_create_logbook_entry_invalid_entity_id( - recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test if service call create log book entry with an invalid entity id.""" await async_setup_component(hass, "logbook", {}) @@ -156,7 +154,9 @@ async def test_service_call_create_logbook_entry_invalid_entity_id( assert events[0][logbook.ATTR_MESSAGE] == "is triggered" -async def test_service_call_create_log_book_entry_no_message(hass_) -> None: +async def test_service_call_create_log_book_entry_no_message( + hass_: HomeAssistant, +) -> None: """Test if service call create log book entry without message.""" calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY) @@ -172,7 +172,7 @@ async def test_service_call_create_log_book_entry_no_message(hass_) -> None: async def test_filter_sensor( - hass_: ha.HomeAssistant, hass_client: ClientSessionGenerator + hass_: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test numeric sensors are filtered.""" @@ -220,7 +220,7 @@ async def test_filter_sensor( _assert_entry(entries[2], name="ble", entity_id=entity_id4, 
state="10") -async def test_home_assistant_start_stop_not_grouped(hass_) -> None: +async def test_home_assistant_start_stop_not_grouped(hass_: HomeAssistant) -> None: """Test if HA start and stop events are no longer grouped.""" await async_setup_component(hass_, "homeassistant", {}) await hass_.async_block_till_done() @@ -237,7 +237,7 @@ async def test_home_assistant_start_stop_not_grouped(hass_) -> None: assert_entry(entries[1], name="Home Assistant", message="started", domain=ha.DOMAIN) -async def test_home_assistant_start(hass_) -> None: +async def test_home_assistant_start(hass_: HomeAssistant) -> None: """Test if HA start is not filtered or converted into a restart.""" await async_setup_component(hass_, "homeassistant", {}) await hass_.async_block_till_done() @@ -257,7 +257,7 @@ async def test_home_assistant_start(hass_) -> None: assert_entry(entries[1], pointA, "bla", entity_id=entity_id) -def test_process_custom_logbook_entries(hass_) -> None: +def test_process_custom_logbook_entries(hass_: HomeAssistant) -> None: """Test if custom log book entries get added as an entry.""" name = "Nice name" message = "has a custom entry" @@ -324,55 +324,27 @@ def create_state_changed_event_from_old_new( entity_id, event_time_fired, old_state, new_state ): """Create a state changed event from a old and new state.""" - attributes = {} - if new_state is not None: - attributes = new_state.get("attributes") - attributes_json = json.dumps(attributes, cls=JSONEncoder) - row = collections.namedtuple( - "Row", - [ - "event_type", - "event_data", - "time_fired", - "time_fired_ts", - "context_id_bin", - "context_user_id_bin", - "context_parent_id_bin", - "state", - "entity_id", - "domain", - "attributes", - "state_id", - "old_state_id", - "shared_attrs", - "shared_data", - "context_only", - ], + row = EventAsRow( + row_id=1, + event_type=PSEUDO_EVENT_STATE_CHANGED, + event_data="{}", + time_fired_ts=dt_util.utc_to_timestamp(event_time_fired), + context_id_bin=None, + 
context_user_id_bin=None, + context_parent_id_bin=None, + state=new_state and new_state.get("state"), + entity_id=entity_id, + icon=None, + context_only=False, + data=None, + context=None, ) - - row.event_type = PSEUDO_EVENT_STATE_CHANGED - row.event_data = "{}" - row.shared_data = "{}" - row.attributes = attributes_json - row.shared_attrs = attributes_json - row.time_fired = event_time_fired - row.time_fired_ts = dt_util.utc_to_timestamp(event_time_fired) - row.state = new_state and new_state.get("state") - row.entity_id = entity_id - row.domain = entity_id and ha.split_entity_id(entity_id)[0] - row.context_only = False - row.context_id_bin = None - row.friendly_name = None - row.icon = None - row.context_user_id_bin = None - row.context_parent_id_bin = None - row.old_state_id = old_state and 1 - row.state_id = new_state and 1 return LazyEventPartialState(row, {}) +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view.""" await async_setup_component(hass, "logbook", {}) @@ -382,8 +354,9 @@ async def test_logbook_view( assert response.status == HTTPStatus.OK +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_invalid_start_date_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with an invalid date time.""" await async_setup_component(hass, "logbook", {}) @@ -393,8 +366,9 @@ async def test_logbook_view_invalid_start_date_time( assert response.status == HTTPStatus.BAD_REQUEST +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_invalid_end_date_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: 
"""Test the logbook view.""" await async_setup_component(hass, "logbook", {}) @@ -406,11 +380,10 @@ async def test_logbook_view_invalid_end_date_time( assert response.status == HTTPStatus.BAD_REQUEST +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_logbook_view_period_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test the logbook view with period and entity.""" await async_setup_component(hass, "logbook", {}) @@ -492,8 +465,9 @@ async def test_logbook_view_period_entity( assert response_json[0]["entity_id"] == entity_id_test +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_describe_event( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test teaching logbook about a new event.""" @@ -540,8 +514,9 @@ async def test_logbook_describe_event( assert event["domain"] == "test_domain" +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_described_event( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test exclusions of events that are described by another integration.""" name = "My Automation Rule" @@ -611,8 +586,9 @@ async def test_exclude_described_event( assert event["entity_id"] == "automation.included_rule" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_end_time_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity.""" await async_setup_component(hass, "logbook", {}) @@ -671,8 +647,9 @@ async def test_logbook_view_end_time_entity( assert response_json[0]["entity_id"] == entity_id_test +@pytest.mark.usefixtures("recorder_mock") async def 
test_logbook_entity_filter_with_automations( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -757,8 +734,9 @@ async def test_logbook_entity_filter_with_automations( assert json_dict[0]["entity_id"] == entity_id_second +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_no_longer_in_state_machine( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with an entity that hass been removed from the state machine.""" await async_setup_component(hass, "logbook", {}) @@ -796,11 +774,10 @@ async def test_logbook_entity_no_longer_in_state_machine( assert json_dict[0]["name"] == "area 001" +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_filter_continuous_sensor_values( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test remove continuous sensor events from logbook.""" await async_setup_component(hass, "logbook", {}) @@ -840,11 +817,10 @@ async def test_filter_continuous_sensor_values( assert response_json[1]["entity_id"] == entity_id_third +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_new_entities( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test if events are excluded on first update.""" await asyncio.gather( @@ -882,11 +858,10 @@ async def test_exclude_new_entities( assert response_json[1]["message"] == "started" +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_removed_entities( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test if events are excluded 
on last update.""" await asyncio.gather( @@ -931,11 +906,10 @@ async def test_exclude_removed_entities( assert response_json[2]["entity_id"] == entity_id2 +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_attribute_changes( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test if events of attribute changes are filtered.""" await asyncio.gather( @@ -976,8 +950,9 @@ async def test_exclude_attribute_changes( assert response_json[2]["entity_id"] == "light.kitchen" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_context_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -1129,8 +1104,9 @@ async def test_logbook_entity_context_id( assert json_dict[7]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_context_id_automation_script_started_manually( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook populates context_ids for scripts and automations started manually.""" await asyncio.gather( @@ -1221,8 +1197,9 @@ async def test_logbook_context_id_automation_script_started_manually( assert json_dict[4]["context_domain"] == "script" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_context_parent_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view links events via context parent_id.""" await asyncio.gather( @@ -1403,8 +1380,9 @@ async def test_logbook_entity_context_parent_id( assert 
json_dict[8]["context_user_id"] == "485cacf93ef84d25a99ced3126b921d2" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_context_from_template( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -1493,8 +1471,9 @@ async def test_logbook_context_from_template( assert json_dict[5]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a single entity and .""" await async_setup_component(hass, "logbook", {}) @@ -1564,8 +1543,9 @@ async def test_logbook_( assert json_dict[1]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_many_entities_multiple_calls( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a many entities called multiple times.""" await async_setup_component(hass, "logbook", {}) @@ -1636,8 +1616,9 @@ async def test_logbook_many_entities_multiple_calls( assert len(json_dict) == 0 +@pytest.mark.usefixtures("recorder_mock") async def test_custom_log_entry_discoverable_via_( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if a custom log entry is later discoverable via .""" await async_setup_component(hass, "logbook", {}) @@ -1673,8 +1654,9 @@ async def test_custom_log_entry_discoverable_via_( assert json_dict[0]["entity_id"] == "switch.test_switch" 
+@pytest.mark.usefixtures("recorder_mock") async def test_logbook_multiple_entities( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a multiple entities.""" await async_setup_component(hass, "logbook", {}) @@ -1799,8 +1781,9 @@ async def test_logbook_multiple_entities( assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_invalid_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with requesting an invalid entity.""" await async_setup_component(hass, "logbook", {}) @@ -1819,8 +1802,9 @@ async def test_logbook_invalid_entity( assert response.status == HTTPStatus.INTERNAL_SERVER_ERROR +@pytest.mark.usefixtures("recorder_mock") async def test_icon_and_state( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test to ensure state and custom icons are returned.""" await asyncio.gather( @@ -1864,8 +1848,9 @@ async def test_icon_and_state( assert response_json[2]["state"] == STATE_OFF +@pytest.mark.usefixtures("recorder_mock") async def test_fire_logbook_entries( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test many logbook entry calls.""" await async_setup_component(hass, "logbook", {}) @@ -1902,8 +1887,9 @@ async def test_fire_logbook_entries( assert len(response_json) == 11 +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_domain( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> 
None: """Test if events are filtered if domain is excluded in config.""" entity_id = "switch.bla" @@ -1938,8 +1924,9 @@ async def test_exclude_events_domain( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_domain_glob( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain or glob is excluded in config.""" entity_id = "switch.bla" @@ -1983,8 +1970,9 @@ async def test_exclude_events_domain_glob( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_include_events_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if entity is included in config.""" entity_id = "sensor.bla" @@ -2025,8 +2013,9 @@ async def test_include_events_entity( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if entity is excluded in config.""" entity_id = "sensor.bla" @@ -2061,8 +2050,9 @@ async def test_exclude_events_entity( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_include_events_domain( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain is included in config.""" assert await async_setup_component(hass, "alexa", {}) @@ -2105,8 +2095,9 @@ async def test_include_events_domain( 
_assert_entry(entries[2], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_include_events_domain_glob( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain or glob is included in config.""" assert await async_setup_component(hass, "alexa", {}) @@ -2164,8 +2155,9 @@ async def test_include_events_domain_glob( _assert_entry(entries[3], name="included", entity_id=entity_id3) +@pytest.mark.usefixtures("recorder_mock") async def test_include_exclude_events_no_globs( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -2222,8 +2214,9 @@ async def test_include_exclude_events_no_globs( _assert_entry(entries[5], name="keep", entity_id=entity_id4, state="10") +@pytest.mark.usefixtures("recorder_mock") async def test_include_exclude_events_with_glob_filters( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -2288,8 +2281,9 @@ async def test_include_exclude_events_with_glob_filters( _assert_entry(entries[6], name="included", entity_id=entity_id5, state="30") +@pytest.mark.usefixtures("recorder_mock") async def test_empty_config( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test we can handle an empty entity filter.""" entity_id = "sensor.blu" @@ -2322,8 +2316,9 @@ async def test_empty_config( _assert_entry(entries[1], name="blu", entity_id=entity_id) 
+@pytest.mark.usefixtures("recorder_mock") async def test_context_filter( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test we can filter by context.""" assert await async_setup_component(hass, "logbook", {}) @@ -2399,8 +2394,9 @@ def _assert_entry( assert state == entry["state"] +@pytest.mark.usefixtures("recorder_mock") async def test_get_events( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test logbook get_events.""" now = dt_util.utcnow() @@ -2519,8 +2515,9 @@ async def test_get_events( assert isinstance(results[0]["when"], float) +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_future_start_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events with a future start time.""" await async_setup_component(hass, "logbook", {}) @@ -2544,8 +2541,9 @@ async def test_get_events_future_start_time( assert len(results) == 0 +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_bad_start_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events bad start time.""" await async_setup_component(hass, "logbook", {}) @@ -2564,8 +2562,9 @@ async def test_get_events_bad_start_time( assert response["error"]["code"] == "invalid_start_time" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_bad_end_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events bad end time.""" now = dt_util.utcnow() @@ -2586,8 +2585,9 @@ async def test_get_events_bad_end_time( assert 
response["error"]["code"] == "invalid_end_time" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_invalid_filters( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events invalid filters.""" await async_setup_component(hass, "logbook", {}) @@ -2616,8 +2616,8 @@ async def test_get_events_invalid_filters( assert response["error"]["code"] == "invalid_format" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_with_device_ids( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, @@ -2757,8 +2757,9 @@ async def test_get_events_with_device_ids( assert isinstance(results[3]["when"], float) +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_select_entities_context_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -2892,8 +2893,9 @@ async def test_logbook_select_entities_context_id( assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_with_context_state( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test logbook get_events with a context state.""" now = dt_util.utcnow() @@ -2957,9 +2959,8 @@ async def test_get_events_with_context_state( assert "context_event_type" not in results[3] -async def test_logbook_with_empty_config( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_logbook_with_empty_config(hass: HomeAssistant) -> None: """Test we handle a empty 
configuration.""" assert await async_setup_component( hass, @@ -2972,9 +2973,8 @@ async def test_logbook_with_empty_config( await hass.async_block_till_done() -async def test_logbook_with_non_iterable_entity_filter( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_logbook_with_non_iterable_entity_filter(hass: HomeAssistant) -> None: """Test we handle a non-iterable entity filter.""" assert await async_setup_component( hass, diff --git a/tests/components/logbook/test_models.py b/tests/components/logbook/test_models.py index 7021711014f..cfdd7efc727 100644 --- a/tests/components/logbook/test_models.py +++ b/tests/components/logbook/test_models.py @@ -2,20 +2,26 @@ from unittest.mock import Mock -from homeassistant.components.logbook.models import LazyEventPartialState +from homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState def test_lazy_event_partial_state_context() -> None: """Test we can extract context from a lazy event partial state.""" state = LazyEventPartialState( - Mock( + EventAsRow( + row_id=1, + event_type="event_type", + event_data={}, + time_fired_ts=1, context_id_bin=b"1234123412341234", context_user_id_bin=b"1234123412341234", context_parent_id_bin=b"4444444444444444", - event_data={}, - event_type="event_type", - entity_id="entity_id", state="state", + entity_id="entity_id", + icon="icon", + context_only=False, + data={}, + context=Mock(), ), {}, ) diff --git a/tests/components/logbook/test_websocket_api.py b/tests/components/logbook/test_websocket_api.py index ac653737614..e5649564f94 100644 --- a/tests/components/logbook/test_websocket_api.py +++ b/tests/components/logbook/test_websocket_api.py @@ -3,6 +3,7 @@ import asyncio from collections.abc import Callable from datetime import timedelta +from typing import Any from unittest.mock import ANY, patch from freezegun import freeze_time @@ -31,9 +32,10 @@ from homeassistant.const import ( STATE_OFF, 
STATE_ON, ) -from homeassistant.core import Event, HomeAssistant, State +from homeassistant.core import Event, HomeAssistant, State, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS +from homeassistant.helpers.event import async_track_state_change_event from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -46,12 +48,6 @@ from tests.components.recorder.common import ( from tests.typing import RecorderInstanceGenerator, WebSocketGenerator -@pytest.fixture -async def set_utc(hass): - """Set timezone to UTC.""" - await hass.config.async_set_time_zone("UTC") - - def listeners_without_writes(listeners: dict[str, int]) -> dict[str, int]: """Return listeners without final write listeners since we are not testing for these.""" return { @@ -2965,3 +2961,79 @@ async def test_subscribe_all_entities_are_continuous_with_device( assert listeners_without_writes( hass.bus.async_listeners() ) == listeners_without_writes(init_listeners) + + +@pytest.mark.parametrize("params", [{"entity_ids": ["binary_sensor.is_light"]}, {}]) +async def test_live_stream_with_changed_state_change( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + params: dict[str, Any], +) -> None: + """Test the live logbook stream with chained events.""" + config = {recorder.CONF_COMMIT_INTERVAL: 0.5} + await async_setup_recorder_instance(hass, config) + now = dt_util.utcnow() + await asyncio.gather( + *[ + async_setup_component(hass, comp, {}) + for comp in ("homeassistant", "logbook") + ] + ) + + hass.states.async_set("binary_sensor.is_light", "ignored") + hass.states.async_set("binary_sensor.is_light", "init") + await async_wait_recording_done(hass) + + @callback + def auto_off_listener(event): + hass.states.async_set("binary_sensor.is_light", STATE_OFF) + + async_track_state_change_event(hass, 
["binary_sensor.is_light"], auto_off_listener) + + websocket_client = await hass_ws_client() + init_listeners = hass.bus.async_listeners() + await websocket_client.send_json( + { + "id": 7, + "type": "logbook/event_stream", + "start_time": now.isoformat(), + **params, + } + ) + + msg = await asyncio.wait_for(websocket_client.receive_json(), 2) + assert msg["id"] == 7 + assert msg["type"] == TYPE_RESULT + assert msg["success"] + + await hass.async_block_till_done() + hass.states.async_set("binary_sensor.is_light", STATE_ON) + + recieved_rows = [] + while len(recieved_rows) < 3: + msg = await asyncio.wait_for(websocket_client.receive_json(), 2.5) + assert msg["id"] == 7 + assert msg["type"] == "event" + recieved_rows.extend(msg["event"]["events"]) + + # Make sure we get rows back in order + assert recieved_rows == [ + {"entity_id": "binary_sensor.is_light", "state": "init", "when": ANY}, + {"entity_id": "binary_sensor.is_light", "state": "on", "when": ANY}, + {"entity_id": "binary_sensor.is_light", "state": "off", "when": ANY}, + ] + + await websocket_client.send_json( + {"id": 8, "type": "unsubscribe_events", "subscription": 7} + ) + msg = await asyncio.wait_for(websocket_client.receive_json(), 2) + + assert msg["id"] == 8 + assert msg["type"] == TYPE_RESULT + assert msg["success"] + + # Check our listener got unsubscribed + assert listeners_without_writes( + hass.bus.async_listeners() + ) == listeners_without_writes(init_listeners) diff --git a/tests/components/logi_circle/__init__.py b/tests/components/logi_circle/__init__.py deleted file mode 100644 index d2e2fbb8fdb..00000000000 --- a/tests/components/logi_circle/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Logi Circle component.""" diff --git a/tests/components/logi_circle/test_config_flow.py b/tests/components/logi_circle/test_config_flow.py deleted file mode 100644 index 2525354598d..00000000000 --- a/tests/components/logi_circle/test_config_flow.py +++ /dev/null @@ -1,219 +0,0 @@ -"""Tests for 
Logi Circle config flow.""" - -import asyncio -from http import HTTPStatus -from unittest.mock import AsyncMock, Mock, patch - -import pytest - -from homeassistant import config_entries -from homeassistant.components.http import KEY_HASS -from homeassistant.components.logi_circle import config_flow -from homeassistant.components.logi_circle.config_flow import ( - DOMAIN, - AuthorizationFailed, - LogiCircleAuthCallbackView, -) -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import AbortFlow, FlowResultType -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry - - -class MockRequest: - """Mock request passed to HomeAssistantView.""" - - def __init__(self, hass, query): - """Init request object.""" - self.app = {KEY_HASS: hass} - self.query = query - - -def init_config_flow(hass): - """Init a configuration flow.""" - config_flow.register_flow_implementation( - hass, - DOMAIN, - client_id="id", - client_secret="secret", - api_key="123", - redirect_uri="http://example.com", - sensors=None, - ) - flow = config_flow.LogiCircleFlowHandler() - flow._get_authorization_url = Mock(return_value="http://example.com") - flow.hass = hass - return flow - - -@pytest.fixture -def mock_logi_circle(): - """Mock logi_circle.""" - with patch( - "homeassistant.components.logi_circle.config_flow.LogiCircle" - ) as logi_circle: - future = asyncio.Future() - future.set_result({"accountId": "testId"}) - LogiCircle = logi_circle() - LogiCircle.authorize = AsyncMock(return_value=True) - LogiCircle.close = AsyncMock(return_value=True) - LogiCircle.account = future - LogiCircle.authorize_url = "http://authorize.url" - yield LogiCircle - - -async def test_step_import(hass: HomeAssistant, mock_logi_circle) -> None: - """Test that we trigger import when configuring with client.""" - flow = init_config_flow(hass) - - result = await flow.async_step_import() - assert result["type"] is FlowResultType.FORM - assert 
result["step_id"] == "auth" - - -async def test_full_flow_implementation(hass: HomeAssistant, mock_logi_circle) -> None: - """Test registering an implementation and finishing flow works.""" - config_flow.register_flow_implementation( - hass, - "test-other", - client_id=None, - client_secret=None, - api_key=None, - redirect_uri=None, - sensors=None, - ) - flow = init_config_flow(hass) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await flow.async_step_user({"flow_impl": "test-other"}) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - assert result["description_placeholders"] == { - "authorization_url": "http://example.com" - } - - result = await flow.async_step_code("123ABC") - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Logi Circle ({})".format("testId") - - -async def test_we_reprompt_user_to_follow_link(hass: HomeAssistant) -> None: - """Test we prompt user to follow link if previously prompted.""" - flow = init_config_flow(hass) - - result = await flow.async_step_auth("dummy") - assert result["errors"]["base"] == "follow_link" - - -async def test_abort_if_no_implementation_registered(hass: HomeAssistant) -> None: - """Test we abort if no implementation is registered.""" - flow = config_flow.LogiCircleFlowHandler() - flow.hass = hass - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "missing_configuration" - - -async def test_abort_if_already_setup(hass: HomeAssistant) -> None: - """Test we abort if Logi Circle is already setup.""" - flow = init_config_flow(hass) - MockConfigEntry(domain=config_flow.DOMAIN).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": config_entries.SOURCE_USER}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == 
"already_configured" - - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - with pytest.raises(AbortFlow): - result = await flow.async_step_code() - - result = await flow.async_step_auth() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "external_setup" - - -@pytest.mark.parametrize( - ("side_effect", "error"), - [ - (TimeoutError, "authorize_url_timeout"), - (AuthorizationFailed, "invalid_auth"), - ], -) -async def test_abort_if_authorize_fails( - hass: HomeAssistant, mock_logi_circle, side_effect, error -) -> None: - """Test we abort if authorizing fails.""" - flow = init_config_flow(hass) - mock_logi_circle.authorize.side_effect = side_effect - - result = await flow.async_step_code("123ABC") - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "external_error" - - result = await flow.async_step_auth() - assert result["errors"]["base"] == error - - -async def test_not_pick_implementation_if_only_one(hass: HomeAssistant) -> None: - """Test we bypass picking implementation if we have one flow_imp.""" - flow = init_config_flow(hass) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - -async def test_gen_auth_url(hass: HomeAssistant, mock_logi_circle) -> None: - """Test generating authorize URL from Logi Circle API.""" - config_flow.register_flow_implementation( - hass, - "test-auth-url", - client_id="id", - client_secret="secret", - api_key="123", - redirect_uri="http://example.com", - sensors=None, - ) - flow = config_flow.LogiCircleFlowHandler() - flow.hass = hass - flow.flow_impl = "test-auth-url" - await async_setup_component(hass, "http", {}) - - result = flow._get_authorization_url() - assert result == "http://authorize.url" - - -async def 
test_callback_view_rejects_missing_code(hass: HomeAssistant) -> None: - """Test the auth callback view rejects requests with no code.""" - view = LogiCircleAuthCallbackView() - resp = await view.get(MockRequest(hass, {})) - - assert resp.status == HTTPStatus.BAD_REQUEST - - -async def test_callback_view_accepts_code( - hass: HomeAssistant, mock_logi_circle -) -> None: - """Test the auth callback view handles requests with auth code.""" - init_config_flow(hass) - view = LogiCircleAuthCallbackView() - - resp = await view.get(MockRequest(hass, {"code": "456"})) - assert resp.status == HTTPStatus.OK - - await hass.async_block_till_done() - mock_logi_circle.authorize.assert_called_with("456") diff --git a/tests/components/logi_circle/test_init.py b/tests/components/logi_circle/test_init.py deleted file mode 100644 index f8bf8306609..00000000000 --- a/tests/components/logi_circle/test_init.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Tests for the Logi Circle integration.""" - -import asyncio -from unittest.mock import AsyncMock, Mock, patch - -import pytest - -from homeassistant.components.logi_circle import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.common import MockConfigEntry - - -@pytest.fixture(name="disable_platforms") -async def disable_platforms_fixture(hass): - """Disable logi_circle platforms.""" - with patch("homeassistant.components.logi_circle.PLATFORMS", []): - yield - - -@pytest.fixture -def mock_logi_circle(): - """Mock logi_circle.""" - - auth_provider_mock = Mock() - auth_provider_mock.close = AsyncMock() - auth_provider_mock.clear_authorization = AsyncMock() - - with patch("homeassistant.components.logi_circle.LogiCircle") as logi_circle: - future = asyncio.Future() - future.set_result({"accountId": "testId"}) - LogiCircle = logi_circle() - LogiCircle.auth_provider = auth_provider_mock - LogiCircle.synchronize_cameras = 
AsyncMock() - yield LogiCircle - - -async def test_repair_issue( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - disable_platforms, - mock_logi_circle, -) -> None: - """Test the LogiCircle configuration entry loading/unloading handles the repair.""" - config_entry = MockConfigEntry( - title="Example 1", - domain=DOMAIN, - data={ - "api_key": "blah", - "client_id": "blah", - "client_secret": "blah", - "redirect_uri": "blah", - }, - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) - - # Remove the entry - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None diff --git a/tests/components/loqed/conftest.py b/tests/components/loqed/conftest.py index 57ef19d0fcb..ddad8949d7d 100644 --- a/tests/components/loqed/conftest.py +++ b/tests/components/loqed/conftest.py @@ -1,12 +1,12 @@ """Contains fixtures for Loqed tests.""" +from collections.abc import AsyncGenerator import json from typing import Any from unittest.mock import AsyncMock, Mock, patch from loqedAPI import loqed import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.loqed import DOMAIN from homeassistant.components.loqed.const import CONF_CLOUDHOOK_URL diff --git a/tests/components/lovelace/test_cast.py b/tests/components/lovelace/test_cast.py index 632ea731d0c..c54b31d9297 100644 --- a/tests/components/lovelace/test_cast.py +++ b/tests/components/lovelace/test_cast.py @@ -1,10 +1,10 @@ """Test the Lovelace Cast platform.""" +from collections.abc import AsyncGenerator, Generator from time import time from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from 
homeassistant.components.lovelace import cast as lovelace_cast from homeassistant.components.media_player import MediaClass @@ -30,7 +30,7 @@ def mock_onboarding_done() -> Generator[MagicMock]: @pytest.fixture -async def mock_https_url(hass): +async def mock_https_url(hass: HomeAssistant) -> None: """Mock valid URL.""" await async_process_ha_core_config( hass, @@ -39,7 +39,7 @@ async def mock_https_url(hass): @pytest.fixture -async def mock_yaml_dashboard(hass): +async def mock_yaml_dashboard(hass: HomeAssistant) -> AsyncGenerator[None]: """Mock the content of a YAML dashboard.""" # Set up a YAML dashboard with 2 views. assert await async_setup_component( @@ -116,9 +116,8 @@ async def test_browse_media_error(hass: HomeAssistant) -> None: ) -async def test_browse_media( - hass: HomeAssistant, mock_yaml_dashboard, mock_https_url -) -> None: +@pytest.mark.usefixtures("mock_yaml_dashboard", "mock_https_url") +async def test_browse_media(hass: HomeAssistant) -> None: """Test browse media.""" top_level_items = await lovelace_cast.async_browse_media( hass, "lovelace", "", lovelace_cast.CAST_TYPE_CHROMECAST @@ -181,7 +180,8 @@ async def test_browse_media( ) -async def test_play_media(hass: HomeAssistant, mock_yaml_dashboard) -> None: +@pytest.mark.usefixtures("mock_yaml_dashboard") +async def test_play_media(hass: HomeAssistant) -> None: """Test playing media.""" calls = async_mock_service(hass, "cast", "show_lovelace_view") diff --git a/tests/components/lovelace/test_dashboard.py b/tests/components/lovelace/test_dashboard.py index 7577c4dcc0d..3a01e20c1fb 100644 --- a/tests/components/lovelace/test_dashboard.py +++ b/tests/components/lovelace/test_dashboard.py @@ -1,11 +1,11 @@ """Test the Lovelace initialization.""" +from collections.abc import Generator import time from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components import frontend from homeassistant.components.lovelace 
import const, dashboard diff --git a/tests/components/lovelace/test_init.py b/tests/components/lovelace/test_init.py index dc111ab601e..14d93d8302f 100644 --- a/tests/components/lovelace/test_init.py +++ b/tests/components/lovelace/test_init.py @@ -1,10 +1,10 @@ """Test the Lovelace initialization.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/lovelace/test_system_health.py b/tests/components/lovelace/test_system_health.py index d53ebf2871f..4fe248fa950 100644 --- a/tests/components/lovelace/test_system_health.py +++ b/tests/components/lovelace/test_system_health.py @@ -1,10 +1,10 @@ """Tests for Lovelace system health.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.lovelace import dashboard from homeassistant.core import HomeAssistant diff --git a/tests/components/luftdaten/conftest.py b/tests/components/luftdaten/conftest.py index e1aac7caeb0..c3daa390e49 100644 --- a/tests/components/luftdaten/conftest.py +++ b/tests/components/luftdaten/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.luftdaten.const import CONF_SENSOR_ID, DOMAIN from homeassistant.const import CONF_SHOW_ON_MAP diff --git a/tests/components/lupusec/test_config_flow.py b/tests/components/lupusec/test_config_flow.py index e106bbd5001..f354eaf0644 100644 --- a/tests/components/lupusec/test_config_flow.py +++ b/tests/components/lupusec/test_config_flow.py @@ -153,88 +153,3 @@ async def test_flow_user_init_data_already_configured(hass: 
HomeAssistant) -> No assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("mock_import_step", "mock_title"), - [ - (MOCK_IMPORT_STEP, MOCK_IMPORT_STEP[CONF_IP_ADDRESS]), - (MOCK_IMPORT_STEP_NAME, MOCK_IMPORT_STEP_NAME[CONF_NAME]), - ], -) -async def test_flow_source_import( - hass: HomeAssistant, mock_import_step, mock_title -) -> None: - """Test configuration import from YAML.""" - with ( - patch( - "homeassistant.components.lupusec.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "homeassistant.components.lupusec.config_flow.lupupy.Lupusec", - ) as mock_initialize_lupusec, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=mock_import_step, - ) - - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == mock_title - assert result["data"] == MOCK_DATA_STEP - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_initialize_lupusec.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("raise_error", "text_error"), - [ - (LupusecException("Test lupusec exception"), "cannot_connect"), - (JSONDecodeError("Test JSONDecodeError", "test", 1), "cannot_connect"), - (Exception("Test unknown exception"), "unknown"), - ], -) -async def test_flow_source_import_error_and_recover( - hass: HomeAssistant, raise_error, text_error -) -> None: - """Test exceptions and recovery.""" - - with patch( - "homeassistant.components.lupusec.config_flow.lupupy.Lupusec", - side_effect=raise_error, - ) as mock_initialize_lupusec: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=MOCK_IMPORT_STEP, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == text_error - assert len(mock_initialize_lupusec.mock_calls) == 1 
- - -async def test_flow_source_import_already_configured(hass: HomeAssistant) -> None: - """Test duplicate config entry..""" - - entry = MockConfigEntry( - domain=DOMAIN, - title=MOCK_DATA_STEP[CONF_HOST], - data=MOCK_DATA_STEP, - ) - - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=MOCK_IMPORT_STEP, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/lutron/conftest.py b/tests/components/lutron/conftest.py index 90f96f1783d..f2106f736dc 100644 --- a/tests/components/lutron/conftest.py +++ b/tests/components/lutron/conftest.py @@ -1,9 +1,9 @@ """Provide common Lutron fixtures and mocks.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/lutron_caseta/__init__.py b/tests/components/lutron_caseta/__init__.py index 9b25e2a0164..b27d30ac31f 100644 --- a/tests/components/lutron_caseta/__init__.py +++ b/tests/components/lutron_caseta/__init__.py @@ -101,7 +101,7 @@ async def async_setup_integration(hass: HomeAssistant, mock_bridge) -> MockConfi class MockBridge: """Mock Lutron bridge that emulates configured connected status.""" - def __init__(self, can_connect=True): + def __init__(self, can_connect=True) -> None: """Initialize MockBridge instance with configured mock connectivity.""" self.can_connect = can_connect self.is_currently_connected = False diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py index 208dd36cccd..9353b897602 100644 --- a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -39,11 +39,7 @@ from homeassistant.setup import async_setup_component from . 
import MockBridge -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations MOCK_BUTTON_DEVICES = [ { @@ -102,12 +98,6 @@ MOCK_BUTTON_DEVICES = [ ] -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def _async_setup_lutron_with_picos(hass): """Setups a lutron bridge with picos.""" config_entry = MockConfigEntry( @@ -135,7 +125,11 @@ async def _async_setup_lutron_with_picos(hass): async def test_get_triggers(hass: HomeAssistant) -> None: """Test we get the expected triggers from a lutron pico.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads device_id = keypads[list(keypads)[0]]["dr_device_id"] @@ -220,7 +214,9 @@ async def test_none_serial_keypad( async def test_if_fires_on_button_event( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for press trigger firing.""" await _async_setup_lutron_with_picos(hass) @@ -266,12 +262,14 @@ async def test_if_fires_on_button_event( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" async def test_if_fires_on_button_event_without_lip( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: 
dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for press trigger firing on a device that does not support lip.""" await _async_setup_lutron_with_picos(hass) @@ -315,12 +313,12 @@ async def test_if_fires_on_button_event_without_lip( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" async def test_validate_trigger_config_no_device( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for no press with no device.""" @@ -356,16 +354,20 @@ async def test_validate_trigger_config_no_device( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_validate_trigger_config_unknown_device( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for no press with an unknown device.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads lutron_device_id = list(keypads)[0] keypad = keypads[lutron_device_id] @@ -404,7 +406,7 @@ async def test_validate_trigger_config_unknown_device( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_validate_trigger_invalid_triggers( @@ -412,7 +414,11 @@ async def 
test_validate_trigger_invalid_triggers( ) -> None: """Test for click_event with invalid triggers.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads lutron_device_id = list(keypads)[0] keypad = keypads[lutron_device_id] @@ -444,7 +450,9 @@ async def test_validate_trigger_invalid_triggers( async def test_if_fires_on_button_event_late_setup( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for press trigger firing with integration getting setup late.""" config_entry_id = await _async_setup_lutron_with_picos(hass) @@ -479,8 +487,9 @@ async def test_if_fires_on_button_event_late_setup( }, ) - await hass.config_entries.async_setup(config_entry_id) - await hass.async_block_till_done() + with patch("homeassistant.components.lutron_caseta.Smartbridge.create_tls"): + await hass.config_entries.async_setup(config_entry_id) + await hass.async_block_till_done() message = { ATTR_SERIAL: device.get("serial"), @@ -495,5 +504,5 @@ async def test_if_fires_on_button_event_late_setup( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" diff --git a/tests/components/lutron_caseta/test_logbook.py b/tests/components/lutron_caseta/test_logbook.py index 51c96b9d9a9..9a58838d65c 100644 --- a/tests/components/lutron_caseta/test_logbook.py +++ b/tests/components/lutron_caseta/test_logbook.py @@ -53,7 
+53,11 @@ async def test_humanify_lutron_caseta_button_event(hass: HomeAssistant) -> None: await hass.async_block_till_done() - data: LutronCasetaData = hass.data[DOMAIN][config_entry.entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry.entry_id + ).runtime_data keypads = data.keypad_data.keypads keypad = keypads["9"] dr_device_id = keypad["dr_device_id"] @@ -111,7 +115,7 @@ async def test_humanify_lutron_caseta_button_event_integration_not_loaded( await hass.async_block_till_done() for device in device_registry.devices.values(): - if device.config_entries == [config_entry.entry_id]: + if device.config_entries == {config_entry.entry_id}: dr_device_id = device.id break diff --git a/tests/components/lyric/test_config_flow.py b/tests/components/lyric/test_config_flow.py index e1a8d1131dc..1e0ae04f741 100644 --- a/tests/components/lyric/test_config_flow.py +++ b/tests/components/lyric/test_config_flow.py @@ -26,7 +26,7 @@ CLIENT_SECRET = "5678" @pytest.fixture -async def mock_impl(hass): +async def mock_impl(hass: HomeAssistant) -> None: """Mock implementation.""" await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() @@ -45,12 +45,11 @@ async def test_abort_if_no_configuration(hass: HomeAssistant) -> None: assert result["reason"] == "missing_credentials" -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_impl") async def test_full_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_impl, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -112,12 +111,11 @@ async def test_full_flow( assert len(mock_setup.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", 
"mock_impl") async def test_reauthentication_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_impl, ) -> None: """Test reauthentication flow.""" old_entry = MockConfigEntry( diff --git a/tests/components/madvr/__init__.py b/tests/components/madvr/__init__.py new file mode 100644 index 00000000000..343dd68a25d --- /dev/null +++ b/tests/components/madvr/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the madvr-envy integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/madvr/conftest.py b/tests/components/madvr/conftest.py new file mode 100644 index 00000000000..187786c6964 --- /dev/null +++ b/tests/components/madvr/conftest.py @@ -0,0 +1,87 @@ +"""MadVR conftest for shared testing setup.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch + +import pytest + +from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT + +from .const import MOCK_CONFIG, MOCK_MAC + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.madvr.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_madvr_client() -> Generator[AsyncMock]: + """Mock a MadVR client.""" + with ( + patch( + "homeassistant.components.madvr.config_flow.Madvr", autospec=True + ) as mock_client, + patch("homeassistant.components.madvr.Madvr", new=mock_client), + ): + client = 
mock_client.return_value + client.host = MOCK_CONFIG[CONF_HOST] + client.port = MOCK_CONFIG[CONF_PORT] + client.mac_address = MOCK_MAC + client.connected.return_value = True + client.is_device_connectable.return_value = True + client.loop = AsyncMock() + client.tasks = AsyncMock() + client.set_update_callback = MagicMock() + + # mock the property to be off on startup (which it is) + is_on_mock = PropertyMock(return_value=True) + type(client).is_on = is_on_mock + + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG, + unique_id=MOCK_MAC, + title=DEFAULT_NAME, + ) + + +def get_update_callback(mock_client: MagicMock): + """Retrieve the update callback function from the mocked client. + + This function extracts the callback that was passed to set_update_callback + on the mocked MadVR client. This callback is typically the handle_push_data + method of the MadVRCoordinator. + + Args: + mock_client (MagicMock): The mocked MadVR client. + + Returns: + function: The update callback function. 
+ + """ + # Get all the calls made to set_update_callback + calls = mock_client.set_update_callback.call_args_list + + if not calls: + raise ValueError("set_update_callback was not called on the mock client") + + # Get the first (and usually only) call + first_call = calls[0] + + # Get the first argument of this call, which should be the callback function + return first_call.args[0] diff --git a/tests/components/madvr/const.py b/tests/components/madvr/const.py new file mode 100644 index 00000000000..8c5e122377b --- /dev/null +++ b/tests/components/madvr/const.py @@ -0,0 +1,18 @@ +"""Constants for the MadVR tests.""" + +from homeassistant.const import CONF_HOST, CONF_PORT + +MOCK_CONFIG = { + CONF_HOST: "192.168.1.1", + CONF_PORT: 44077, +} + +MOCK_MAC = "00:11:22:33:44:55" + +TEST_CON_ERROR = ConnectionError("Connection failed") +TEST_IMP_ERROR = NotImplementedError("Not implemented") + +TEST_FAILED_ON = "Failed to turn on device" +TEST_FAILED_OFF = "Failed to turn off device" +TEST_FAILED_CMD = "Failed to send command" +TEST_COMMAND = "test" diff --git a/tests/components/madvr/snapshots/test_binary_sensors.ambr b/tests/components/madvr/snapshots/test_binary_sensors.ambr new file mode 100644 index 00000000000..7fd54a7c240 --- /dev/null +++ b/tests/components/madvr/snapshots/test_binary_sensors.ambr @@ -0,0 +1,185 @@ +# serializer version: 1 +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_hdr_flag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.madvr_envy_hdr_flag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'HDR flag', + 'platform': 'madvr', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hdr_flag', + 'unique_id': '00:11:22:33:44:55_hdr_flag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_hdr_flag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy HDR flag', + }), + 'context': , + 'entity_id': 'binary_sensor.madvr_envy_hdr_flag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_outgoing_hdr_flag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.madvr_envy_outgoing_hdr_flag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Outgoing HDR flag', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_hdr_flag', + 'unique_id': '00:11:22:33:44:55_outgoing_hdr_flag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_outgoing_hdr_flag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Outgoing HDR flag', + }), + 'context': , + 'entity_id': 'binary_sensor.madvr_envy_outgoing_hdr_flag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_power_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.madvr_envy_power_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power state', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_state', + 'unique_id': '00:11:22:33:44:55_power_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_power_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Power state', + }), + 'context': , + 'entity_id': 'binary_sensor.madvr_envy_power_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_signal_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.madvr_envy_signal_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Signal state', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'signal_state', + 'unique_id': '00:11:22:33:44:55_signal_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_signal_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Signal state', + }), + 'context': , + 'entity_id': 'binary_sensor.madvr_envy_signal_state', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/madvr/snapshots/test_remote.ambr b/tests/components/madvr/snapshots/test_remote.ambr new file mode 100644 index 00000000000..1157496a93e --- /dev/null +++ b/tests/components/madvr/snapshots/test_remote.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_remote_setup[remote.madvr_envy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'remote', + 'entity_category': None, + 'entity_id': 'remote.madvr_envy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:11:22:33:44:55', + 'unit_of_measurement': None, + }) +# --- +# name: test_remote_setup[remote.madvr_envy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy', + 'supported_features': , + }), + 'context': , + 'entity_id': 'remote.madvr_envy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/madvr/snapshots/test_sensors.ambr b/tests/components/madvr/snapshots/test_sensors.ambr new file mode 100644 index 00000000000..7b0dd254f77 --- /dev/null +++ b/tests/components/madvr/snapshots/test_sensors.ambr @@ -0,0 +1,1359 @@ +# serializer version: 1 +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_decimal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.madvr_envy_aspect_decimal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aspect decimal', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aspect_dec', + 'unique_id': '00:11:22:33:44:55_aspect_dec', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_decimal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Aspect decimal', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_aspect_decimal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.78', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_integer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_aspect_integer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aspect integer', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aspect_int', + 'unique_id': '00:11:22:33:44:55_aspect_int', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_integer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Aspect integer', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_aspect_integer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '178', + }) +# --- +# name: 
test_sensor_setup_and_states[sensor.madvr_envy_aspect_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_aspect_name', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aspect name', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aspect_name', + 'unique_id': '00:11:22:33:44:55_aspect_name', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Aspect name', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_aspect_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Widescreen', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_resolution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_aspect_resolution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aspect resolution', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aspect_res', + 'unique_id': '00:11:22:33:44:55_aspect_res', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor_setup_and_states[sensor.madvr_envy_aspect_resolution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Aspect resolution', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_aspect_resolution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3840:2160', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_cpu_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_cpu_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CPU temperature', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temp_cpu', + 'unique_id': '00:11:22:33:44:55_temp_cpu', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_cpu_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'madVR Envy CPU temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_cpu_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_gpu_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_gpu_temperature', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'GPU temperature', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temp_gpu', + 'unique_id': '00:11:22:33:44:55_temp_gpu', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_gpu_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'madVR Envy GPU temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_gpu_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45.5', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_hdmi_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_hdmi_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HDMI temperature', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temp_hdmi', + 'unique_id': '00:11:22:33:44:55_temp_hdmi', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_hdmi_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'madVR Envy HDMI temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.madvr_envy_hdmi_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.0', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_aspect_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '16:9', + '4:3', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_aspect_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming aspect ratio', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_aspect_ratio', + 'unique_id': '00:11:22:33:44:55_incoming_aspect_ratio', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_aspect_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming aspect ratio', + 'options': list([ + '16:9', + '4:3', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_aspect_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16:9', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_bit_depth-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '8bit', + '10bit', + '12bit', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_bit_depth', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming bit depth', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_bit_depth', + 'unique_id': '00:11:22:33:44:55_incoming_bit_depth', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_bit_depth-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming bit depth', + 'options': list([ + '8bit', + '10bit', + '12bit', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_bit_depth', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10bit', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_black_levels-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'TV', + 'PC', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_black_levels', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming black levels', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_black_levels', + 'unique_id': '00:11:22:33:44:55_incoming_black_levels', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_black_levels-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming black levels', + 'options': list([ + 'TV', + 'PC', + ]), + }), + 'context': , + 'entity_id': 
'sensor.madvr_envy_incoming_black_levels', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'PC', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_color_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'RGB', + '444', + '422', + '420', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_color_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming color space', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_color_space', + 'unique_id': '00:11:22:33:44:55_incoming_color_space', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_color_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming color space', + 'options': list([ + 'RGB', + '444', + '422', + '420', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_color_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'RGB', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_colorimetry-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'SDR', + 'HDR10', + 'HLG 601', + 'PAL', + '709', + 'DCI', + '2020', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_colorimetry', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming colorimetry', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_colorimetry', + 'unique_id': '00:11:22:33:44:55_incoming_colorimetry', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_colorimetry-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming colorimetry', + 'options': list([ + 'SDR', + 'HDR10', + 'HLG 601', + 'PAL', + '709', + 'DCI', + '2020', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_colorimetry', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_frame_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_frame_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Incoming frame rate', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_frame_rate', + 'unique_id': '00:11:22:33:44:55_incoming_frame_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_frame_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Incoming frame rate', + }), + 'context': , + 'entity_id': 
'sensor.madvr_envy_incoming_frame_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60p', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_resolution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_resolution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Incoming resolution', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_res', + 'unique_id': '00:11:22:33:44:55_incoming_res', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_resolution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Incoming resolution', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_resolution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3840x2160', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_signal_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '2D', + '3D', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_signal_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming signal type', + 'platform': 
'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_signal_type', + 'unique_id': '00:11:22:33:44:55_incoming_signal_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_signal_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming signal type', + 'options': list([ + '2D', + '3D', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_signal_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3D', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_mainboard_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_mainboard_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mainboard temperature', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temp_mainboard', + 'unique_id': '00:11:22:33:44:55_temp_mainboard', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_mainboard_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'madVR Envy Mainboard temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_mainboard_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.8', + }) +# --- +# name: 
test_sensor_setup_and_states[sensor.madvr_envy_masking_decimal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_masking_decimal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Masking decimal', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'masking_dec', + 'unique_id': '00:11:22:33:44:55_masking_dec', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_decimal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Masking decimal', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_masking_decimal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.78', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_integer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_masking_integer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Masking integer', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'masking_int', + 'unique_id': '00:11:22:33:44:55_masking_int', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor_setup_and_states[sensor.madvr_envy_masking_integer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Masking integer', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_masking_integer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '178', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_resolution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_masking_resolution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Masking resolution', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'masking_res', + 'unique_id': '00:11:22:33:44:55_masking_res', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_resolution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Masking resolution', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_masking_resolution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3840:2160', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_bit_depth-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '8bit', + '10bit', + '12bit', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_bit_depth', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing bit depth', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_bit_depth', + 'unique_id': '00:11:22:33:44:55_outgoing_bit_depth', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_bit_depth-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing bit depth', + 'options': list([ + '8bit', + '10bit', + '12bit', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_bit_depth', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10bit', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_black_levels-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'TV', + 'PC', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_black_levels', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing black levels', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_black_levels', + 'unique_id': '00:11:22:33:44:55_outgoing_black_levels', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_black_levels-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing black levels', + 'options': list([ + 'TV', + 'PC', + ]), + }), + 
'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_black_levels', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'PC', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_color_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'RGB', + '444', + '422', + '420', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_color_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing color space', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_color_space', + 'unique_id': '00:11:22:33:44:55_outgoing_color_space', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_color_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing color space', + 'options': list([ + 'RGB', + '444', + '422', + '420', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_color_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'RGB', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_colorimetry-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'SDR', + 'HDR10', + 'HLG 601', + 'PAL', + '709', + 'DCI', + '2020', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_colorimetry', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing colorimetry', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_colorimetry', + 'unique_id': '00:11:22:33:44:55_outgoing_colorimetry', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_colorimetry-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing colorimetry', + 'options': list([ + 'SDR', + 'HDR10', + 'HLG 601', + 'PAL', + '709', + 'DCI', + '2020', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_colorimetry', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_frame_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_frame_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Outgoing frame rate', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_frame_rate', + 'unique_id': '00:11:22:33:44:55_outgoing_frame_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_frame_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Outgoing frame rate', + }), + 'context': , + 'entity_id': 
'sensor.madvr_envy_outgoing_frame_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60p', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_resolution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_resolution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Outgoing resolution', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_res', + 'unique_id': '00:11:22:33:44:55_outgoing_res', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_resolution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Outgoing resolution', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_resolution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3840x2160', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_signal_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '2D', + '3D', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_signal_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing signal type', + 'platform': 
'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_signal_type', + 'unique_id': '00:11:22:33:44:55_outgoing_signal_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_signal_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing signal type', + 'options': list([ + '2D', + '3D', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_signal_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2D', + }) +# --- diff --git a/tests/components/madvr/test_binary_sensors.py b/tests/components/madvr/test_binary_sensors.py new file mode 100644 index 00000000000..469a3225ca0 --- /dev/null +++ b/tests/components/madvr/test_binary_sensors.py @@ -0,0 +1,79 @@ +"""Tests for the MadVR binary sensor entities.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . 
import setup_integration +from .conftest import get_update_callback + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_binary_sensor_setup( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the binary sensor entities.""" + with patch("homeassistant.components.madvr.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "positive_payload", "negative_payload"), + [ + ( + "binary_sensor.madvr_envy_power_state", + {"is_on": True}, + {"is_on": False}, + ), + ( + "binary_sensor.madvr_envy_signal_state", + {"is_signal": True}, + {"is_signal": False}, + ), + ( + "binary_sensor.madvr_envy_hdr_flag", + {"hdr_flag": True}, + {"hdr_flag": False}, + ), + ( + "binary_sensor.madvr_envy_outgoing_hdr_flag", + {"outgoing_hdr_flag": True}, + {"outgoing_hdr_flag": False}, + ), + ], +) +async def test_binary_sensors( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_id: str, + positive_payload: dict, + negative_payload: dict, +) -> None: + """Test the binary sensors.""" + await setup_integration(hass, mock_config_entry) + update_callback = get_update_callback(mock_madvr_client) + + # Test positive state + update_callback(positive_payload) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + # Test negative state + update_callback(negative_payload) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_OFF diff --git a/tests/components/madvr/test_config_flow.py b/tests/components/madvr/test_config_flow.py new file mode 100644 index 00000000000..6dc84fd6b00 --- /dev/null +++ b/tests/components/madvr/test_config_flow.py @@ 
-0,0 +1,128 @@ +"""Tests for the MadVR config flow.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_CONFIG, MOCK_MAC + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +async def avoid_wait() -> AsyncGenerator[None]: + """Mock sleep.""" + with patch("homeassistant.components.madvr.config_flow.RETRY_INTERVAL", 0): + yield + + +async def test_full_flow( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test full config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_HOST: MOCK_CONFIG[CONF_HOST], + CONF_PORT: MOCK_CONFIG[CONF_PORT], + } + assert result["result"].unique_id == MOCK_MAC + mock_madvr_client.open_connection.assert_called_once() + mock_madvr_client.async_add_tasks.assert_called_once() + mock_madvr_client.async_cancel_tasks.assert_called_once() + + +async def test_flow_errors( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test error handling in config flow.""" + mock_madvr_client.open_connection.side_effect = TimeoutError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_madvr_client.open_connection.side_effect = None + mock_madvr_client.connected = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_madvr_client.connected = True + mock_madvr_client.mac_address = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "no_mac"} + + # ensure an error is recoverable + mock_madvr_client.mac_address = MOCK_MAC + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == { + CONF_HOST: MOCK_CONFIG[CONF_HOST], + CONF_PORT: MOCK_CONFIG[CONF_PORT], + } + + # Verify method calls + assert mock_madvr_client.open_connection.call_count == 4 + assert mock_madvr_client.async_add_tasks.call_count == 2 + # the first call will not call this due to timeout as expected + assert mock_madvr_client.async_cancel_tasks.call_count == 2 + + +async def test_duplicate( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate config entries.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/madvr/test_init.py b/tests/components/madvr/test_init.py new file mode 100644 index 00000000000..dace812af11 --- /dev/null +++ b/tests/components/madvr/test_init.py @@ -0,0 +1,28 @@ +"""Tests for the MadVR integration.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/madvr/test_remote.py b/tests/components/madvr/test_remote.py new file mode 100644 index 00000000000..6fc507534d6 --- /dev/null +++ b/tests/components/madvr/test_remote.py @@ -0,0 +1,155 @@ +"""Tests for the MadVR remote entity.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.remote import ( + DOMAIN as REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, +) +from homeassistant.const import ( + ATTR_COMMAND, + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . 
import setup_integration +from .const import ( + TEST_COMMAND, + TEST_CON_ERROR, + TEST_FAILED_CMD, + TEST_FAILED_OFF, + TEST_FAILED_ON, + TEST_IMP_ERROR, +) + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_remote_setup( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the remote entity.""" + with patch("homeassistant.components.madvr.PLATFORMS", [Platform.REMOTE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_remote_power( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test turning on the remote entity.""" + + await setup_integration(hass, mock_config_entry) + + entity_id = "remote.madvr_envy" + remote = hass.states.get(entity_id) + assert remote.state == STATE_ON + + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + + mock_madvr_client.power_off.assert_called_once() + + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + await hass.async_block_till_done() + + mock_madvr_client.power_on.assert_called_once() + + # cover exception cases + caplog.clear() + mock_madvr_client.power_off.side_effect = TEST_CON_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert TEST_FAILED_OFF in caplog.text + + # Test turning off with NotImplementedError + caplog.clear() + mock_madvr_client.power_off.side_effect = TEST_IMP_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert TEST_FAILED_OFF in caplog.text + + # Reset side_effect for power_off + 
mock_madvr_client.power_off.side_effect = None + + # Test turning on with ConnectionError + caplog.clear() + mock_madvr_client.power_on.side_effect = TEST_CON_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert TEST_FAILED_ON in caplog.text + + # Test turning on with NotImplementedError + caplog.clear() + mock_madvr_client.power_on.side_effect = TEST_IMP_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert TEST_FAILED_ON in caplog.text + + +async def test_send_command( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sending command to the remote entity.""" + + await setup_integration(hass, mock_config_entry) + + entity_id = "remote.madvr_envy" + remote = hass.states.get(entity_id) + assert remote.state == STATE_ON + + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, + blocking=True, + ) + + mock_madvr_client.add_command_to_queue.assert_called_once_with([TEST_COMMAND]) + # cover exceptions + # Test ConnectionError + mock_madvr_client.add_command_to_queue.side_effect = TEST_CON_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, + blocking=True, + ) + assert TEST_FAILED_CMD in caplog.text + + # Test NotImplementedError + mock_madvr_client.add_command_to_queue.side_effect = TEST_IMP_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, + blocking=True, + ) + assert TEST_FAILED_CMD in caplog.text diff --git a/tests/components/madvr/test_sensors.py b/tests/components/madvr/test_sensors.py new file mode 100644 index 00000000000..ddc01fc737a --- /dev/null +++ 
b/tests/components/madvr/test_sensors.py @@ -0,0 +1,108 @@ +"""Tests for the MadVR sensor entities.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.madvr.sensor import get_temperature +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . import setup_integration +from .conftest import get_update_callback + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_setup_and_states( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_madvr_client: AsyncMock, +) -> None: + """Test setup of the sensor entities and their states.""" + with patch("homeassistant.components.madvr.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + update_callback = get_update_callback(mock_madvr_client) + + # Create a big data update with all sensor values + update_data = { + "temp_gpu": 45.5, + "temp_hdmi": 40.0, + "temp_cpu": 50.2, + "temp_mainboard": 35.8, + "incoming_res": "3840x2160", + "incoming_frame_rate": "60p", + "outgoing_signal_type": "2D", + "incoming_signal_type": "3D", + "incoming_color_space": "RGB", + "incoming_bit_depth": "10bit", + "incoming_colorimetry": "2020", + "incoming_black_levels": "PC", + "incoming_aspect_ratio": "16:9", + "outgoing_res": "3840x2160", + "outgoing_frame_rate": "60p", + "outgoing_color_space": "RGB", + "outgoing_bit_depth": "10bit", + "outgoing_colorimetry": "2020", + "outgoing_black_levels": "PC", + "aspect_res": "3840:2160", + "aspect_dec": "1.78", + "aspect_int": "178", + "aspect_name": "Widescreen", + "masking_res": "3840:2160", + "masking_dec": "1.78", + "masking_int": "178", + } + + # Update all 
sensors at once + update_callback(update_data) + await hass.async_block_till_done() + + # Snapshot all entity states + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Test invalid temperature value + update_callback({"temp_gpu": -1}) + await hass.async_block_till_done() + assert hass.states.get("sensor.madvr_envy_gpu_temperature").state == STATE_UNKNOWN + + # Test sensor unknown + update_callback({"incoming_res": None}) + await hass.async_block_till_done() + assert ( + hass.states.get("sensor.madvr_envy_incoming_resolution").state == STATE_UNKNOWN + ) + + # Test sensor becomes known again + update_callback({"incoming_res": "1920x1080"}) + await hass.async_block_till_done() + assert hass.states.get("sensor.madvr_envy_incoming_resolution").state == "1920x1080" + + # Test temperature sensor + update_callback({"temp_gpu": 41.2}) + await hass.async_block_till_done() + assert hass.states.get("sensor.madvr_envy_gpu_temperature").state == "41.2" + + # test get_temperature ValueError + assert get_temperature(None, "temp_key") is None + + # test startup placeholder values + update_callback({"outgoing_bit_depth": "0bit"}) + await hass.async_block_till_done() + assert ( + hass.states.get("sensor.madvr_envy_outgoing_bit_depth").state == STATE_UNKNOWN + ) + + update_callback({"outgoing_color_space": "?"}) + await hass.async_block_till_done() + assert ( + hass.states.get("sensor.madvr_envy_outgoing_color_space").state == STATE_UNKNOWN + ) diff --git a/tests/components/mailbox/test_init.py b/tests/components/mailbox/test_init.py index 31e831c3bae..6fcf9176aae 100644 --- a/tests/components/mailbox/test_init.py +++ b/tests/components/mailbox/test_init.py @@ -8,11 +8,11 @@ from typing import Any from aiohttp.test_utils import TestClient import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import mailbox from homeassistant.core import HomeAssistant from homeassistant.helpers import 
issue_registry as ir from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from tests.common import MockModule, mock_integration, mock_platform diff --git a/tests/components/mailgun/test_init.py b/tests/components/mailgun/test_init.py index 908e98ae31e..2e60c56faa4 100644 --- a/tests/components/mailgun/test_init.py +++ b/tests/components/mailgun/test_init.py @@ -10,7 +10,7 @@ from homeassistant import config_entries from homeassistant.components import mailgun, webhook from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_API_KEY, CONF_DOMAIN -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -29,7 +29,7 @@ async def http_client( @pytest.fixture -async def webhook_id_with_api_key(hass): +async def webhook_id_with_api_key(hass: HomeAssistant) -> str: """Initialize the Mailgun component and get the webhook_id.""" await async_setup_component( hass, @@ -53,7 +53,7 @@ async def webhook_id_with_api_key(hass): @pytest.fixture -async def webhook_id_without_api_key(hass): +async def webhook_id_without_api_key(hass: HomeAssistant) -> str: """Initialize the Mailgun component and get the webhook_id w/o API key.""" await async_setup_component(hass, mailgun.DOMAIN, {}) @@ -73,7 +73,7 @@ async def webhook_id_without_api_key(hass): @pytest.fixture -async def mailgun_events(hass): +async def mailgun_events(hass: HomeAssistant) -> list[Event]: """Return a list of mailgun_events triggered.""" events = [] diff --git a/tests/components/map/test_init.py b/tests/components/map/test_init.py index afafdd1eb16..217550852bd 100644 --- a/tests/components/map/test_init.py +++ b/tests/components/map/test_init.py @@ -1,10 +1,10 @@ """Test the Map 
initialization.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.map import DOMAIN from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant diff --git a/tests/components/marytts/test_tts.py b/tests/components/marytts/test_tts.py index 75784bb56c5..0ad27cde29b 100644 --- a/tests/components/marytts/test_tts.py +++ b/tests/components/marytts/test_tts.py @@ -34,9 +34,8 @@ def get_empty_wav() -> bytes: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/mastodon/__init__.py b/tests/components/mastodon/__init__.py new file mode 100644 index 00000000000..a4c730db07a --- /dev/null +++ b/tests/components/mastodon/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Mastodon integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py new file mode 100644 index 00000000000..c64de44d496 --- /dev/null +++ b/tests/components/mastodon/conftest.py @@ -0,0 +1,59 @@ +"""Mastodon tests configuration.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest + +from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, 
CONF_CLIENT_SECRET + +from tests.common import MockConfigEntry, load_json_object_fixture +from tests.components.smhi.common import AsyncMock + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.mastodon.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_mastodon_client() -> Generator[AsyncMock]: + """Mock a Mastodon client.""" + with ( + patch( + "homeassistant.components.mastodon.utils.Mastodon", + autospec=True, + ) as mock_client, + ): + client = mock_client.return_value + client.instance.return_value = load_json_object_fixture("instance.json", DOMAIN) + client.account_verify_credentials.return_value = load_json_object_fixture( + "account_verify_credentials.json", DOMAIN + ) + client.status_post.return_value = None + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="@trwnh@mastodon.social", + data={ + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + entry_id="01J35M4AH9HYRC2V0G6RNVNWJH", + unique_id="trwnh_mastodon_social", + version=1, + minor_version=2, + ) diff --git a/tests/components/mastodon/fixtures/account_verify_credentials.json b/tests/components/mastodon/fixtures/account_verify_credentials.json new file mode 100644 index 00000000000..401caa121ae --- /dev/null +++ b/tests/components/mastodon/fixtures/account_verify_credentials.json @@ -0,0 +1,78 @@ +{ + "id": "14715", + "username": "trwnh", + "acct": "trwnh", + "display_name": "infinite love ⴳ", + "locked": false, + "bot": false, + "created_at": "2016-11-24T10:02:12.085Z", + "note": "

i have approximate knowledge of many things. perpetual student. (nb/ace/they)

xmpp/email: a@trwnh.com
https://trwnh.com
help me live: https://liberapay.com/at or https://paypal.me/trwnh

- my triggers are moths and glitter
- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise
- dm me if i did something wrong, so i can improve
- purest person on fedi, do not lewd in my presence
- #1 ami cole fan account

:fatyoshi:

", + "url": "https://mastodon.social/@trwnh", + "avatar": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", + "avatar_static": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", + "header": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", + "header_static": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", + "followers_count": 821, + "following_count": 178, + "statuses_count": 33120, + "last_status_at": "2019-11-24T15:49:42.251Z", + "source": { + "privacy": "public", + "sensitive": false, + "language": "", + "note": "i have approximate knowledge of many things. perpetual student. (nb/ace/they)\r\n\r\nxmpp/email: a@trwnh.com\r\nhttps://trwnh.com\r\nhelp me live: https://liberapay.com/at or https://paypal.me/trwnh\r\n\r\n- my triggers are moths and glitter\r\n- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise\r\n- dm me if i did something wrong, so i can improve\r\n- purest person on fedi, do not lewd in my presence\r\n- #1 ami cole fan account\r\n\r\n:fatyoshi:", + "fields": [ + { + "name": "Website", + "value": "https://trwnh.com", + "verified_at": "2019-08-29T04:14:55.571+00:00" + }, + { + "name": "Sponsor", + "value": "https://liberapay.com/at", + "verified_at": "2019-11-15T10:06:15.557+00:00" + }, + { + "name": "Fan of:", + "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + "verified_at": null + }, + { + "name": "Main topics:", + "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. 
i'm just here to hang out and talk to cool people!", + "verified_at": null + } + ], + "follow_requests_count": 0 + }, + "emojis": [ + { + "shortcode": "fatyoshi", + "url": "https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png", + "static_url": "https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png", + "visible_in_picker": true + } + ], + "fields": [ + { + "name": "Website", + "value": "https://trwnh.com", + "verified_at": "2019-08-29T04:14:55.571+00:00" + }, + { + "name": "Sponsor", + "value": "https://liberapay.com/at", + "verified_at": "2019-11-15T10:06:15.557+00:00" + }, + { + "name": "Fan of:", + "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + "verified_at": null + }, + { + "name": "Main topics:", + "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. 
i'm just here to hang out and talk to cool people!", + "verified_at": null + } + ] +} diff --git a/tests/components/mastodon/fixtures/instance.json b/tests/components/mastodon/fixtures/instance.json new file mode 100644 index 00000000000..b0e904e80ef --- /dev/null +++ b/tests/components/mastodon/fixtures/instance.json @@ -0,0 +1,147 @@ +{ + "domain": "mastodon.social", + "title": "Mastodon", + "version": "4.0.0rc1", + "source_url": "https://github.com/mastodon/mastodon", + "description": "The original server operated by the Mastodon gGmbH non-profit", + "usage": { + "users": { + "active_month": 123122 + } + }, + "thumbnail": { + "url": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", + "blurhash": "UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$", + "versions": { + "@1x": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", + "@2x": "https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png" + } + }, + "languages": ["en"], + "configuration": { + "urls": { + "streaming": "wss://mastodon.social" + }, + "vapid": { + "public_key": "BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=" + }, + "accounts": { + "max_featured_tags": 10, + "max_pinned_statuses": 4 + }, + "statuses": { + "max_characters": 500, + "max_media_attachments": 4, + "characters_reserved_per_url": 23 + }, + "media_attachments": { + "supported_mime_types": [ + "image/jpeg", + "image/png", + "image/gif", + "image/heic", + "image/heif", + "image/webp", + "video/webm", + "video/mp4", + "video/quicktime", + "video/ogg", + "audio/wave", + "audio/wav", + "audio/x-wav", + "audio/x-pn-wave", + "audio/vnd.wave", + "audio/ogg", + "audio/vorbis", + "audio/mpeg", + "audio/mp3", + "audio/webm", + "audio/flac", + "audio/aac", + "audio/m4a", + "audio/x-m4a", + "audio/mp4", + "audio/3gpp", + "video/x-ms-asf" + ], + "image_size_limit": 10485760, + "image_matrix_limit": 16777216, + 
"video_size_limit": 41943040, + "video_frame_rate_limit": 60, + "video_matrix_limit": 2304000 + }, + "polls": { + "max_options": 4, + "max_characters_per_option": 50, + "min_expiration": 300, + "max_expiration": 2629746 + }, + "translation": { + "enabled": true + } + }, + "registrations": { + "enabled": false, + "approval_required": false, + "message": null + }, + "contact": { + "email": "staff@mastodon.social", + "account": { + "id": "1", + "username": "Gargron", + "acct": "Gargron", + "display_name": "Eugen 💀", + "locked": false, + "bot": false, + "discoverable": true, + "group": false, + "created_at": "2016-03-16T00:00:00.000Z", + "note": "

Founder, CEO and lead developer @Mastodon, Germany.

", + "url": "https://mastodon.social/@Gargron", + "avatar": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", + "avatar_static": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", + "header": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", + "header_static": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", + "followers_count": 133026, + "following_count": 311, + "statuses_count": 72605, + "last_status_at": "2022-10-31", + "noindex": false, + "emojis": [], + "fields": [ + { + "name": "Patreon", + "value": "https://www.patreon.com/mastodon", + "verified_at": null + } + ] + } + }, + "rules": [ + { + "id": "1", + "text": "Sexually explicit or violent media must be marked as sensitive when posting" + }, + { + "id": "2", + "text": "No racism, sexism, homophobia, transphobia, xenophobia, or casteism" + }, + { + "id": "3", + "text": "No incitement of violence or promotion of violent ideologies" + }, + { + "id": "4", + "text": "No harassment, dogpiling or doxxing of other users" + }, + { + "id": "5", + "text": "No content illegal in Germany" + }, + { + "id": "7", + "text": "Do not share intentionally false or misleading information" + } + ] +} diff --git a/tests/components/mastodon/snapshots/test_diagnostics.ambr b/tests/components/mastodon/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..982ecee7ee2 --- /dev/null +++ b/tests/components/mastodon/snapshots/test_diagnostics.ambr @@ -0,0 +1,247 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'account': dict({ + 'acct': 'trwnh', + 'avatar': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', + 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', + 'bot': False, + 'created_at': '2016-11-24T10:02:12.085Z', + 
'display_name': 'infinite love ⴳ', + 'emojis': list([ + dict({ + 'shortcode': 'fatyoshi', + 'static_url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png', + 'url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png', + 'visible_in_picker': True, + }), + ]), + 'fields': list([ + dict({ + 'name': 'Website', + 'value': 'trwnh.com', + 'verified_at': '2019-08-29T04:14:55.571+00:00', + }), + dict({ + 'name': 'Sponsor', + 'value': 'liberapay.com/at', + 'verified_at': '2019-11-15T10:06:15.557+00:00', + }), + dict({ + 'name': 'Fan of:', + 'value': 'Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)', + 'verified_at': None, + }), + dict({ + 'name': 'Main topics:', + 'value': 'systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!', + 'verified_at': None, + }), + ]), + 'followers_count': 821, + 'following_count': 178, + 'header': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', + 'header_static': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', + 'id': '14715', + 'last_status_at': '2019-11-24T15:49:42.251Z', + 'locked': False, + 'note': '

i have approximate knowledge of many things. perpetual student. (nb/ace/they)

xmpp/email: a@trwnh.com
trwnh.com
help me live: liberapay.com/at or paypal.me/trwnh

- my triggers are moths and glitter
- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise
- dm me if i did something wrong, so i can improve
- purest person on fedi, do not lewd in my presence
- #1 ami cole fan account

:fatyoshi:

', + 'source': dict({ + 'fields': list([ + dict({ + 'name': 'Website', + 'value': 'https://trwnh.com', + 'verified_at': '2019-08-29T04:14:55.571+00:00', + }), + dict({ + 'name': 'Sponsor', + 'value': 'https://liberapay.com/at', + 'verified_at': '2019-11-15T10:06:15.557+00:00', + }), + dict({ + 'name': 'Fan of:', + 'value': "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + 'verified_at': None, + }), + dict({ + 'name': 'Main topics:', + 'value': "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", + 'verified_at': None, + }), + ]), + 'follow_requests_count': 0, + 'language': '', + 'note': ''' + i have approximate knowledge of many things. perpetual student. (nb/ace/they) + + xmpp/email: a@trwnh.com + https://trwnh.com + help me live: https://liberapay.com/at or https://paypal.me/trwnh + + - my triggers are moths and glitter + - i have all notifs except mentions turned off, so please interact if you wanna be friends! 
i literally will not notice otherwise + - dm me if i did something wrong, so i can improve + - purest person on fedi, do not lewd in my presence + - #1 ami cole fan account + + :fatyoshi: + ''', + 'privacy': 'public', + 'sensitive': False, + }), + 'statuses_count': 33120, + 'url': 'https://mastodon.social/@trwnh', + 'username': 'trwnh', + }), + 'instance': dict({ + 'configuration': dict({ + 'accounts': dict({ + 'max_featured_tags': 10, + 'max_pinned_statuses': 4, + }), + 'media_attachments': dict({ + 'image_matrix_limit': 16777216, + 'image_size_limit': 10485760, + 'supported_mime_types': list([ + 'image/jpeg', + 'image/png', + 'image/gif', + 'image/heic', + 'image/heif', + 'image/webp', + 'video/webm', + 'video/mp4', + 'video/quicktime', + 'video/ogg', + 'audio/wave', + 'audio/wav', + 'audio/x-wav', + 'audio/x-pn-wave', + 'audio/vnd.wave', + 'audio/ogg', + 'audio/vorbis', + 'audio/mpeg', + 'audio/mp3', + 'audio/webm', + 'audio/flac', + 'audio/aac', + 'audio/m4a', + 'audio/x-m4a', + 'audio/mp4', + 'audio/3gpp', + 'video/x-ms-asf', + ]), + 'video_frame_rate_limit': 60, + 'video_matrix_limit': 2304000, + 'video_size_limit': 41943040, + }), + 'polls': dict({ + 'max_characters_per_option': 50, + 'max_expiration': 2629746, + 'max_options': 4, + 'min_expiration': 300, + }), + 'statuses': dict({ + 'characters_reserved_per_url': 23, + 'max_characters': 500, + 'max_media_attachments': 4, + }), + 'translation': dict({ + 'enabled': True, + }), + 'urls': dict({ + 'streaming': 'wss://mastodon.social', + }), + 'vapid': dict({ + 'public_key': 'BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=', + }), + }), + 'contact': dict({ + 'account': dict({ + 'acct': 'Gargron', + 'avatar': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', + 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', + 'bot': False, + 'created_at': '2016-03-16T00:00:00.000Z', + 
'discoverable': True, + 'display_name': 'Eugen 💀', + 'emojis': list([ + ]), + 'fields': list([ + dict({ + 'name': 'Patreon', + 'value': 'patreon.com/mastodon', + 'verified_at': None, + }), + ]), + 'followers_count': 133026, + 'following_count': 311, + 'group': False, + 'header': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', + 'header_static': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', + 'id': '1', + 'last_status_at': '2022-10-31', + 'locked': False, + 'noindex': False, + 'note': '

Founder, CEO and lead developer @Mastodon, Germany.

', + 'statuses_count': 72605, + 'url': 'https://mastodon.social/@Gargron', + 'username': 'Gargron', + }), + 'email': 'staff@mastodon.social', + }), + 'description': 'The original server operated by the Mastodon gGmbH non-profit', + 'domain': 'mastodon.social', + 'languages': list([ + 'en', + ]), + 'registrations': dict({ + 'approval_required': False, + 'enabled': False, + 'message': None, + }), + 'rules': list([ + dict({ + 'id': '1', + 'text': 'Sexually explicit or violent media must be marked as sensitive when posting', + }), + dict({ + 'id': '2', + 'text': 'No racism, sexism, homophobia, transphobia, xenophobia, or casteism', + }), + dict({ + 'id': '3', + 'text': 'No incitement of violence or promotion of violent ideologies', + }), + dict({ + 'id': '4', + 'text': 'No harassment, dogpiling or doxxing of other users', + }), + dict({ + 'id': '5', + 'text': 'No content illegal in Germany', + }), + dict({ + 'id': '7', + 'text': 'Do not share intentionally false or misleading information', + }), + ]), + 'source_url': 'https://github.com/mastodon/mastodon', + 'thumbnail': dict({ + 'blurhash': 'UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$', + 'url': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', + 'versions': dict({ + '@1x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', + '@2x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png', + }), + }), + 'title': 'Mastodon', + 'usage': dict({ + 'users': dict({ + 'active_month': 123122, + }), + }), + 'version': '4.0.0rc1', + }), + }) +# --- diff --git a/tests/components/mastodon/snapshots/test_init.ambr b/tests/components/mastodon/snapshots/test_init.ambr new file mode 100644 index 00000000000..37fa765acea --- /dev/null +++ b/tests/components/mastodon/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 
'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'mastodon', + 'trwnh_mastodon_social', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Mastodon gGmbH', + 'model': '@trwnh@mastodon.social', + 'model_id': None, + 'name': 'Mastodon @trwnh@mastodon.social', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.0.0rc1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/mastodon/snapshots/test_sensor.ambr b/tests/components/mastodon/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c8df8cdab19 --- /dev/null +++ b/tests/components/mastodon/snapshots/test_sensor.ambr @@ -0,0 +1,151 @@ +# serializer version: 1 +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_followers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_followers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Followers', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'followers', + 'unique_id': 'trwnh_mastodon_social_followers', + 'unit_of_measurement': 'accounts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_followers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon @trwnh@mastodon.social Followers', + 'state_class': , + 'unit_of_measurement': 'accounts', + }), + 'context': , + 'entity_id': 
'sensor.mastodon_trwnh_mastodon_social_followers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '821', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_following', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Following', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'following', + 'unique_id': 'trwnh_mastodon_social_following', + 'unit_of_measurement': 'accounts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon @trwnh@mastodon.social Following', + 'state_class': , + 'unit_of_measurement': 'accounts', + }), + 'context': , + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_following', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '178', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_posts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Posts', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'posts', + 'unique_id': 'trwnh_mastodon_social_posts', + 'unit_of_measurement': 'posts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon @trwnh@mastodon.social Posts', + 'state_class': , + 'unit_of_measurement': 'posts', + }), + 'context': , + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_posts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33120', + }) +# --- diff --git a/tests/components/mastodon/test_config_flow.py b/tests/components/mastodon/test_config_flow.py new file mode 100644 index 00000000000..073a6534d7d --- /dev/null +++ b/tests/components/mastodon/test_config_flow.py @@ -0,0 +1,179 @@ +"""Tests for the Mastodon config flow.""" + +from unittest.mock import AsyncMock + +from mastodon.Mastodon import MastodonNetworkError, MastodonUnauthorizedError +import pytest + +from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: 
"client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "@trwnh@mastodon.social" + assert result["data"] == { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + } + assert result["result"].unique_id == "trwnh_mastodon_social" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MastodonNetworkError, "network_error"), + (MastodonUnauthorizedError, "unauthorized_error"), + (Exception, "unknown"), + ], +) +async def test_flow_errors( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test flow errors.""" + mock_mastodon_client.account_verify_credentials.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_mastodon_client.account_verify_credentials.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + 
mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_flow( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test importing yaml config.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "import_client_id", + CONF_CLIENT_SECRET: "import_client_secret", + CONF_ACCESS_TOKEN: "import_access_token", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MastodonNetworkError, "network_error"), + (MastodonUnauthorizedError, "unauthorized_error"), + (Exception, "unknown"), + ], +) +async def test_import_flow_abort( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test importing yaml config abort.""" + mock_mastodon_client.account_verify_credentials.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "import_client_id", + CONF_CLIENT_SECRET: "import_client_secret", + CONF_ACCESS_TOKEN: "import_access_token", + }, + ) + assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/mastodon/test_diagnostics.py 
b/tests/components/mastodon/test_diagnostics.py new file mode 100644 index 00000000000..c2de15d1a51 --- /dev/null +++ b/tests/components/mastodon/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Test Mastodon diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/mastodon/test_init.py b/tests/components/mastodon/test_init.py new file mode 100644 index 00000000000..c3d0728fe08 --- /dev/null +++ b/tests/components/mastodon/test_init.py @@ -0,0 +1,82 @@ +"""Tests for the Mastodon integration.""" + +from unittest.mock import AsyncMock + +from mastodon.Mastodon import MastodonError +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.mastodon.config_flow import MastodonConfigFlow +from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot + + +async def test_initialization_failure( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test initialization failure.""" + mock_mastodon_client.instance.side_effect = MastodonError + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_migrate( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, +) -> None: + """Test migration.""" + # Setup the config entry + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + title="@trwnh@mastodon.social", + unique_id="client_id", + version=1, + minor_version=1, + ) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # Check migration was successful + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry.data == { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + } + assert config_entry.version == MastodonConfigFlow.VERSION + assert config_entry.minor_version == MastodonConfigFlow.MINOR_VERSION + assert config_entry.unique_id == "trwnh_mastodon_social" diff --git 
a/tests/components/mastodon/test_notify.py b/tests/components/mastodon/test_notify.py new file mode 100644 index 00000000000..ab2d7456baf --- /dev/null +++ b/tests/components/mastodon/test_notify.py @@ -0,0 +1,38 @@ +"""Tests for the Mastodon notify platform.""" + +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_notify( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test sending a message.""" + await setup_integration(hass, mock_config_entry) + + assert hass.services.has_service(NOTIFY_DOMAIN, "trwnh_mastodon_social") + + await hass.services.async_call( + NOTIFY_DOMAIN, + "trwnh_mastodon_social", + { + "message": "test toot", + }, + blocking=True, + return_response=False, + ) + + assert mock_mastodon_client.status_post.assert_called_once diff --git a/tests/components/mastodon/test_sensor.py b/tests/components/mastodon/test_sensor.py new file mode 100644 index 00000000000..343505260e2 --- /dev/null +++ b/tests/components/mastodon/test_sensor.py @@ -0,0 +1,27 @@ +"""Tests for the Mastodon sensors.""" + +from unittest.mock import AsyncMock, patch + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the sensor entities.""" + with patch("homeassistant.components.mastodon.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/matrix/conftest.py b/tests/components/matrix/conftest.py index bb5448a8a09..0b84aff5434 100644 --- a/tests/components/matrix/conftest.py +++ b/tests/components/matrix/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from pathlib import Path import re import tempfile @@ -24,7 +25,6 @@ from nio import ( ) from PIL import Image import pytest -from typing_extensions import Generator from homeassistant.components.matrix import ( CONF_COMMANDS, @@ -48,7 +48,7 @@ from homeassistant.const import ( CONF_USERNAME, CONF_VERIFY_SSL, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from homeassistant.setup import async_setup_component from tests.common import async_capture_events @@ -294,13 +294,13 @@ async def matrix_bot( @pytest.fixture -def matrix_events(hass: HomeAssistant): +def matrix_events(hass: HomeAssistant) -> list[Event]: """Track event calls.""" return async_capture_events(hass, MATRIX_DOMAIN) @pytest.fixture -def command_events(hass: HomeAssistant): +def command_events(hass: HomeAssistant) -> list[Event]: """Track event calls.""" return async_capture_events(hass, EVENT_MATRIX_COMMAND) diff --git a/tests/components/matrix/test_commands.py b/tests/components/matrix/test_commands.py index 8539252ad66..dabee74fdc3 100644 --- a/tests/components/matrix/test_commands.py +++ 
b/tests/components/matrix/test_commands.py @@ -1,11 +1,11 @@ """Test MatrixBot's ability to parse and respond to commands in matrix rooms.""" +from dataclasses import dataclass from functools import partial from itertools import chain from typing import Any from nio import MatrixRoom, RoomMessageText -from pydantic.dataclasses import dataclass import pytest from homeassistant.components.matrix import MatrixBot, RoomID diff --git a/tests/components/matrix/test_send_message.py b/tests/components/matrix/test_send_message.py index cdea2270cf9..3db2877e789 100644 --- a/tests/components/matrix/test_send_message.py +++ b/tests/components/matrix/test_send_message.py @@ -10,7 +10,7 @@ from homeassistant.components.matrix import ( ) from homeassistant.components.matrix.const import FORMAT_HTML, SERVICE_SEND_MESSAGE from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, ATTR_TARGET -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from .conftest import TEST_BAD_ROOM, TEST_JOINABLE_ROOMS @@ -19,7 +19,7 @@ async def test_send_message( hass: HomeAssistant, matrix_bot: MatrixBot, image_path, - matrix_events, + matrix_events: list[Event], caplog: pytest.LogCaptureFixture, ) -> None: """Test the send_message service.""" @@ -63,7 +63,7 @@ async def test_send_message( async def test_unsendable_message( hass: HomeAssistant, matrix_bot: MatrixBot, - matrix_events, + matrix_events: list[Event], caplog: pytest.LogCaptureFixture, ) -> None: """Test the send_message service with an invalid room.""" diff --git a/tests/components/matter/common.py b/tests/components/matter/common.py index 7878ac564fd..541f7383f1d 100644 --- a/tests/components/matter/common.py +++ b/tests/components/matter/common.py @@ -31,9 +31,12 @@ async def setup_integration_with_node_fixture( hass: HomeAssistant, node_fixture: str, client: MagicMock, + override_attributes: dict[str, Any] | None = None, ) -> MatterNode: """Set up Matter integration with fixture 
as node.""" node_data = load_and_parse_node_fixture(node_fixture) + if override_attributes: + node_data["attributes"].update(override_attributes) node = MatterNode( dataclass_from_dict( MatterNodeData, diff --git a/tests/components/matter/conftest.py b/tests/components/matter/conftest.py index 05fd776e57a..f3d8740a73b 100644 --- a/tests/components/matter/conftest.py +++ b/tests/components/matter/conftest.py @@ -3,13 +3,13 @@ from __future__ import annotations import asyncio +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch from matter_server.client.models.node import MatterNode from matter_server.common.const import SCHEMA_VERSION from matter_server.common.models import ServerInfoMessage import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.core import HomeAssistant @@ -51,6 +51,7 @@ async def matter_client_fixture() -> AsyncGenerator[MagicMock]: wifi_credentials_set=True, thread_credentials_set=True, min_supported_schema_version=SCHEMA_VERSION, + bluetooth_enabled=False, ) yield client diff --git a/tests/components/matter/fixtures/config_entry_diagnostics.json b/tests/components/matter/fixtures/config_entry_diagnostics.json index f591709fbda..000b0d4e2e6 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics.json @@ -6,7 +6,8 @@ "sdk_version": "2022.12.0", "wifi_credentials_set": true, "thread_credentials_set": false, - "min_supported_schema_version": 1 + "min_supported_schema_version": 1, + "bluetooth_enabled": false }, "nodes": [ { diff --git a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json index 503fd3b9a7a..95447783bbc 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json @@ -7,7 
+7,8 @@ "sdk_version": "2022.12.0", "wifi_credentials_set": true, "thread_credentials_set": false, - "min_supported_schema_version": 1 + "min_supported_schema_version": 1, + "bluetooth_enabled": false }, "nodes": [ { diff --git a/tests/components/matter/fixtures/nodes/dimmable-light.json b/tests/components/matter/fixtures/nodes/dimmable-light.json index 74f132a88a9..58c22f1b807 100644 --- a/tests/components/matter/fixtures/nodes/dimmable-light.json +++ b/tests/components/matter/fixtures/nodes/dimmable-light.json @@ -78,7 +78,7 @@ ], "0/42/0": [], "0/42/1": true, - "0/42/2": 0, + "0/42/2": 1, "0/42/3": 0, "0/42/65532": 0, "0/42/65533": 1, @@ -365,7 +365,148 @@ "1/29/65533": 1, "1/29/65528": [], "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533] + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "6/80/0": "LED Color", + "6/80/1": 0, + "6/80/2": [ + { + "0": "Red", + "1": 0, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Orange", + "1": 1, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Lemon", + "1": 2, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Lime", + "1": 3, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Green", + "1": 4, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Teal", + "1": 5, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Cyan", + "1": 6, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Aqua", + "1": 7, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Blue", + "1": 8, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Violet", + "1": 9, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Magenta", + "1": 10, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Pink", + "1": 11, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "White", + "1": 12, + "2": [ + { + "0": 0, + "1": 0 + } + ] + } + ], + "6/80/3": 7, + "6/80/65532": 0, + "6/80/65533": 1, + "6/80/65528": [], + "6/80/65529": [0], + "6/80/65530": [], + 
"6/80/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533] }, "available": true, "attribute_subscriptions": [] diff --git a/tests/components/matter/fixtures/nodes/door-lock.json b/tests/components/matter/fixtures/nodes/door-lock.json index 8a3f0fd68dd..b6231e04af4 100644 --- a/tests/components/matter/fixtures/nodes/door-lock.json +++ b/tests/components/matter/fixtures/nodes/door-lock.json @@ -469,7 +469,7 @@ "1/47/65531": [ 0, 1, 2, 14, 15, 16, 19, 65528, 65529, 65530, 65531, 65532, 65533 ], - "1/257/0": 1, + "1/257/0": 0, "1/257/1": 0, "1/257/2": true, "1/257/3": 1, diff --git a/tests/components/matter/fixtures/nodes/fan.json b/tests/components/matter/fixtures/nodes/fan.json new file mode 100644 index 00000000000..e33c29ce66d --- /dev/null +++ b/tests/components/matter/fixtures/nodes/fan.json @@ -0,0 +1,340 @@ +{ + "node_id": 29, + "date_commissioned": "2024-07-25T08:34:23.014310", + "last_interview": "2024-07-25T08:34:23.014315", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 18, + "1": 1 + }, + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 42, 48, 49, 51, 53, 60, 62, 63, 64], + "0/29/2": [41], + "0/29/3": [1, 2, 3, 4, 5, 6], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65530": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 5 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65530": [0, 1], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "Mock", + "0/40/2": 4961, + "0/40/3": "Fan", + "0/40/4": 2, + "0/40/5": "Mocked Fan Switch", + "0/40/6": "**REDACTED**", + "0/40/7": 1, + "0/40/8": "1.0", + "0/40/9": 4, + "0/40/10": "0.0.1", + "0/40/11": "", + "0/40/12": "", + "0/40/13": "", + 
"0/40/14": "", + "0/40/15": "", + "0/40/16": false, + "0/40/17": true, + "0/40/18": "", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/65532": 0, + "0/40/65533": 2, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65530": [0], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/42/0": [], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65530": [0, 1, 2], + "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65530": [], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "J/YquJb4Ao4=", + "1": true + } + ], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 0, + "0/49/6": "J/YquJb4Ao4=", + "0/49/7": null, + "0/49/65532": 2, + "0/49/65533": 1, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65530": [], + "0/49/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/51/0": [], + "0/51/1": 15, + "0/51/2": 5688, + "0/51/3": 1, + "0/51/4": 0, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 1, + "0/51/65528": [], + "0/51/65529": [0], + "0/51/65530": [3], + "0/51/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/53/0": 25, + "0/53/1": 5, + "0/53/2": "ha-thread", + "0/53/3": 12768, + "0/53/4": 5924944741529093989, + "0/53/5": "", + "0/53/6": 0, + "0/53/7": [], + "0/53/8": [], + "0/53/9": 933034070, + "0/53/10": 68, + "0/53/11": 16, + "0/53/12": 151, + "0/53/13": 31, + "0/53/14": 1, + "0/53/15": 0, + "0/53/16": 1, + "0/53/17": 0, + "0/53/18": 0, + "0/53/19": 1, + 
"0/53/20": 0, + "0/53/21": 0, + "0/53/22": 3533, + "0/53/23": 3105, + "0/53/24": 428, + "0/53/25": 1889, + "0/53/26": 1879, + "0/53/27": 1644, + "0/53/28": 2317, + "0/53/29": 0, + "0/53/30": 1216, + "0/53/31": 0, + "0/53/32": 0, + "0/53/33": 534, + "0/53/34": 10, + "0/53/35": 0, + "0/53/36": 42, + "0/53/37": 0, + "0/53/38": 0, + "0/53/39": 18130, + "0/53/40": 12178, + "0/53/41": 5863, + "0/53/42": 5103, + "0/53/43": 0, + "0/53/44": 11639, + "0/53/45": 1216, + "0/53/46": 0, + "0/53/47": 0, + "0/53/48": 0, + "0/53/49": 14, + "0/53/50": 0, + "0/53/51": 89, + "0/53/52": 0, + "0/53/53": 69, + "0/53/54": 0, + "0/53/55": 0, + "0/53/56": 131072, + "0/53/57": 0, + "0/53/58": 0, + "0/53/59": { + "0": 672, + "1": 8335 + }, + "0/53/60": "AB//4A==", + "0/53/61": { + "0": true, + "1": false, + "2": true, + "3": true, + "4": true, + "5": true, + "6": false, + "7": true, + "8": true, + "9": true, + "10": true, + "11": true + }, + "0/53/62": [0, 0, 0, 0], + "0/53/65532": 15, + "0/53/65533": 1, + "0/53/65528": [], + "0/53/65529": [0], + "0/53/65530": [], + "0/53/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65530": [], + "0/60/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 5, + "0/62/3": 4, + "0/62/4": [], + "0/62/5": 5, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65530": [], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + 
"0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65530": [], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/64/0": [ + { + "0": "Vendor", + "1": "Mocked" + }, + { + "0": "Product", + "1": "Fan" + } + ], + "0/64/65532": 0, + "0/64/65533": 1, + "0/64/65528": [], + "0/64/65529": [], + "0/64/65530": [], + "0/64/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 2, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65530": [], + "1/3/65531": [0, 1, 65528, 65529, 65530, 65531, 65532, 65533], + "1/4/0": 128, + "1/4/65532": 1, + "1/4/65533": 4, + "1/4/65528": [0, 1, 2, 3], + "1/4/65529": [0, 1, 2, 3, 4, 5], + "1/4/65530": [], + "1/4/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 43, + "1": 1 + } + ], + "1/29/1": [3, 4, 6, 8, 29, 64, 80, 514, 305134641], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65530": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "1/64/0": [ + { + "0": "DeviceType", + "1": "Fan" + } + ], + "1/64/65532": 0, + "1/64/65533": 1, + "1/64/65528": [], + "1/64/65529": [], + "1/64/65530": [], + "1/64/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], + + "1/514/0": 8, + "1/514/1": 2, + "1/514/2": 0, + "1/514/3": 0, + "1/514/4": 3, + "1/514/5": 0, + "1/514/6": 0, + "1/514/9": 3, + "1/514/10": 0, + "1/514/65532": 25, + "1/514/65533": 4, + "1/514/65528": [], + "1/514/65529": [0], + "1/514/65530": [], + "1/514/65531": [ + 0, 1, 2, 3, 4, 5, 6, 9, 10, 65528, 65529, 65530, 65531, 65532, 65533 + ] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/generic-switch-multi.json b/tests/components/matter/fixtures/nodes/generic-switch-multi.json index f564e91a1ce..8923198c31e 100644 --- a/tests/components/matter/fixtures/nodes/generic-switch-multi.json +++ 
b/tests/components/matter/fixtures/nodes/generic-switch-multi.json @@ -72,8 +72,9 @@ "1/59/0": 2, "1/59/65533": 1, "1/59/1": 0, + "1/59/2": 2, "1/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/59/65532": 14, + "1/59/65532": 30, "1/59/65528": [], "1/64/0": [ { @@ -101,8 +102,9 @@ "2/59/0": 2, "2/59/65533": 1, "2/59/1": 0, + "2/59/2": 2, "2/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "2/59/65532": 14, + "2/59/65532": 30, "2/59/65528": [], "2/64/0": [ { diff --git a/tests/components/matter/fixtures/nodes/generic-switch.json b/tests/components/matter/fixtures/nodes/generic-switch.json index 80773915748..9b334c5fb54 100644 --- a/tests/components/matter/fixtures/nodes/generic-switch.json +++ b/tests/components/matter/fixtures/nodes/generic-switch.json @@ -73,7 +73,7 @@ "1/59/65533": 1, "1/59/1": 0, "1/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/59/65532": 30, + "1/59/65532": 14, "1/59/65528": [] }, "available": true, diff --git a/tests/components/matter/fixtures/nodes/microwave-oven.json b/tests/components/matter/fixtures/nodes/microwave-oven.json new file mode 100644 index 00000000000..ed0a4accd6a --- /dev/null +++ b/tests/components/matter/fixtures/nodes/microwave-oven.json @@ -0,0 +1,405 @@ +{ + "node_id": 157, + "date_commissioned": "2024-07-04T12:31:22.759270", + "last_interview": "2024-07-04T12:31:22.759275", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 44, 48, 49, 51, 54, 60, 62, 63], + "0/29/2": [], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 
65528, 65529, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "Mock", + "0/40/2": 65521, + "0/40/3": "Microwave Oven", + "0/40/4": 32769, + "0/40/5": "", + "0/40/6": "**REDACTED**", + "0/40/7": 0, + "0/40/8": "TEST_VERSION", + "0/40/9": 1, + "0/40/10": "1.0", + "0/40/11": "20200101", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "TEST_SN", + "0/40/16": false, + "0/40/18": "D5908CF5E1382F42", + "0/40/19": { + "0": 3, + "1": 65535 + }, + "0/40/20": null, + "0/40/21": 16973824, + "0/40/22": 1, + "0/40/65532": 0, + "0/40/65533": 3, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 20, 21, + 22, 65528, 65529, 65531, 65532, 65533 + ], + "0/44/0": 0, + "0/44/65532": 0, + "0/44/65533": 1, + "0/44/65528": [], + "0/44/65529": [], + "0/44/65531": [0, 65528, 65529, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 2, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "ZW5kMA==", + "1": true + } + ], + "0/49/2": 0, + "0/49/3": 0, + "0/49/4": true, + "0/49/5": null, + "0/49/6": null, + "0/49/7": null, + "0/49/65532": 4, + "0/49/65533": 2, + "0/49/65528": [], + "0/49/65529": [], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/51/0": [ + { + "0": "vethd3cc78a", + "1": true, + "2": null, + "3": null, + "4": "RiMoOM7I", + "5": [], + "6": ["/oAAAAAAAABEIyj//jjOyA=="], + "7": 0 + }, + { + "0": "veth86f4b74", + "1": true, + "2": null, + "3": null, + "4": "ehLA7XI6", + "5": [], + "6": ["/oAAAAAAAAB4EsD//u1yOg=="], + "7": 0 + }, + { + "0": "veth36c1460", + "1": true, + "2": null, + "3": null, + "4": "0sdiwOO7", + "5": [], + "6": ["/oAAAAAAAADQx2L//sDjuw=="], + "7": 0 + }, + { + "0": "veth55a0982", + "1": true, + "2": null, + "3": null, + "4": 
"fuu5VpgB", + "5": [], + "6": ["/oAAAAAAAAB867n//laYAQ=="], + "7": 0 + }, + { + "0": "vethd446fa5", + "1": true, + "2": null, + "3": null, + "4": "QsY5wCp1", + "5": [], + "6": ["/oAAAAAAAABAxjn//sAqdQ=="], + "7": 0 + }, + { + "0": "vethfc6e4d6", + "1": true, + "2": null, + "3": null, + "4": "IsHWia4E", + "5": [], + "6": ["/oAAAAAAAAAgwdb//omuBA=="], + "7": 0 + }, + { + "0": "veth4b35142", + "1": true, + "2": null, + "3": null, + "4": "RizM/XJz", + "5": [], + "6": ["/oAAAAAAAABELMz//v1ycw=="], + "7": 0 + }, + { + "0": "vetha0a808d", + "1": true, + "2": null, + "3": null, + "4": "JrxkpiTq", + "5": [], + "6": ["/oAAAAAAAAAkvGT//qYk6g=="], + "7": 0 + }, + { + "0": "hassio", + "1": true, + "2": null, + "3": null, + "4": "AkL+6fKF", + "5": ["rB4gAQ=="], + "6": ["/oAAAAAAAAAAQv7//unyhQ=="], + "7": 0 + }, + { + "0": "docker0", + "1": true, + "2": null, + "3": null, + "4": "AkKzcIpP", + "5": ["rB7oAQ=="], + "6": ["/oAAAAAAAAAAQrP//nCKTw=="], + "7": 0 + }, + { + "0": "end0", + "1": true, + "2": null, + "3": null, + "4": "5F8BoroJ", + "5": ["wKgBAg=="], + "6": [ + "KgKkZACnAAHGF8Tinim+lQ==", + "/XH1Cm7wY08fhLPRgO32Uw==", + "/oAAAAAAAAAENYnD2gV25w==" + ], + "7": 2 + }, + { + "0": "lo", + "1": true, + "2": null, + "3": null, + "4": "AAAAAAAA", + "5": ["fwAAAQ=="], + "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], + "7": 0 + } + ], + "0/51/1": 1, + "0/51/2": 16, + "0/51/3": 0, + "0/51/4": 0, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 2, + "0/51/65528": [2], + "0/51/65529": [0, 1], + "0/51/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 + ], + "0/54/0": null, + "0/54/1": null, + "0/54/2": null, + "0/54/3": null, + "0/54/4": null, + "0/54/5": null, + "0/54/6": null, + "0/54/7": null, + "0/54/8": null, + "0/54/9": null, + "0/54/10": null, + "0/54/11": null, + "0/54/12": null, + "0/54/65532": 3, + "0/54/65533": 1, + "0/54/65528": [], + "0/54/65529": [0], + "0/54/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 
12, 65528, 65529, 65531, 65532, + 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [ + { + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRnRgkBwEkCAEwCUEEleMInA+X+lZO6bSa7ysHaAvYS13Fg9GoRuhiFk+wvtjLUrouyH+DUp3p3purrVdfUWTp03damVsxp9Lv48goDzcKNQEoARgkAgE2AwQCBAEYMAQUrD2d44zyVXjKbyYgNaEibaXFI7IwBRTphWiJ/NqGe3Cx3Nj8H02NgGioSRgwC0CaASOOwmsHE8cNw7FhQDtRhh0ztvwdfZKANU93vrX/+ww8UifrTjUIgvobgixpCGxmGvEmk3RN7TX6lgX4Qz7MGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEYztrLK2UY1ORHUEFLO7PDfVjw/MnMDNX5kjdHHDU7npeITnSyg/kxxUM+pD7ccxfDuHQKHbBq9+qbJi8oGik8DcKNQEpARgkAmAwBBTphWiJ/NqGe3Cx3Nj8H02NgGioSTAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQOOcZAL8XEktvE5sjrUmFNhkP2g3Ef+4BHtogItdZYyA9E/WbzW25E0UxZInwjjIzH3YimDUZVoEWGML8NV2kCEY", + "254": 1 + } + ], + "0/62/1": [ + { + "1": "BAg5aeR7RuFKZhukCxMGglCd00dKlhxGq8BbjeyZClKz5kN2Ytzav0xWsiWEEb3s9uvMIYFoQYULnSJvOMTcD14=", + "2": 65521, + "3": 1, + "4": 157, + "5": "", + "254": 1 + } + ], + "0/62/2": 16, + "0/62/3": 1, + "0/62/4": [ + "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEECDlp5HtG4UpmG6QLEwaCUJ3TR0qWHEarwFuN7JkKUrPmQ3Zi3Nq/TFayJYQRvez268whgWhBhQudIm84xNwPXjcKNQEpARgkAmAwBBTJ3+WZAQkWgZboUpiyZL3FV8R8UzAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQO9QSAdvJkM6b/wIc07MCw1ma46lTyGYG8nvpn0ICI73nuD3QeaWwGIQTkVGEpzF+TuDK7gtTz7YUrR+PSnvMk8Y" + ], + "0/62/5": 1, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 0, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 
64], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 121, + "1": 1 + } + ], + "1/29/1": [3, 29, 94, 95, 96], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/94/0": [ + { + "0": "Normal", + "1": 0, + "2": [ + { + "1": 16384 + } + ] + }, + { + "0": "Defrost", + "1": 1, + "2": [ + { + "1": 16385 + } + ] + } + ], + "1/94/1": 0, + "1/94/65532": 0, + "1/94/65533": 1, + "1/94/65528": [], + "1/94/65529": [], + "1/94/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/95/0": 30, + "1/95/1": 86400, + "1/95/2": 90, + "1/95/3": 20, + "1/95/4": 90, + "1/95/5": 10, + "1/95/8": 1000, + "1/95/65532": 5, + "1/95/65533": 1, + "1/95/65528": [], + "1/95/65529": [0, 1], + "1/95/65531": [0, 1, 2, 3, 4, 5, 8, 65528, 65529, 65531, 65532, 65533], + "1/96/0": null, + "1/96/1": null, + "1/96/2": 30, + "1/96/3": [ + { + "0": 0 + }, + { + "0": 1 + }, + { + "0": 2 + }, + { + "0": 3 + } + ], + "1/96/4": 0, + "1/96/5": { + "0": 0 + }, + "1/96/65532": 0, + "1/96/65533": 2, + "1/96/65528": [4], + "1/96/65529": [0, 1, 2, 3], + "1/96/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/test_binary_sensor.py b/tests/components/matter/test_binary_sensor.py index becedc0af62..f419a12c59f 100644 --- a/tests/components/matter/test_binary_sensor.py +++ b/tests/components/matter/test_binary_sensor.py @@ -1,10 +1,10 @@ """Test Matter binary sensors.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from matter_server.client.models.node import MatterNode import pytest -from typing_extensions import Generator from homeassistant.components.matter.binary_sensor import ( DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS, diff --git a/tests/components/matter/test_climate.py b/tests/components/matter/test_climate.py index 
6a4cf34a640..4d6978edfde 100644 --- a/tests/components/matter/test_climate.py +++ b/tests/components/matter/test_climate.py @@ -315,14 +315,19 @@ async def test_room_airconditioner( state = hass.states.get("climate.room_airconditioner_thermostat") assert state assert state.attributes["current_temperature"] == 20 - assert state.attributes["min_temp"] == 16 - assert state.attributes["max_temp"] == 32 + # room airconditioner has mains power on OnOff cluster with value set to False + assert state.state == HVACMode.OFF # test supported features correctly parsed # WITHOUT temperature_range support mask = ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.TURN_OFF assert state.attributes["supported_features"] & mask == mask + # set mains power to ON (OnOff cluster) + set_node_attribute(room_airconditioner, 1, 6, 0, True) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("climate.room_airconditioner_thermostat") + # test supported HVAC modes include fan and dry modes assert state.attributes["hvac_modes"] == [ HVACMode.OFF, @@ -345,3 +350,9 @@ async def test_room_airconditioner( state = hass.states.get("climate.room_airconditioner_thermostat") assert state assert state.state == HVACMode.DRY + + # test featuremap update + set_node_attribute(room_airconditioner, 1, 513, 65532, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("climate.room_airconditioner_thermostat") + assert state.attributes["supported_features"] & ClimateEntityFeature.TURN_ON diff --git a/tests/components/matter/test_config_flow.py b/tests/components/matter/test_config_flow.py index 562cf4bb86a..642bfe0f804 100644 --- a/tests/components/matter/test_config_flow.py +++ b/tests/components/matter/test_config_flow.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator from ipaddress import ip_address from typing import Any from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, 
patch from matter_server.client.exceptions import CannotConnect, InvalidServerVersion import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.hassio import HassioAPIError, HassioServiceInfo diff --git a/tests/components/matter/test_event.py b/tests/components/matter/test_event.py index a7bd7c91f7b..183867642f5 100644 --- a/tests/components/matter/test_event.py +++ b/tests/components/matter/test_event.py @@ -50,8 +50,6 @@ async def test_generic_switch_node( "short_release", "long_press", "long_release", - "multi_press_ongoing", - "multi_press_complete", ] # trigger firing a new event from the device await trigger_subscription_callback( @@ -72,26 +70,6 @@ async def test_generic_switch_node( ) state = hass.states.get("event.mock_generic_switch_button") assert state.attributes[ATTR_EVENT_TYPE] == "initial_press" - # trigger firing a multi press event - await trigger_subscription_callback( - hass, - matter_client, - EventType.NODE_EVENT, - MatterNodeEvent( - node_id=generic_switch_node.node_id, - endpoint_id=1, - cluster_id=59, - event_id=5, - event_number=0, - priority=1, - timestamp=0, - timestamp_type=0, - data={"NewPosition": 3}, - ), - ) - state = hass.states.get("event.mock_generic_switch_button") - assert state.attributes[ATTR_EVENT_TYPE] == "multi_press_ongoing" - assert state.attributes["NewPosition"] == 3 # This tests needs to be adjusted to remove lingering tasks @@ -109,8 +87,8 @@ async def test_generic_switch_multi_node( assert state_button_1.name == "Mock Generic Switch Button (1)" # check event_types from featuremap 14 assert state_button_1.attributes[ATTR_EVENT_TYPES] == [ - "initial_press", - "short_release", + "multi_press_1", + "multi_press_2", "long_press", "long_release", ] @@ -120,3 +98,23 @@ async def test_generic_switch_multi_node( assert state_button_1.state == "unknown" # name should be 'DeviceName Fancy Button' due to the label set to 'Fancy Button' assert 
state_button_1.name == "Mock Generic Switch Fancy Button" + + # trigger firing a multi press event + await trigger_subscription_callback( + hass, + matter_client, + EventType.NODE_EVENT, + MatterNodeEvent( + node_id=generic_switch_multi_node.node_id, + endpoint_id=1, + cluster_id=59, + event_id=6, + event_number=0, + priority=1, + timestamp=0, + timestamp_type=0, + data={"totalNumberOfPressesCounted": 2}, + ), + ) + state = hass.states.get("event.mock_generic_switch_button_1") + assert state.attributes[ATTR_EVENT_TYPE] == "multi_press_2" diff --git a/tests/components/matter/test_fan.py b/tests/components/matter/test_fan.py index 30bd7f4a009..690209b1165 100644 --- a/tests/components/matter/test_fan.py +++ b/tests/components/matter/test_fan.py @@ -1,5 +1,6 @@ """Test Matter Fan platform.""" +from typing import Any from unittest.mock import MagicMock, call from matter_server.client.models.node import MatterNode @@ -27,6 +28,14 @@ from .common import ( ) +@pytest.fixture(name="fan_node") +async def simple_fan_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a Fan node.""" + return await setup_integration_with_node_fixture(hass, "fan", matter_client) + + @pytest.fixture(name="air_purifier") async def air_purifier_fixture( hass: HomeAssistant, matter_client: MagicMock @@ -92,8 +101,20 @@ async def test_fan_base( await trigger_subscription_callback(hass, matter_client) state = hass.states.get(entity_id) assert state.attributes["preset_mode"] == "sleep_wind" + # set mains power to OFF (OnOff cluster) + set_node_attribute(air_purifier, 1, 6, 0, False) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state.attributes["preset_mode"] is None + assert state.attributes["percentage"] == 0 + # test featuremap update + set_node_attribute(air_purifier, 1, 514, 65532, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert 
state.attributes["supported_features"] & FanEntityFeature.SET_SPEED +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_fan_turn_on_with_percentage( hass: HomeAssistant, matter_client: MagicMock, @@ -113,15 +134,31 @@ async def test_fan_turn_on_with_percentage( attribute_path="1/514/2", value=50, ) + # test again where preset_mode is omitted in the service call + # which should select the last active percentage + matter_client.write_attribute.reset_mock() + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert matter_client.write_attribute.call_count == 1 + assert matter_client.write_attribute.call_args == call( + node_id=air_purifier.node_id, + attribute_path="1/514/2", + value=255, + ) +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_fan_turn_on_with_preset_mode( hass: HomeAssistant, matter_client: MagicMock, - air_purifier: MatterNode, + fan_node: MatterNode, ) -> None: """Test turning on the fan with a specific preset mode.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.mocked_fan_switch_fan" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, @@ -130,7 +167,7 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=fan_node.node_id, attribute_path="1/514/0", value=2, ) @@ -145,28 +182,13 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=fan_node.node_id, attribute_path="1/514/10", value=value, ) - # test again where preset_mode is omitted in the service call - # which should select a default preset mode - matter_client.write_attribute.reset_mock() - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - 
{ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - assert matter_client.write_attribute.call_count == 1 - assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, - attribute_path="1/514/0", - value=5, - ) # test again if wind mode is explicitly turned off when we set a new preset mode matter_client.write_attribute.reset_mock() - set_node_attribute(air_purifier, 1, 514, 10, 2) + set_node_attribute(fan_node, 1, 514, 10, 2) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( FAN_DOMAIN, @@ -176,15 +198,33 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=air_purifier.node_id, + node_id=fan_node.node_id, attribute_path="1/514/10", value=0, ) assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=fan_node.node_id, attribute_path="1/514/0", value=2, ) + # test again where preset_mode is omitted in the service call + # which should select the last active preset + matter_client.write_attribute.reset_mock() + set_node_attribute(fan_node, 1, 514, 0, 1) + set_node_attribute(fan_node, 1, 514, 10, 0) + await trigger_subscription_callback(hass, matter_client) + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert matter_client.write_attribute.call_count == 1 + assert matter_client.write_attribute.call_args == call( + node_id=fan_node.node_id, + attribute_path="1/514/0", + value=1, + ) async def test_fan_turn_off( @@ -273,3 +313,133 @@ async def test_fan_set_direction( value=value, ) matter_client.write_attribute.reset_mock() + + +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize( + ("fixture", "entity_id", "attributes", "features"), + [ + ( + "fan", + "fan.mocked_fan_switch_fan", + { + "1/514/65532": 0, + }, + (FanEntityFeature.TURN_ON 
| FanEntityFeature.TURN_OFF), + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + { + "1/514/65532": 1, + }, + ( + FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + | FanEntityFeature.SET_SPEED + ), + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + { + "1/514/65532": 4, + }, + ( + FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + | FanEntityFeature.OSCILLATE + ), + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + { + "1/514/65532": 36, + }, + ( + FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + | FanEntityFeature.OSCILLATE + | FanEntityFeature.DIRECTION + ), + ), + ], +) +async def test_fan_supported_features( + hass: HomeAssistant, + matter_client: MagicMock, + fixture: str, + entity_id: str, + attributes: dict[str, Any], + features: int, +) -> None: + """Test if the correct features get discovered from featuremap.""" + await setup_integration_with_node_fixture(hass, fixture, matter_client, attributes) + state = hass.states.get(entity_id) + assert state + assert state.attributes["supported_features"] & features == features + + +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize( + ("fixture", "entity_id", "attributes", "preset_modes"), + [ + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 0, "1/514/65532": 0}, + [ + "low", + "medium", + "high", + ], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 1, "1/514/65532": 0}, + [ + "low", + "high", + ], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 2, "1/514/65532": 0}, + ["low", "medium", "high", "auto"], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 4, "1/514/65532": 0}, + ["high", "auto"], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 5, "1/514/65532": 0}, + ["high"], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 5, "1/514/65532": 8, "1/514/9": 3}, + ["high", "natural_wind", "sleep_wind"], + ), + ], +) +async def test_fan_features( + hass: HomeAssistant, + matter_client: 
MagicMock, + fixture: str, + entity_id: str, + attributes: dict[str, Any], + preset_modes: list[str], +) -> None: + """Test if the correct presets get discovered from fanmodesequence.""" + await setup_integration_with_node_fixture(hass, fixture, matter_client, attributes) + state = hass.states.get(entity_id) + assert state + assert state.attributes["preset_modes"] == preset_modes diff --git a/tests/components/matter/test_init.py b/tests/components/matter/test_init.py index d3712f24d12..cd5ef307cd3 100644 --- a/tests/components/matter/test_init.py +++ b/tests/components/matter/test_init.py @@ -3,15 +3,19 @@ from __future__ import annotations import asyncio +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, call, patch -from matter_server.client.exceptions import CannotConnect, InvalidServerVersion +from matter_server.client.exceptions import ( + CannotConnect, + ServerVersionTooNew, + ServerVersionTooOld, +) from matter_server.client.models.node import MatterNode from matter_server.common.errors import MatterError from matter_server.common.helpers.util import dataclass_from_dict from matter_server.common.models import MatterNodeData import pytest -from typing_extensions import Generator from homeassistant.components.hassio import HassioAPIError from homeassistant.components.matter.const import DOMAIN @@ -362,12 +366,30 @@ async def test_addon_info_failure( "backup_calls", "update_addon_side_effect", "create_backup_side_effect", + "connect_side_effect", ), [ - ("1.0.0", True, 1, 1, None, None), - ("1.0.0", False, 0, 0, None, None), - ("1.0.0", True, 1, 1, HassioAPIError("Boom"), None), - ("1.0.0", True, 0, 1, None, HassioAPIError("Boom")), + ("1.0.0", True, 1, 1, None, None, ServerVersionTooOld("Invalid version")), + ("1.0.0", True, 0, 0, None, None, ServerVersionTooNew("Invalid version")), + ("1.0.0", False, 0, 0, None, None, ServerVersionTooOld("Invalid version")), + ( + "1.0.0", + True, + 1, + 1, + HassioAPIError("Boom"), + 
None, + ServerVersionTooOld("Invalid version"), + ), + ( + "1.0.0", + True, + 0, + 1, + None, + HassioAPIError("Boom"), + ServerVersionTooOld("Invalid version"), + ), ], ) async def test_update_addon( @@ -386,13 +408,14 @@ async def test_update_addon( backup_calls: int, update_addon_side_effect: Exception | None, create_backup_side_effect: Exception | None, + connect_side_effect: Exception, ) -> None: """Test update the Matter add-on during entry setup.""" addon_info.return_value["version"] = addon_version addon_info.return_value["update_available"] = update_available create_backup.side_effect = create_backup_side_effect update_addon.side_effect = update_addon_side_effect - matter_client.connect.side_effect = InvalidServerVersion("Invalid version") + matter_client.connect.side_effect = connect_side_effect entry = MockConfigEntry( domain=DOMAIN, title="Matter", @@ -413,12 +436,32 @@ async def test_update_addon( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize( + ( + "connect_side_effect", + "issue_raised", + ), + [ + ( + ServerVersionTooOld("Invalid version"), + "server_version_version_too_old", + ), + ( + ServerVersionTooNew("Invalid version"), + "server_version_version_too_new", + ), + ], +) async def test_issue_registry_invalid_version( - hass: HomeAssistant, matter_client: MagicMock, issue_registry: ir.IssueRegistry + hass: HomeAssistant, + matter_client: MagicMock, + issue_registry: ir.IssueRegistry, + connect_side_effect: Exception, + issue_raised: str, ) -> None: """Test issue registry for invalid version.""" original_connect_side_effect = matter_client.connect.side_effect - matter_client.connect.side_effect = InvalidServerVersion("Invalid version") + matter_client.connect.side_effect = connect_side_effect entry = MockConfigEntry( domain=DOMAIN, title="Matter", @@ -434,7 +477,7 @@ async def test_issue_registry_invalid_version( entry_state = entry.state assert 
entry_state is ConfigEntryState.SETUP_RETRY - assert issue_registry.async_get_issue(DOMAIN, "invalid_server_version") + assert issue_registry.async_get_issue(DOMAIN, issue_raised) matter_client.connect.side_effect = original_connect_side_effect @@ -442,7 +485,7 @@ async def test_issue_registry_invalid_version( await hass.async_block_till_done() assert entry.state is ConfigEntryState.LOADED - assert not issue_registry.async_get_issue(DOMAIN, "invalid_server_version") + assert not issue_registry.async_get_issue(DOMAIN, issue_raised) @pytest.mark.parametrize( diff --git a/tests/components/matter/test_door_lock.py b/tests/components/matter/test_lock.py similarity index 87% rename from tests/components/matter/test_door_lock.py rename to tests/components/matter/test_lock.py index a0664612aba..f279430b393 100644 --- a/tests/components/matter/test_door_lock.py +++ b/tests/components/matter/test_lock.py @@ -8,13 +8,11 @@ import pytest from homeassistant.components.lock import ( STATE_LOCKED, - STATE_LOCKING, STATE_OPEN, STATE_UNLOCKED, - STATE_UNLOCKING, LockEntityFeature, ) -from homeassistant.const import ATTR_CODE, STATE_UNKNOWN +from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_OPENING, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er @@ -66,16 +64,17 @@ async def test_lock( ) matter_client.send_device_command.reset_mock() + await hass.async_block_till_done() state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_LOCKED + assert state.state == STATE_LOCKING set_node_attribute(door_lock, 1, 257, 0, 0) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_UNLOCKING + assert state.state == STATE_UNLOCKED set_node_attribute(door_lock, 1, 257, 0, 2) await trigger_subscription_callback(hass, matter_client) 
@@ -84,12 +83,12 @@ async def test_lock( assert state assert state.state == STATE_UNLOCKED - set_node_attribute(door_lock, 1, 257, 0, 0) + set_node_attribute(door_lock, 1, 257, 0, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_LOCKING + assert state.state == STATE_LOCKED set_node_attribute(door_lock, 1, 257, 0, None) await trigger_subscription_callback(hass, matter_client) @@ -98,6 +97,12 @@ async def test_lock( assert state assert state.state == STATE_UNKNOWN + # test featuremap update + set_node_attribute(door_lock, 1, 257, 65532, 4096) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("lock.mock_door_lock_lock") + assert state.attributes["supported_features"] & LockEntityFeature.OPEN + # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) @@ -210,7 +215,19 @@ async def test_lock_with_unbolt( timed_request_timeout_ms=1000, ) - set_node_attribute(door_lock_with_unbolt, 1, 257, 3, 0) + await hass.async_block_till_done() + state = hass.states.get("lock.mock_door_lock_lock") + assert state + assert state.state == STATE_OPENING + + set_node_attribute(door_lock_with_unbolt, 1, 257, 0, 0) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("lock.mock_door_lock_lock") + assert state + assert state.state == STATE_UNLOCKED + + set_node_attribute(door_lock_with_unbolt, 1, 257, 0, 3) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("lock.mock_door_lock_lock") diff --git a/tests/components/matter/test_select.py b/tests/components/matter/test_select.py new file mode 100644 index 00000000000..f84e5870392 --- /dev/null +++ b/tests/components/matter/test_select.py @@ -0,0 +1,99 @@ +"""Test Matter select entities.""" + +from unittest.mock import MagicMock, call + +from chip.clusters import Objects as clusters 
+from matter_server.client.models.node import MatterNode +import pytest + +from homeassistant.core import HomeAssistant + +from .common import ( + set_node_attribute, + setup_integration_with_node_fixture, + trigger_subscription_callback, +) + + +@pytest.fixture(name="light_node") +async def dimmable_light_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a dimmable light node.""" + return await setup_integration_with_node_fixture( + hass, "dimmable-light", matter_client + ) + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_mode_select_entities( + hass: HomeAssistant, + matter_client: MagicMock, + light_node: MatterNode, +) -> None: + """Test select entities are created for the ModeSelect cluster attributes.""" + state = hass.states.get("select.mock_dimmable_light_led_color") + assert state + assert state.state == "Aqua" + assert state.attributes["options"] == [ + "Red", + "Orange", + "Lemon", + "Lime", + "Green", + "Teal", + "Cyan", + "Aqua", + "Blue", + "Violet", + "Magenta", + "Pink", + "White", + ] + # name should be derived from description attribute + assert state.attributes["friendly_name"] == "Mock Dimmable Light LED Color" + set_node_attribute(light_node, 6, 80, 3, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("select.mock_dimmable_light_led_color") + assert state.state == "Orange" + # test select option + await hass.services.async_call( + "select", + "select_option", + { + "entity_id": "select.mock_dimmable_light_led_color", + "option": "Lime", + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=light_node.node_id, + endpoint_id=6, + command=clusters.ModeSelect.Commands.ChangeToMode(newMode=3), + ) + + +# This tests needs to be adjusted to remove lingering tasks 
+@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_attribute_select_entities( + hass: HomeAssistant, + matter_client: MagicMock, + light_node: MatterNode, +) -> None: + """Test select entities are created for attribute based discovery schema(s).""" + entity_id = "select.mock_dimmable_light_power_on_behavior_on_startup" + state = hass.states.get(entity_id) + assert state + assert state.state == "Previous" + assert state.attributes["options"] == ["On", "Off", "Toggle", "Previous"] + assert ( + state.attributes["friendly_name"] + == "Mock Dimmable Light Power-on behavior on Startup" + ) + set_node_attribute(light_node, 1, 6, 16387, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state.state == "On" diff --git a/tests/components/matter/test_update.py b/tests/components/matter/test_update.py new file mode 100644 index 00000000000..73c69407bbc --- /dev/null +++ b/tests/components/matter/test_update.py @@ -0,0 +1,171 @@ +"""Test Matter number entities.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +from chip.clusters import Objects as clusters +from chip.clusters.ClusterObjects import ClusterAttributeDescriptor +from matter_server.client.models.node import MatterNode +from matter_server.common.models import MatterSoftwareVersion, UpdateSource +import pytest + +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .common import ( + set_node_attribute, + setup_integration_with_node_fixture, + trigger_subscription_callback, +) + + +def set_node_attribute_typed( + node: MatterNode, + endpoint: int, + attribute: ClusterAttributeDescriptor, + value: Any, +) -> None: + """Set a node attribute.""" + set_node_attribute( + node, endpoint, attribute.cluster_id, attribute.attribute_id, value + ) + + +@pytest.fixture(name="check_node_update") 
+async def check_node_update_fixture(matter_client: MagicMock) -> AsyncMock: + """Fixture for a flow sensor node.""" + matter_client.check_node_update = AsyncMock(return_value=None) + return matter_client.check_node_update + + +@pytest.fixture(name="updateable_node") +async def updateable_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a flow sensor node.""" + return await setup_integration_with_node_fixture( + hass, "dimmable-light", matter_client + ) + + +async def test_update_entity( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + updateable_node: MatterNode, +) -> None: + """Test update entity exists and update check got made.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + + assert matter_client.check_node_update.call_count == 1 + + +async def test_update_install( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + updateable_node: MatterNode, +) -> None: + """Test update entity exists and update check got made.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v1.0" + + await async_setup_component(hass, "homeassistant", {}) + + check_node_update.return_value = MatterSoftwareVersion( + vid=65521, + pid=32768, + software_version=2, + software_version_string="v2.0", + firmware_information="", + min_applicable_software_version=0, + max_applicable_software_version=1, + release_notes_url="http://home-assistant.io/non-existing-product", + update_source=UpdateSource.LOCAL, + ) + + await hass.services.async_call( + "homeassistant", + "update_entity", + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + }, + blocking=True, + ) + + assert matter_client.check_node_update.call_count == 2 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == 
STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + assert ( + state.attributes.get("release_url") + == "http://home-assistant.io/non-existing-product" + ) + + await async_setup_component(hass, "update", {}) + + await hass.services.async_call( + "update", + "install", + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + }, + blocking=True, + ) + + set_node_attribute_typed( + updateable_node, + 0, + clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState, + clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kDownloading, + ) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("in_progress") + + set_node_attribute_typed( + updateable_node, + 0, + clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateStateProgress, + 50, + ) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("in_progress") == 50 + + set_node_attribute_typed( + updateable_node, + 0, + clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState, + clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kIdle, + ) + set_node_attribute_typed( + updateable_node, + 0, + clusters.BasicInformation.Attributes.SoftwareVersion, + 2, + ) + set_node_attribute_typed( + updateable_node, + 0, + clusters.BasicInformation.Attributes.SoftwareVersionString, + "v2.0", + ) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("update.mock_dimmable_light") + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v2.0" diff --git a/tests/components/mealie/conftest.py b/tests/components/mealie/conftest.py index dd6309cb524..ba42d16e56e 100644 --- a/tests/components/mealie/conftest.py +++ b/tests/components/mealie/conftest.py @@ -1,11 +1,20 @@ """Mealie 
tests configuration.""" +from collections.abc import Generator from unittest.mock import patch -from aiomealie import Mealplan, MealplanResponse +from aiomealie import ( + About, + Mealplan, + MealplanResponse, + Recipe, + ShoppingItemsResponse, + ShoppingListsResponse, + Statistics, + UserInfo, +) from mashumaro.codecs.orjson import ORJSONDecoder import pytest -from typing_extensions import Generator from homeassistant.components.mealie.const import DOMAIN from homeassistant.const import CONF_API_TOKEN, CONF_HOST @@ -13,6 +22,9 @@ from homeassistant.const import CONF_API_TOKEN, CONF_HOST from tests.common import MockConfigEntry, load_fixture from tests.components.smhi.common import AsyncMock +SHOPPING_LIST_ID = "list-id-1" +SHOPPING_ITEM_NOTE = "Shopping Item 1" + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -29,7 +41,7 @@ def mock_mealie_client() -> Generator[AsyncMock]: """Mock a Mealie client.""" with ( patch( - "homeassistant.components.mealie.coordinator.MealieClient", + "homeassistant.components.mealie.MealieClient", autospec=True, ) as mock_client, patch( @@ -44,6 +56,27 @@ def mock_mealie_client() -> Generator[AsyncMock]: client.get_mealplan_today.return_value = ORJSONDecoder(list[Mealplan]).decode( load_fixture("get_mealplan_today.json", DOMAIN) ) + client.get_user_info.return_value = UserInfo.from_json( + load_fixture("users_self.json", DOMAIN) + ) + client.get_about.return_value = About.from_json( + load_fixture("about.json", DOMAIN) + ) + recipe = Recipe.from_json(load_fixture("get_recipe.json", DOMAIN)) + client.get_recipe.return_value = recipe + client.import_recipe.return_value = recipe + client.get_shopping_lists.return_value = ShoppingListsResponse.from_json( + load_fixture("get_shopping_lists.json", DOMAIN) + ) + client.get_shopping_items.return_value = ShoppingItemsResponse.from_json( + load_fixture("get_shopping_items.json", DOMAIN) + ) + client.get_statistics.return_value = Statistics.from_json( + 
load_fixture("statistics.json", DOMAIN) + ) + mealplan = Mealplan.from_json(load_fixture("mealplan.json", DOMAIN)) + client.random_mealplan.return_value = mealplan + client.set_mealplan.return_value = mealplan yield client @@ -55,4 +88,5 @@ def mock_config_entry() -> MockConfigEntry: title="Mealie", data={CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, entry_id="01J0BC4QM2YBRP6H5G933CETT7", + unique_id="bf1c62fe-4941-4332-9886-e54e88dbdba0", ) diff --git a/tests/components/mealie/fixtures/about.json b/tests/components/mealie/fixtures/about.json new file mode 100644 index 00000000000..86f74ec66d6 --- /dev/null +++ b/tests/components/mealie/fixtures/about.json @@ -0,0 +1,3 @@ +{ + "version": "v1.10.2" +} diff --git a/tests/components/mealie/fixtures/get_mealplans.json b/tests/components/mealie/fixtures/get_mealplans.json index 2d63b753d99..9255f9b7396 100644 --- a/tests/components/mealie/fixtures/get_mealplans.json +++ b/tests/components/mealie/fixtures/get_mealplans.json @@ -605,6 +605,17 @@ "updateAt": "2024-01-02T06:35:05.209189", "lastMade": "2024-01-02T22:59:59" } + }, + { + "date": "2024-01-21", + "entryType": "dinner", + "title": "Aquavite", + "text": "Dineren met de boys", + "recipeId": null, + "id": 1, + "groupId": "3931df86-0679-4579-8c63-4bedc9ca9a85", + "userId": "6caa6e4d-521f-4ef4-9ed7-388bdd63f47d", + "recipe": null } ], "next": null, diff --git a/tests/components/mealie/fixtures/get_recipe.json b/tests/components/mealie/fixtures/get_recipe.json new file mode 100644 index 00000000000..a5ccd1876e5 --- /dev/null +++ b/tests/components/mealie/fixtures/get_recipe.json @@ -0,0 +1,266 @@ +{ + "id": "fada9582-709b-46aa-b384-d5952123ad93", + "userId": "bf1c62fe-4941-4332-9886-e54e88dbdba0", + "groupId": "24477569-f6af-4b53-9e3f-6d04b0ca6916", + "name": "Original Sacher-Torte (2)", + "slug": "original-sacher-torte-2", + "image": "SuPW", + "recipeYield": "4 servings", + "totalTime": "2 hours 30 minutes", + "prepTime": "1 hour 30 minutes", + "cookTime": 
null, + "performTime": "1 hour", + "description": "The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”", + "recipeCategory": [], + "tags": [ + { + "id": "1b5789b9-3af6-412e-8c77-8a01caa0aac9", + "name": "Sacher", + "slug": "sacher" + }, + { + "id": "1cf17f96-58b5-4bd3-b1e8-1606a64b413d", + "name": "Cake", + "slug": "cake" + }, + { + "id": "3f5f0a3d-728f-440d-a6c7-5a68612e8c67", + "name": "Torte", + "slug": "torte" + }, + { + "id": "525f388d-6ee0-4ebe-91fc-dd320a7583f0", + "name": "Sachertorte", + "slug": "sachertorte" + }, + { + "id": "544a6e08-a899-4f63-9c72-bb2924df70cb", + "name": "Sacher Torte Cake", + "slug": "sacher-torte-cake" + }, + { + "id": "576c0a82-84ee-4e50-a14e-aa7a675b6352", + "name": "Sacher Torte", + "slug": "sacher-torte" + }, + { + "id": "d530b8e4-275a-4093-804b-6d0de154c206", + "name": "Original Sachertorte", + "slug": "original-sachertorte" + } + ], + "tools": [], + "rating": null, + "orgURL": "https://www.sacher.com/en/original-sacher-torte/recipe/", + "dateAdded": "2024-06-29", + "dateUpdated": "2024-06-29T06:10:34.412665", + "createdAt": "2024-06-29T06:10:34.414927", + "updateAt": "2024-06-29T06:10:34.414928", + "lastMade": null, + "recipeIngredient": [ + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "130g dark couverture chocolate (min. 55% cocoa content)", + "isFood": true, + "disableAmount": false, + "display": "1 130g dark couverture chocolate (min. 
55% cocoa content)", + "title": null, + "originalText": null, + "referenceId": "a3adfe78-d157-44d8-98be-9c133e45bb4e" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "1 Vanilla Pod", + "isFood": true, + "disableAmount": false, + "display": "1 1 Vanilla Pod", + "title": null, + "originalText": null, + "referenceId": "41d234d7-c040-48f9-91e6-f4636aebb77b" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "150g softened butter", + "isFood": true, + "disableAmount": false, + "display": "1 150g softened butter", + "title": null, + "originalText": null, + "referenceId": "f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "100g Icing sugar", + "isFood": true, + "disableAmount": false, + "display": "1 100g Icing sugar", + "title": null, + "originalText": null, + "referenceId": "f7fcd86e-b04b-4e07-b69c-513925811491" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "6 Eggs", + "isFood": true, + "disableAmount": false, + "display": "1 6 Eggs", + "title": null, + "originalText": null, + "referenceId": "a831fbc3-e2f5-452e-a745-450be8b4a130" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "100g Castor sugar", + "isFood": true, + "disableAmount": false, + "display": "1 100g Castor sugar", + "title": null, + "originalText": null, + "referenceId": "b5ee4bdc-0047-4de7-968b-f3360bbcb31e" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "140g Plain wheat flour", + "isFood": true, + "disableAmount": false, + "display": "1 140g Plain wheat flour", + "title": null, + "originalText": null, + "referenceId": "a67db09d-429c-4e77-919d-cfed3da675ad" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "200g apricot jam", + "isFood": true, + "disableAmount": false, + "display": "1 200g apricot jam", + "title": null, + "originalText": null, + "referenceId": "55479752-c062-4b25-aae3-2b210999d7b9" + }, + { + "quantity": 
1.0, + "unit": null, + "food": null, + "note": "200g castor sugar", + "isFood": true, + "disableAmount": false, + "display": "1 200g castor sugar", + "title": null, + "originalText": null, + "referenceId": "ff9cd404-24ec-4d38-b0aa-0120ce1df679" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "150g dark couverture chocolate (min. 55% cocoa content)", + "isFood": true, + "disableAmount": false, + "display": "1 150g dark couverture chocolate (min. 55% cocoa content)", + "title": null, + "originalText": null, + "referenceId": "c7fca92e-971e-4728-a227-8b04783583ed" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "Unsweetend whipped cream to garnish", + "isFood": true, + "disableAmount": false, + "display": "1 Unsweetend whipped cream to garnish", + "title": null, + "originalText": null, + "referenceId": "ef023f23-7816-4871-87f6-4d29f9a283f7" + } + ], + "recipeInstructions": [ + { + "id": "2d558dbf-5361-4ef2-9d86-4161f5eb6146", + "title": "", + "text": "Preheat oven to 170°C. Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.", + "ingredientReferences": [] + }, + { + "id": "dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a", + "title": "", + "text": "Slit vanilla pod lengthwise and scrape out seeds. Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.", + "ingredientReferences": [] + }, + { + "id": "2265bd14-a691-40b1-9fe6-7b5dfeac8401", + "title": "", + "text": "Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. 
Sift the flour over the mixture, then fold in the flour and beaten egg whites.", + "ingredientReferences": [] + }, + { + "id": "0aade447-dfac-4aae-8e67-ac250ad13ae2", + "title": "", + "text": "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. (The cake is done when it yields slightly to the touch.)", + "ingredientReferences": [] + }, + { + "id": "5fdcb703-7103-468d-a65d-a92460b92eb3", + "title": "", + "text": "Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.", + "ingredientReferences": [] + }, + { + "id": "81474afc-b44e-49b3-bb67-5d7dab8f832a", + "title": "", + "text": "Cut the cake in half horizontally. Warm the jam and stir until smooth. Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.", + "ingredientReferences": [] + }, + { + "id": "8fac8aee-0d3c-4f78-9ff8-56d20472e5f1", + "title": "", + "text": "To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).", + "ingredientReferences": [] + }, + { + "id": "7162e099-d651-4656-902a-a09a9b40c4e1", + "title": "", + "text": "Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. 
Serve garnished with whipped cream.", + "ingredientReferences": [] + } + ], + "nutrition": { + "calories": "400", + "fatContent": "17", + "proteinContent": null, + "carbohydrateContent": null, + "fiberContent": null, + "sodiumContent": null, + "sugarContent": null + }, + "settings": { + "public": true, + "showNutrition": true, + "showAssets": true, + "landscapeView": false, + "disableComments": false, + "disableAmount": false, + "locked": false + }, + "assets": [], + "notes": [], + "extras": {}, + "comments": [] +} diff --git a/tests/components/mealie/fixtures/get_shopping_items.json b/tests/components/mealie/fixtures/get_shopping_items.json new file mode 100644 index 00000000000..1016440816b --- /dev/null +++ b/tests/components/mealie/fixtures/get_shopping_items.json @@ -0,0 +1,108 @@ +{ + "page": 1, + "per_page": 1000, + "total": 3, + "total_pages": 1, + "items": [ + { + "quantity": 2.0, + "unit": null, + "food": null, + "note": "Apples", + "isFood": false, + "disableAmount": true, + "display": "2 Apples", + "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", + "checked": false, + "position": 0, + "foodId": null, + "labelId": null, + "unitId": null, + "extras": {}, + "id": "f45430f7-3edf-45a9-a50f-73bb375090be", + "label": null, + "recipeReferences": [], + "createdAt": "2024-06-25T10:45:03.362623", + "updateAt": "2024-06-25T11:57:22.412650" + }, + { + "quantity": 1.0, + "unit": { + "id": "7bf539d4-fc78-48bc-b48e-c35ccccec34a", + "name": "can", + "pluralName": null, + "description": "", + "extras": {}, + "fraction": true, + "abbreviation": "", + "pluralAbbreviation": "", + "useAbbreviation": false, + "aliases": [], + "createdAt": "2024-05-14T14:45:02.464122", + "updateAt": "2024-05-14T14:45:02.464124" + }, + "food": { + "id": "09322430-d24c-4b1a-abb6-22b6ed3a88f5", + "name": "acorn squash", + "pluralName": null, + "description": "", + "extras": {}, + "labelId": null, + "aliases": [], + "label": null, + "createdAt": "2024-05-14T14:45:04.454134", + "updateAt": 
"2024-05-14T14:45:04.454141" + }, + "note": "", + "isFood": true, + "disableAmount": false, + "display": "1 can acorn squash", + "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", + "checked": false, + "position": 1, + "foodId": "09322430-d24c-4b1a-abb6-22b6ed3a88f5", + "labelId": null, + "unitId": "7bf539d4-fc78-48bc-b48e-c35ccccec34a", + "extras": {}, + "id": "84d8fd74-8eb0-402e-84b6-71f251bfb7cc", + "label": null, + "recipeReferences": [], + "createdAt": "2024-06-25T10:45:14.547922", + "updateAt": "2024-06-25T10:45:14.547925" + }, + { + "quantity": 0.0, + "unit": null, + "food": { + "id": "96801494-4e26-4148-849a-8155deb76327", + "name": "aubergine", + "pluralName": null, + "description": "", + "extras": {}, + "labelId": null, + "aliases": [], + "label": null, + "createdAt": "2024-05-14T14:45:03.868792", + "updateAt": "2024-05-14T14:45:03.868794" + }, + "note": "", + "isFood": true, + "disableAmount": false, + "display": "aubergine", + "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", + "checked": false, + "position": 2, + "foodId": "96801494-4e26-4148-849a-8155deb76327", + "labelId": null, + "unitId": null, + "extras": {}, + "id": "69913b9a-7c75-4935-abec-297cf7483f88", + "label": null, + "recipeReferences": [], + "createdAt": "2024-06-25T11:56:59.656699", + "updateAt": "2024-06-25T11:56:59.656701" + } + ], + "next": null, + "previous": null +} diff --git a/tests/components/mealie/fixtures/get_shopping_lists.json b/tests/components/mealie/fixtures/get_shopping_lists.json new file mode 100644 index 00000000000..7b7ba0aaa7a --- /dev/null +++ b/tests/components/mealie/fixtures/get_shopping_lists.json @@ -0,0 +1,838 @@ +{ + "page": 1, + "per_page": 50, + "total": 3, + "total_pages": 1, + "items": [ + { + "name": "Supermarket", + "extras": {}, + "createdAt": "2024-06-17T11:01:54.267314", + "updateAt": "2024-06-22T10:22:13.555389", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "userId": "90b03954-00e1-46de-9520-f0305022b84f", + "id": 
"27edbaab-2ec6-441f-8490-0283ea77585f", + "recipeReferences": [], + "labelSettings": [ + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "0f63545a-606a-47ea-a784-452d45de6158", + "position": 0, + "id": "ad5f48b0-5b26-4c2d-a2aa-79b0beae1e42", + "label": { + "name": "Alcohol", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0f63545a-606a-47ea-a784-452d45de6158" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "0c2d6111-9837-4319-acb5-490a32979993", + "position": 1, + "id": "c9b8289a-6693-4bec-9841-d7d08c3b240b", + "label": { + "name": "Baked Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0c2d6111-9837-4319-acb5-490a32979993" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "3922802c-8e8c-47d4-9c68-e60b0a1338b6", + "position": 2, + "id": "9be06f8a-6c23-476b-a8cc-334884bcdd40", + "label": { + "name": "Beverages", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "4111bfff-d834-4e8c-88ed-5eff761e06db", + "position": 3, + "id": "47bc36ae-1ee4-40be-ad68-ad8662c26cae", + "label": { + "name": "Canned Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", + "position": 4, + "id": "ad41f42c-08c3-49ef-8b96-dc1740ec95b6", + "label": { + "name": "Condiments", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "24fa2836-25e8-44af-b497-ad0d428a7f78" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", + "position": 5, + "id": 
"5514842f-8c05-4003-a42d-7a5a70d80148", + "label": { + "name": "Confectionary", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "20a735de-c75b-4fdb-abaf-b8d71ef192f8" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", + "position": 6, + "id": "0465a139-6571-4599-836b-a562afc95536", + "label": { + "name": "Dairy Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", + "position": 7, + "id": "8d85fe1b-ec4d-49d0-aecc-15f9dbc66fd0", + "label": { + "name": "Frozen Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", + "position": 8, + "id": "b6980720-bd88-4703-a115-50c0b915f607", + "label": { + "name": "Fruits", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", + "position": 9, + "id": "5d69d13c-5d7f-45af-9ecc-045ca914f7ca", + "label": { + "name": "Grains", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", + "position": 10, + "id": "a5e65ce7-3588-412b-a118-2fe1a2ca0104", + "label": { + "name": "Health Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" + } + }, + { + "shoppingListId": 
"27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", + "position": 11, + "id": "9890d86a-98e9-4599-8daf-82d341ef1e8d", + "label": { + "name": "Household", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", + "position": 12, + "id": "18fc0f39-3e45-412f-afa7-7eb779f7bfdf", + "label": { + "name": "Meat", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf136576-1929-4fc9-a3da-34c49ff58920" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", + "position": 13, + "id": "4cd55de7-7c2e-4078-8c61-87d40b33ebda", + "label": { + "name": "Meat Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", + "position": 14, + "id": "21c55b4a-c1b1-44c0-962e-040bbfa5e148", + "label": { + "name": "Other", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", + "position": 15, + "id": "b295a6be-1437-4415-92bb-4eee21d3195d", + "label": { + "name": "Produce", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", + "position": 16, + "id": "d3ae533f-c1a8-4f08-8a0f-a88914b2c84b", + "label": { + "name": "Regular", + "color": "#2E7D32FF", + "groupId": 
"9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf7672b8-036a-45a4-8323-6a167d2731be" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", + "position": 17, + "id": "572dbf60-4308-499e-ad7c-d806462ee501", + "label": { + "name": "Seafood", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", + "position": 18, + "id": "5321b4d8-3aba-4a64-95b2-03ac533dda32", + "label": { + "name": "Snacks", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "1c59a263-227a-4f43-a450-d53ca1485b36" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "189099a9-0033-4783-804a-ec6805e7d557", + "position": 19, + "id": "98aebebf-27fe-4834-b3d3-0e45201a182f", + "label": { + "name": "Spices", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "189099a9-0033-4783-804a-ec6805e7d557" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", + "position": 20, + "id": "3e3aa706-3008-4280-b332-a7d2c31cf683", + "label": { + "name": "Sweets", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c28efdde-5993-4044-b824-f111f3a118ef" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", + "position": 21, + "id": "48f109ca-c57a-4828-98ab-a2db1e6514c6", + "label": { + "name": "Vegetables", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" + } + } + ] + }, + { + "name": "Special groceries", + "extras": {}, + "createdAt": "2024-06-07T07:17:05.479808", + "updateAt": "2024-06-12T08:44:58.831239", + "groupId": 
"9ed7c880-3e85-4955-9318-1443d6e080fe", + "userId": "90b03954-00e1-46de-9520-f0305022b84f", + "id": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "recipeReferences": [], + "labelSettings": [ + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "0f63545a-606a-47ea-a784-452d45de6158", + "position": 0, + "id": "1a5dc45b-e6ae-4db2-bd2f-fa3c07efedeb", + "label": { + "name": "Alcohol", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0f63545a-606a-47ea-a784-452d45de6158" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "0c2d6111-9837-4319-acb5-490a32979993", + "position": 1, + "id": "d1594c9d-f1b6-4160-a4eb-0686499a40ea", + "label": { + "name": "Baked Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0c2d6111-9837-4319-acb5-490a32979993" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "3922802c-8e8c-47d4-9c68-e60b0a1338b6", + "position": 2, + "id": "077106d0-5c85-493c-ae6b-dea06002c824", + "label": { + "name": "Beverages", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "4111bfff-d834-4e8c-88ed-5eff761e06db", + "position": 3, + "id": "bf66b7e8-3758-4f9e-9e13-c7b9ff564889", + "label": { + "name": "Canned Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", + "position": 4, + "id": "bb34f741-10b4-490a-a512-67bbd374427c", + "label": { + "name": "Condiments", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "24fa2836-25e8-44af-b497-ad0d428a7f78" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", 
+ "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", + "position": 5, + "id": "d88b23a5-e397-4cf2-b527-d8982ecf89e0", + "label": { + "name": "Confectionary", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "20a735de-c75b-4fdb-abaf-b8d71ef192f8" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", + "position": 6, + "id": "82d44804-5bef-4cc3-9d1f-0d8e879783c0", + "label": { + "name": "Dairy Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", + "position": 7, + "id": "0ae70dde-7403-408f-a6c6-c19b8c0f6a4d", + "label": { + "name": "Frozen Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", + "position": 8, + "id": "7667a581-8d63-4785-a013-8e164994dfc4", + "label": { + "name": "Fruits", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", + "position": 9, + "id": "749c8cbd-c4e5-4879-bce1-40c3b62ada71", + "label": { + "name": "Grains", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", + "position": 10, + "id": "e7979797-7679-47be-b14f-5fdcfe1c987d", + "label": { + "name": "Health Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": 
"db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", + "position": 11, + "id": "1a9b6d19-d8b5-41a0-8e75-548c36fc0b1b", + "label": { + "name": "Household", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", + "position": 12, + "id": "0df24ff7-1767-46a1-9841-97f816079580", + "label": { + "name": "Meat", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf136576-1929-4fc9-a3da-34c49ff58920" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", + "position": 13, + "id": "761b5985-9f49-450b-a33c-5b85366501da", + "label": { + "name": "Meat Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", + "position": 14, + "id": "cd993b6c-2c06-40b3-8fe2-8f9613d29b8e", + "label": { + "name": "Other", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", + "position": 15, + "id": "9c9f8e0d-a9e8-4503-ad98-ee7039ec6eec", + "label": { + "name": "Produce", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", + "position": 16, + "id": "f2a1fa92-1ee3-47b5-9d5f-1ac21e0d6bf3", + "label": { + "name": 
"Regular", + "color": "#2E7D32FF", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf7672b8-036a-45a4-8323-6a167d2731be" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", + "position": 17, + "id": "bf2eb5db-bf88-44bc-a83f-7c69c38fc03f", + "label": { + "name": "Seafood", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", + "position": 18, + "id": "14f5ca34-fcec-4847-8ee7-71b29488dc5b", + "label": { + "name": "Snacks", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "1c59a263-227a-4f43-a450-d53ca1485b36" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "189099a9-0033-4783-804a-ec6805e7d557", + "position": 19, + "id": "197f3d41-27a6-4782-a78d-60ea582108c8", + "label": { + "name": "Spices", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "189099a9-0033-4783-804a-ec6805e7d557" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", + "position": 20, + "id": "b5021331-2004-4570-a2bb-c6f364787bcc", + "label": { + "name": "Sweets", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c28efdde-5993-4044-b824-f111f3a118ef" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", + "position": 21, + "id": "98e9ecff-d650-4717-96fe-d7744258bf43", + "label": { + "name": "Vegetables", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" + } + } + ] + }, + { + "name": "Freezer", + "extras": {}, + "createdAt": "2024-06-05T09:49:00.404632", + "updateAt": 
"2024-06-23T08:21:51.764793", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "userId": "90b03954-00e1-46de-9520-f0305022b84f", + "id": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "recipeReferences": [], + "labelSettings": [ + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "0f63545a-606a-47ea-a784-452d45de6158", + "position": 0, + "id": "666b5b98-dcf6-4121-a5a6-2782f06f5f7e", + "label": { + "name": "Alcohol", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0f63545a-606a-47ea-a784-452d45de6158" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "0c2d6111-9837-4319-acb5-490a32979993", + "position": 1, + "id": "6d25fc7e-33d2-459c-ba14-7e0aaf30a522", + "label": { + "name": "Baked Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0c2d6111-9837-4319-acb5-490a32979993" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "3922802c-8e8c-47d4-9c68-e60b0a1338b6", + "position": 2, + "id": "56402a4e-c94e-4480-9f68-87370dbda209", + "label": { + "name": "Beverages", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "4111bfff-d834-4e8c-88ed-5eff761e06db", + "position": 3, + "id": "743e9e2b-a13a-4d80-b203-431d1c23f691", + "label": { + "name": "Canned Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", + "position": 4, + "id": "93b46c6e-0542-4adf-ad9d-8942b47dd9e3", + "label": { + "name": "Condiments", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "24fa2836-25e8-44af-b497-ad0d428a7f78" + } + }, + { + 
"shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", + "position": 5, + "id": "8c6f20ff-a5e3-4c64-a1ff-aa07bbdd455a", + "label": { + "name": "Confectionary", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "20a735de-c75b-4fdb-abaf-b8d71ef192f8" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", + "position": 6, + "id": "02995d80-108f-4949-bd58-d04d670b388d", + "label": { + "name": "Dairy Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", + "position": 7, + "id": "b20c178c-e719-4159-b199-91a6dd25dcd3", + "label": { + "name": "Frozen Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", + "position": 8, + "id": "5ff12e47-9b84-46d2-aabf-da4165a68f65", + "label": { + "name": "Fruits", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", + "position": 9, + "id": "e0ec7da9-c0b8-4d78-a5b8-591c99d87370", + "label": { + "name": "Grains", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", + "position": 10, + "id": "3dc2d2e7-274e-40ec-8ba1-09ce1820b29b", + "label": { + "name": "Health Foods", + "color": "#E0E0E0", + 
"groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", + "position": 11, + "id": "e30fa937-4bb1-4ff9-b163-2da67e2749ca", + "label": { + "name": "Household", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", + "position": 12, + "id": "ecd715af-fafe-4d32-a376-538e476bf215", + "label": { + "name": "Meat", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf136576-1929-4fc9-a3da-34c49ff58920" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", + "position": 13, + "id": "5ded867c-473f-456d-b0a0-83cae279df71", + "label": { + "name": "Meat Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", + "position": 14, + "id": "eb88d477-cd50-4b84-a1bb-5adc077d38e5", + "label": { + "name": "Other", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", + "position": 15, + "id": "ab7e96e3-f8d5-4e4e-91ee-b966bd980cf0", + "label": { + "name": "Produce", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", + "position": 16, + "id": 
"3fcf5e5a-f8e2-4174-be79-2496a1cb505a", + "label": { + "name": "Regular", + "color": "#2E7D32FF", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf7672b8-036a-45a4-8323-6a167d2731be" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", + "position": 17, + "id": "e768c9e7-c568-44d1-a263-081d93fd1298", + "label": { + "name": "Seafood", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", + "position": 18, + "id": "f8a78147-c6d1-4a86-b159-5f178ae72089", + "label": { + "name": "Snacks", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "1c59a263-227a-4f43-a450-d53ca1485b36" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "189099a9-0033-4783-804a-ec6805e7d557", + "position": 19, + "id": "23253f2f-bc71-4ecf-837c-d1697738b505", + "label": { + "name": "Spices", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "189099a9-0033-4783-804a-ec6805e7d557" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", + "position": 20, + "id": "706d656b-3755-46f7-8c12-c9196730baf2", + "label": { + "name": "Sweets", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c28efdde-5993-4044-b824-f111f3a118ef" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", + "position": 21, + "id": "d9d60d8d-f2de-4636-864f-d7262e24ead3", + "label": { + "name": "Vegetables", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" + } + } + ] + } + ], + "next": null, + "previous": null +} 
diff --git a/tests/components/mealie/fixtures/mealplan.json b/tests/components/mealie/fixtures/mealplan.json new file mode 100644 index 00000000000..b540280d83f --- /dev/null +++ b/tests/components/mealie/fixtures/mealplan.json @@ -0,0 +1,34 @@ +{ + "date": "2024-01-22", + "entryType": "dinner", + "title": "", + "text": "", + "recipeId": "c5f00a93-71a2-4e48-900f-d9ad0bb9de93", + "id": 230, + "groupId": "0bf60b2e-ca89-42a9-94d4-8f67ca72b157", + "userId": "1ce8b5fe-04e8-4b80-aab1-d92c94685c6d", + "recipe": { + "id": "c5f00a93-71a2-4e48-900f-d9ad0bb9de93", + "userId": "1ce8b5fe-04e8-4b80-aab1-d92c94685c6d", + "groupId": "0bf60b2e-ca89-42a9-94d4-8f67ca72b157", + "name": "Zoete aardappel curry traybake", + "slug": "zoete-aardappel-curry-traybake", + "image": "AiIo", + "recipeYield": "2 servings", + "totalTime": "40 Minutes", + "prepTime": null, + "cookTime": null, + "performTime": null, + "description": "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", + "recipeCategory": [], + "tags": [], + "tools": [], + "rating": null, + "orgURL": "https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/", + "dateAdded": "2024-01-22", + "dateUpdated": "2024-01-22T00:27:46.324512", + "createdAt": "2024-01-22T00:27:46.327546", + "updateAt": "2024-01-22T00:27:46.327548", + "lastMade": null + } +} diff --git a/tests/components/mealie/fixtures/statistics.json b/tests/components/mealie/fixtures/statistics.json new file mode 100644 index 00000000000..350bf1fd9ff --- /dev/null +++ b/tests/components/mealie/fixtures/statistics.json @@ -0,0 +1,7 @@ +{ + "totalRecipes": 765, + "totalUsers": 3, + "totalCategories": 24, + "totalTags": 454, + "totalTools": 11 +} diff --git a/tests/components/mealie/fixtures/users_self.json b/tests/components/mealie/fixtures/users_self.json new file mode 100644 index 00000000000..6d5901c8cc0 --- /dev/null +++ b/tests/components/mealie/fixtures/users_self.json @@ -0,0 +1,24 @@ +{ + "id": "bf1c62fe-4941-4332-9886-e54e88dbdba0", + "username": "admin", + "fullName": "Change Me", + "email": "changeme@example.com", + "authMethod": "Mealie", + "admin": true, + "group": "home", + "advanced": true, + "canInvite": true, + "canManage": true, + "canOrganize": true, + "groupId": "24477569-f6af-4b53-9e3f-6d04b0ca6916", + "groupSlug": "home", + "tokens": [ + { + "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJsb25nX3Rva2VuIjp0cnVlLCJpZCI6ImJmMWM2MmZlLTQ5NDEtNDMzMi05ODg2LWU1NGU4OGRiZGJhMCIsIm5hbWUiOiJ0ZXN0aW5nIiwiaW50ZWdyYXRpb25faWQiOiJnZW5lcmljIiwiZXhwIjoxODczOTA5ODk4fQ.xwXZp4fL2g1RbIqGtBeOaS6RDfsYbQDHj8XtRM3wlX0", + "name": "testing", + "id": 2, + "createdAt": "2024-05-20T10:31:38.179669" + } + ], + "cacheKey": "1234" +} diff --git a/tests/components/mealie/snapshots/test_calendar.ambr b/tests/components/mealie/snapshots/test_calendar.ambr index 6af53c112de..e5a0a697157 100644 --- a/tests/components/mealie/snapshots/test_calendar.ambr +++ 
b/tests/components/mealie/snapshots/test_calendar.ambr @@ -147,6 +147,20 @@ 'summary': 'Mousse de saumon', 'uid': None, }), + dict({ + 'description': 'Dineren met de boys', + 'end': dict({ + 'date': '2024-01-22', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'date': '2024-01-21', + }), + 'summary': 'Aquavite', + 'uid': None, + }), ]) # --- # name: test_entities[calendar.mealie_breakfast-entry] @@ -178,7 +192,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'breakfast', - 'unique_id': '01J0BC4QM2YBRP6H5G933CETT7_breakfast', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_breakfast', 'unit_of_measurement': None, }) # --- @@ -230,7 +244,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dinner', - 'unique_id': '01J0BC4QM2YBRP6H5G933CETT7_dinner', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_dinner', 'unit_of_measurement': None, }) # --- @@ -238,12 +252,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': True, - 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", - 'end_time': '2024-01-23 00:00:00', + 'description': 'Dineren met de boys', + 'end_time': '2024-01-22 00:00:00', 'friendly_name': 'Mealie Dinner', 'location': '', - 'message': 'Zoete aardappel curry traybake', - 'start_time': '2024-01-22 00:00:00', + 'message': 'Aquavite', + 'start_time': '2024-01-21 00:00:00', }), 'context': , 'entity_id': 'calendar.mealie_dinner', @@ -282,7 +296,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'lunch', - 'unique_id': '01J0BC4QM2YBRP6H5G933CETT7_lunch', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_lunch', 'unit_of_measurement': None, }) # --- @@ -290,12 +304,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': True, - 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', - 'end_time': '2024-01-24 00:00:00', + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'end_time': '2024-01-23 00:00:00', 'friendly_name': 'Mealie Lunch', 'location': '', - 'message': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', - 'start_time': '2024-01-23 00:00:00', + 'message': 'All-American Beef Stew Recipe', + 'start_time': '2024-01-22 00:00:00', }), 'context': , 'entity_id': 'calendar.mealie_lunch', @@ -334,7 +348,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'side', - 'unique_id': '01J0BC4QM2YBRP6H5G933CETT7_side', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_side', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/mealie/snapshots/test_diagnostics.ambr b/tests/components/mealie/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e6c72c950cc --- /dev/null +++ b/tests/components/mealie/snapshots/test_diagnostics.ambr @@ -0,0 +1,505 
@@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'about': dict({ + 'version': 'v1.10.2', + }), + 'mealplans': dict({ + 'breakfast': list([ + dict({ + 'description': None, + 'entry_type': 'breakfast', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 229, + 'recipe': dict({ + 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'JeQ2', + 'name': 'Roast Chicken', + 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', + 'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1', + 'recipe_yield': '6 servings', + 'slug': 'roast-chicken', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + 'dinner': list([ + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. 
Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 222, + 'recipe': dict({ + 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. 
Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'En9o', + 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', + 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', + 'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34', + 'recipe_yield': '6 servings', + 'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 221, + 'recipe': dict({ + 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'Kn62', + 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', + 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', + 'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d', + 'recipe_yield': '4 servings', + 'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 219, + 'recipe': dict({ + 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. 
It is really easy to make and I never have any leftovers. Cook time includes chill time.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'ibL6', + 'name': 'Pampered Chef Double Chocolate Mocha Trifle', + 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', + 'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f', + 'recipe_yield': '12 servings', + 'slug': 'pampered-chef-double-chocolate-mocha-trifle', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 217, + 'recipe': dict({ + 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! These sliders are quick and easy plus they are make-ahead and reheat really well.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'beGq', + 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', + 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', + 'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22', + 'recipe_yield': '24 servings', + 'slug': 'cheeseburger-sliders-easy-30-min-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 212, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American 
Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 211, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 196, + 'recipe': dict({ + 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '5G1v', + 'name': 'Miso Udon Noodles with Spinach and Tofu', + 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', + 'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317', + 'recipe_yield': '2 servings', + 'slug': 'miso-udon-noodles-with-spinach-and-tofu', + 'user_id': 
'1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 195, + 'recipe': dict({ + 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'rrNL', + 'name': 'Mousse de saumon', + 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', + 'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb', + 'recipe_yield': '12 servings', + 'slug': 'mousse-de-saumon', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': 'Dineren met de boys', + 'entry_type': 'dinner', + 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-21', + }), + 'mealplan_id': 1, + 'recipe': None, + 'title': 'Aquavite', + 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', + }), + ]), + 'lunch': list([ + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 226, + 'recipe': dict({ + 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. 
Ingredientes, tiempo de...', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'INQz', + 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', + 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', + 'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9', + 'recipe_yield': '2 servings', + 'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 224, + 'recipe': dict({ + 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nj5M', + 'name': 'Boeuf bourguignon : la vraie recette (2)', + 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', + 'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a', + 'recipe_yield': '4 servings', + 'slug': 'boeuf-bourguignon-la-vraie-recette-2', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 216, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 
'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + 'side': list([ + dict({ + 'description': None, + 'entry_type': 'side', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 220, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + }), + 'shoppinglist': dict({ + '27edbaab-2ec6-441f-8490-0283ea77585f': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': 
False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': '27edbaab-2ec6-441f-8490-0283ea77585f', + 'name': 'Supermarket', + }), + }), + 'e9d78ff2-4b23-4b77-a3a8-464827100b46': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': 'e9d78ff2-4b23-4b77-a3a8-464827100b46', + 'name': 'Freezer', + }), + }), + 'f8438635-8211-4be8-80d0-0aa42e37a5f2': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': 
'9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': 'f8438635-8211-4be8-80d0-0aa42e37a5f2', + 'name': 'Special groceries', + }), + }), + }), + }) +# --- diff --git a/tests/components/mealie/snapshots/test_init.ambr b/tests/components/mealie/snapshots/test_init.ambr index c2752d938e4..98ca52dd15e 100644 --- a/tests/components/mealie/snapshots/test_init.ambr +++ b/tests/components/mealie/snapshots/test_init.ambr @@ -13,7 +13,7 @@ 'identifiers': set({ tuple( 'mealie', - '01J0BC4QM2YBRP6H5G933CETT7', + 'bf1c62fe-4941-4332-9886-e54e88dbdba0', ), }), 'is_new': False, @@ -21,11 +21,13 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Mealie', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'v1.10.2', 'via_device_id': None, }) # --- diff --git a/tests/components/mealie/snapshots/test_sensor.ambr b/tests/components/mealie/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..e645cf4c45f --- /dev/null +++ b/tests/components/mealie/snapshots/test_sensor.ambr @@ -0,0 +1,251 @@ +# serializer version: 1 +# 
name: test_entities[sensor.mealie_categories-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_categories', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Categories', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'categories', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_categories', + 'unit_of_measurement': 'categories', + }) +# --- +# name: test_entities[sensor.mealie_categories-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Categories', + 'state_class': , + 'unit_of_measurement': 'categories', + }), + 'context': , + 'entity_id': 'sensor.mealie_categories', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- +# name: test_entities[sensor.mealie_recipes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_recipes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Recipes', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'recipes', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_recipes', + 'unit_of_measurement': 'recipes', + }) +# --- +# name: 
test_entities[sensor.mealie_recipes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Recipes', + 'state_class': , + 'unit_of_measurement': 'recipes', + }), + 'context': , + 'entity_id': 'sensor.mealie_recipes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '765', + }) +# --- +# name: test_entities[sensor.mealie_tags-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_tags', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tags', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tags', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_tags', + 'unit_of_measurement': 'tags', + }) +# --- +# name: test_entities[sensor.mealie_tags-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Tags', + 'state_class': , + 'unit_of_measurement': 'tags', + }), + 'context': , + 'entity_id': 'sensor.mealie_tags', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '454', + }) +# --- +# name: test_entities[sensor.mealie_tools-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_tools', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 
None, + 'original_name': 'Tools', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tools', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_tools', + 'unit_of_measurement': 'tools', + }) +# --- +# name: test_entities[sensor.mealie_tools-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Tools', + 'state_class': , + 'unit_of_measurement': 'tools', + }), + 'context': , + 'entity_id': 'sensor.mealie_tools', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11', + }) +# --- +# name: test_entities[sensor.mealie_users-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_users', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Users', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'users', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_users', + 'unit_of_measurement': 'users', + }) +# --- +# name: test_entities[sensor.mealie_users-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Users', + 'state_class': , + 'unit_of_measurement': 'users', + }), + 'context': , + 'entity_id': 'sensor.mealie_users', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- diff --git a/tests/components/mealie/snapshots/test_services.ambr b/tests/components/mealie/snapshots/test_services.ambr new file mode 100644 index 00000000000..3ae158f1d2d --- /dev/null +++ b/tests/components/mealie/snapshots/test_services.ambr @@ -0,0 +1,749 @@ +# serializer 
version: 1 +# name: test_service_import_recipe + dict({ + 'recipe': dict({ + 'date_added': datetime.date(2024, 6, 29), + 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', + 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'image': 'SuPW', + 'ingredients': list([ + dict({ + 'is_food': True, + 'note': '130g dark couverture chocolate (min. 55% cocoa content)', + 'quantity': 1.0, + 'reference_id': 'a3adfe78-d157-44d8-98be-9c133e45bb4e', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '1 Vanilla Pod', + 'quantity': 1.0, + 'reference_id': '41d234d7-c040-48f9-91e6-f4636aebb77b', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '150g softened butter', + 'quantity': 1.0, + 'reference_id': 'f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '100g Icing sugar', + 'quantity': 1.0, + 'reference_id': 'f7fcd86e-b04b-4e07-b69c-513925811491', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '6 Eggs', + 'quantity': 1.0, + 'reference_id': 'a831fbc3-e2f5-452e-a745-450be8b4a130', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '100g Castor sugar', + 'quantity': 1.0, + 'reference_id': 'b5ee4bdc-0047-4de7-968b-f3360bbcb31e', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '140g Plain wheat flour', + 'quantity': 1.0, + 'reference_id': 'a67db09d-429c-4e77-919d-cfed3da675ad', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '200g apricot jam', + 'quantity': 
1.0, + 'reference_id': '55479752-c062-4b25-aae3-2b210999d7b9', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '200g castor sugar', + 'quantity': 1.0, + 'reference_id': 'ff9cd404-24ec-4d38-b0aa-0120ce1df679', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '150g dark couverture chocolate (min. 55% cocoa content)', + 'quantity': 1.0, + 'reference_id': 'c7fca92e-971e-4728-a227-8b04783583ed', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': 'Unsweetend whipped cream to garnish', + 'quantity': 1.0, + 'reference_id': 'ef023f23-7816-4871-87f6-4d29f9a283f7', + 'unit': None, + }), + ]), + 'instructions': list([ + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '2d558dbf-5361-4ef2-9d86-4161f5eb6146', + 'text': 'Preheat oven to 170°C. Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': 'dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a', + 'text': 'Slit vanilla pod lengthwise and scrape out seeds. Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '2265bd14-a691-40b1-9fe6-7b5dfeac8401', + 'text': 'Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. 
Sift the flour over the mixture, then fold in the flour and beaten egg whites.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '0aade447-dfac-4aae-8e67-ac250ad13ae2', + 'text': "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. (The cake is done when it yields slightly to the touch.)", + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '5fdcb703-7103-468d-a65d-a92460b92eb3', + 'text': 'Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '81474afc-b44e-49b3-bb67-5d7dab8f832a', + 'text': 'Cut the cake in half horizontally. Warm the jam and stir until smooth. Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '8fac8aee-0d3c-4f78-9ff8-56d20472e5f1', + 'text': 'To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '7162e099-d651-4656-902a-a09a9b40c4e1', + 'text': 'Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. 
Serve garnished with whipped cream.', + 'title': None, + }), + ]), + 'name': 'Original Sacher-Torte (2)', + 'original_url': 'https://www.sacher.com/en/original-sacher-torte/recipe/', + 'recipe_id': 'fada9582-709b-46aa-b384-d5952123ad93', + 'recipe_yield': '4 servings', + 'slug': 'original-sacher-torte-2', + 'tags': list([ + dict({ + 'name': 'Sacher', + 'slug': 'sacher', + 'tag_id': '1b5789b9-3af6-412e-8c77-8a01caa0aac9', + }), + dict({ + 'name': 'Cake', + 'slug': 'cake', + 'tag_id': '1cf17f96-58b5-4bd3-b1e8-1606a64b413d', + }), + dict({ + 'name': 'Torte', + 'slug': 'torte', + 'tag_id': '3f5f0a3d-728f-440d-a6c7-5a68612e8c67', + }), + dict({ + 'name': 'Sachertorte', + 'slug': 'sachertorte', + 'tag_id': '525f388d-6ee0-4ebe-91fc-dd320a7583f0', + }), + dict({ + 'name': 'Sacher Torte Cake', + 'slug': 'sacher-torte-cake', + 'tag_id': '544a6e08-a899-4f63-9c72-bb2924df70cb', + }), + dict({ + 'name': 'Sacher Torte', + 'slug': 'sacher-torte', + 'tag_id': '576c0a82-84ee-4e50-a14e-aa7a675b6352', + }), + dict({ + 'name': 'Original Sachertorte', + 'slug': 'original-sachertorte', + 'tag_id': 'd530b8e4-275a-4093-804b-6d0de154c206', + }), + ]), + 'user_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0', + }), + }) +# --- +# name: test_service_mealplan + dict({ + 'mealplan': list([ + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. 
Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 229, + 'recipe': dict({ + 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! 
Served with roasted vegetables, this recipe is simple enough for any cook!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'JeQ2', + 'name': 'Roast Chicken', + 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', + 'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1', + 'recipe_yield': '6 servings', + 'slug': 'roast-chicken', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 226, + 'recipe': dict({ + 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'INQz', + 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', + 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', + 'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9', + 'recipe_yield': '2 servings', + 'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 224, + 'recipe': dict({ + 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nj5M', + 'name': 'Boeuf bourguignon : la vraie recette (2)', + 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', + 'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a', + 'recipe_yield': '4 
servings', + 'slug': 'boeuf-bourguignon-la-vraie-recette-2', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 222, + 'recipe': dict({ + 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'En9o', + 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', + 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', + 'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34', + 'recipe_yield': '6 servings', + 'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 221, + 'recipe': dict({ + 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. 
These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'Kn62', + 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', + 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', + 'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d', + 'recipe_yield': '4 servings', + 'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 220, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 219, + 'recipe': dict({ + 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. 
Cook time includes chill time.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'ibL6', + 'name': 'Pampered Chef Double Chocolate Mocha Trifle', + 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', + 'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f', + 'recipe_yield': '12 servings', + 'slug': 'pampered-chef-double-chocolate-mocha-trifle', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 217, + 'recipe': dict({ + 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! These sliders are quick and easy plus they are make-ahead and reheat really well.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'beGq', + 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', + 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', + 'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22', + 'recipe_yield': '24 servings', + 'slug': 'cheeseburger-sliders-easy-30-min-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 216, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': 
'48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 212, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 211, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 196, + 'recipe': dict({ + 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '5G1v', + 'name': 'Miso Udon Noodles with Spinach and Tofu', + 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', + 'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317', + 'recipe_yield': '2 servings', + 'slug': 'miso-udon-noodles-with-spinach-and-tofu', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 195, + 'recipe': dict({ + 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. 
On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'rrNL', + 'name': 'Mousse de saumon', + 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', + 'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb', + 'recipe_yield': '12 servings', + 'slug': 'mousse-de-saumon', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': 'Dineren met de boys', + 'entry_type': , + 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'mealplan_date': FakeDate(2024, 1, 21), + 'mealplan_id': 1, + 'recipe': None, + 'title': 'Aquavite', + 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', + }), + ]), + }) +# --- +# name: test_service_recipe + dict({ + 'recipe': dict({ + 'date_added': datetime.date(2024, 6, 29), + 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', + 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'image': 'SuPW', + 'ingredients': list([ + dict({ + 'is_food': True, + 'note': '130g dark couverture chocolate (min. 
55% cocoa content)', + 'quantity': 1.0, + 'reference_id': 'a3adfe78-d157-44d8-98be-9c133e45bb4e', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '1 Vanilla Pod', + 'quantity': 1.0, + 'reference_id': '41d234d7-c040-48f9-91e6-f4636aebb77b', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '150g softened butter', + 'quantity': 1.0, + 'reference_id': 'f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '100g Icing sugar', + 'quantity': 1.0, + 'reference_id': 'f7fcd86e-b04b-4e07-b69c-513925811491', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '6 Eggs', + 'quantity': 1.0, + 'reference_id': 'a831fbc3-e2f5-452e-a745-450be8b4a130', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '100g Castor sugar', + 'quantity': 1.0, + 'reference_id': 'b5ee4bdc-0047-4de7-968b-f3360bbcb31e', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '140g Plain wheat flour', + 'quantity': 1.0, + 'reference_id': 'a67db09d-429c-4e77-919d-cfed3da675ad', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '200g apricot jam', + 'quantity': 1.0, + 'reference_id': '55479752-c062-4b25-aae3-2b210999d7b9', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '200g castor sugar', + 'quantity': 1.0, + 'reference_id': 'ff9cd404-24ec-4d38-b0aa-0120ce1df679', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '150g dark couverture chocolate (min. 55% cocoa content)', + 'quantity': 1.0, + 'reference_id': 'c7fca92e-971e-4728-a227-8b04783583ed', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': 'Unsweetend whipped cream to garnish', + 'quantity': 1.0, + 'reference_id': 'ef023f23-7816-4871-87f6-4d29f9a283f7', + 'unit': None, + }), + ]), + 'instructions': list([ + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '2d558dbf-5361-4ef2-9d86-4161f5eb6146', + 'text': 'Preheat oven to 170°C. 
Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': 'dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a', + 'text': 'Slit vanilla pod lengthwise and scrape out seeds. Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '2265bd14-a691-40b1-9fe6-7b5dfeac8401', + 'text': 'Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. Sift the flour over the mixture, then fold in the flour and beaten egg whites.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '0aade447-dfac-4aae-8e67-ac250ad13ae2', + 'text': "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. (The cake is done when it yields slightly to the touch.)", + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '5fdcb703-7103-468d-a65d-a92460b92eb3', + 'text': 'Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '81474afc-b44e-49b3-bb67-5d7dab8f832a', + 'text': 'Cut the cake in half horizontally. Warm the jam and stir until smooth. 
Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '8fac8aee-0d3c-4f78-9ff8-56d20472e5f1', + 'text': 'To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '7162e099-d651-4656-902a-a09a9b40c4e1', + 'text': 'Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. Serve garnished with whipped cream.', + 'title': None, + }), + ]), + 'name': 'Original Sacher-Torte (2)', + 'original_url': 'https://www.sacher.com/en/original-sacher-torte/recipe/', + 'recipe_id': 'fada9582-709b-46aa-b384-d5952123ad93', + 'recipe_yield': '4 servings', + 'slug': 'original-sacher-torte-2', + 'tags': list([ + dict({ + 'name': 'Sacher', + 'slug': 'sacher', + 'tag_id': '1b5789b9-3af6-412e-8c77-8a01caa0aac9', + }), + dict({ + 'name': 'Cake', + 'slug': 'cake', + 'tag_id': '1cf17f96-58b5-4bd3-b1e8-1606a64b413d', + }), + dict({ + 'name': 'Torte', + 'slug': 'torte', + 'tag_id': '3f5f0a3d-728f-440d-a6c7-5a68612e8c67', + }), + dict({ + 'name': 'Sachertorte', + 'slug': 'sachertorte', + 'tag_id': '525f388d-6ee0-4ebe-91fc-dd320a7583f0', + }), + dict({ + 'name': 'Sacher Torte Cake', + 'slug': 'sacher-torte-cake', + 'tag_id': '544a6e08-a899-4f63-9c72-bb2924df70cb', + }), + dict({ + 'name': 'Sacher Torte', + 'slug': 'sacher-torte', + 'tag_id': '576c0a82-84ee-4e50-a14e-aa7a675b6352', + }), + dict({ + 'name': 'Original Sachertorte', + 'slug': 'original-sachertorte', + 'tag_id': 
'd530b8e4-275a-4093-804b-6d0de154c206', + }), + ]), + 'user_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0', + }), + }) +# --- +# name: test_service_set_mealplan[payload0-kwargs0] + dict({ + 'mealplan': dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': datetime.date(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + }) +# --- +# name: test_service_set_mealplan[payload1-kwargs1] + dict({ + 'mealplan': dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': datetime.date(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + }) +# --- +# name: test_service_set_random_mealplan + dict({ + 'mealplan': dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': datetime.date(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + }) +# --- diff --git a/tests/components/mealie/snapshots/test_todo.ambr b/tests/components/mealie/snapshots/test_todo.ambr new file mode 100644 index 00000000000..4c58a839f57 --- /dev/null +++ b/tests/components/mealie/snapshots/test_todo.ambr @@ -0,0 +1,142 @@ +# serializer version: 1 +# name: test_entities[todo.mealie_freezer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.mealie_freezer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Freezer', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_e9d78ff2-4b23-4b77-a3a8-464827100b46', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[todo.mealie_freezer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Freezer', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.mealie_freezer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_entities[todo.mealie_special_groceries-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.mealie_special_groceries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Special groceries', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_f8438635-8211-4be8-80d0-0aa42e37a5f2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[todo.mealie_special_groceries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Special groceries', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.mealie_special_groceries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_entities[todo.mealie_supermarket-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.mealie_supermarket', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Supermarket', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_27edbaab-2ec6-441f-8490-0283ea77585f', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[todo.mealie_supermarket-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Supermarket', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.mealie_supermarket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- diff --git a/tests/components/mealie/test_calendar.py b/tests/components/mealie/test_calendar.py index 9df2c1810fd..d11fe5d2354 100644 --- a/tests/components/mealie/test_calendar.py +++ b/tests/components/mealie/test_calendar.py @@ -2,10 +2,11 @@ from datetime import date from http import HTTPStatus -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from syrupy.assertion import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -40,7 +41,8 @@ async def test_entities( mock_config_entry: MockConfigEntry, ) -> None: """Test the API returns the calendar.""" - await setup_integration(hass, mock_config_entry) + with patch("homeassistant.components.mealie.PLATFORMS", [Platform.CALENDAR]): + await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index ac68ed2fac5..f2886578744 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -2,15 +2,17 @@ from unittest.mock import AsyncMock -from aiomealie import MealieAuthenticationError, MealieConnectionError +from aiomealie import About, MealieAuthenticationError, MealieConnectionError import pytest from homeassistant.components.mealie.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_API_TOKEN, CONF_HOST +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.const import CONF_API_TOKEN, CONF_HOST, 
CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . import setup_integration + from tests.common import MockConfigEntry @@ -36,7 +38,9 @@ async def test_full_flow( assert result["data"] == { CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token", + CONF_VERIFY_SSL: True, } + assert result["result"].unique_id == "bf1c62fe-4941-4332-9886-e54e88dbdba0" @pytest.mark.parametrize( @@ -55,7 +59,7 @@ async def test_flow_errors( error: str, ) -> None: """Test flow errors.""" - mock_mealie_client.get_mealplan_today.side_effect = exception + mock_mealie_client.get_user_info.side_effect = exception result = await hass.config_entries.flow.async_init( DOMAIN, @@ -72,16 +76,48 @@ async def test_flow_errors( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - mock_mealie_client.get_mealplan_today.side_effect = None + mock_mealie_client.get_user_info.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + ("version"), + [ + ("v1.0.0beta-5"), + ("v1.0.0-RC2"), + ("v0.1.0"), + ], +) +async def test_flow_version_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + version, +) -> None: + """Test flow version error.""" + mock_mealie_client.get_about.return_value = About(version=version) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "mealie_version"} + + 
async def test_duplicate( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -105,3 +141,213 @@ async def test_duplicate( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_config_entry.data[CONF_API_TOKEN] == "token2" + + +async def test_reauth_flow_wrong_account( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow with wrong account.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_account" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MealieConnectionError, "cannot_connect"), + (MealieAuthenticationError, 
"invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_reauth_flow_exceptions( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test reauth flow errors.""" + await setup_integration(hass, mock_config_entry) + mock_mealie_client.get_user_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {"base": error} + + mock_mealie_client.get_user_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "http://test:9090", + CONF_API_TOKEN: "token2", + CONF_VERIFY_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert 
result["reason"] == "reconfigure_successful" + assert mock_config_entry.data[CONF_API_TOKEN] == "token2" + assert mock_config_entry.data[CONF_HOST] == "http://test:9090" + assert mock_config_entry.data[CONF_VERIFY_SSL] is False + + +async def test_reconfigure_flow_wrong_account( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow with wrong account.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_account" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MealieConnectionError, "cannot_connect"), + (MealieAuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_reconfigure_flow_exceptions( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test reconfigure flow errors.""" + await setup_integration(hass, mock_config_entry) + mock_mealie_client.get_user_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + assert result["errors"] == {"base": error} + + mock_mealie_client.get_user_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/mealie/test_diagnostics.py b/tests/components/mealie/test_diagnostics.py new file mode 100644 index 00000000000..88680da9784 --- /dev/null +++ b/tests/components/mealie/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Test Mealie diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/mealie/test_init.py b/tests/components/mealie/test_init.py index 7d63ad135f9..a45a67801df 100644 --- a/tests/components/mealie/test_init.py +++ b/tests/components/mealie/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiomealie import MealieAuthenticationError, MealieConnectionError +from aiomealie import About, MealieAuthenticationError, 
MealieConnectionError import pytest from syrupy import SnapshotAssertion @@ -26,12 +26,84 @@ async def test_device_info( """Test device registry integration.""" await setup_integration(hass, mock_config_entry) device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.entry_id)} + identifiers={(DOMAIN, mock_config_entry.unique_id)} ) assert device_entry is not None assert device_entry == snapshot +@pytest.mark.parametrize( + "field", + [ + "get_about", + "get_mealplans", + "get_shopping_lists", + "get_statistics", + ], +) +@pytest.mark.parametrize( + ("exc", "state"), + [ + (MealieConnectionError, ConfigEntryState.SETUP_RETRY), + (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_setup_failure( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + field: str, + exc: Exception, + state: ConfigEntryState, +) -> None: + """Test setup failure.""" + getattr(mock_mealie_client, field).side_effect = exc + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is state + + +@pytest.mark.parametrize( + ("version"), + [ + ("v1.0.0beta-5"), + ("v1.0.0-RC2"), + ("v0.1.0"), + ], +) +async def test_setup_too_old( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + version, +) -> None: + """Test setup of Mealie entry with too old version of Mealie.""" + mock_mealie_client.get_about.return_value = About(version=version) + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_setup_invalid( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test setup of Mealie entry with too old version of Mealie.""" + mock_mealie_client.get_about.return_value = About(version="nightly") + + await setup_integration(hass, mock_config_entry) + + 
assert ( + "It seems like you are using the nightly version of Mealie, nightly" + " versions could have changes that stop this integration working" in caplog.text + ) + assert mock_config_entry.state is ConfigEntryState.LOADED + + async def test_load_unload_entry( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -55,7 +127,7 @@ async def test_load_unload_entry( (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), ], ) -async def test_initialization_failure( +async def test_mealplan_initialization_failure( hass: HomeAssistant, mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, @@ -68,3 +140,25 @@ async def test_initialization_failure( await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is state + + +@pytest.mark.parametrize( + ("exc", "state"), + [ + (MealieConnectionError, ConfigEntryState.SETUP_RETRY), + (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_shoppingitems_initialization_failure( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exc: Exception, + state: ConfigEntryState, +) -> None: + """Test initialization failure.""" + mock_mealie_client.get_shopping_items.side_effect = exc + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is state diff --git a/tests/components/mealie/test_sensor.py b/tests/components/mealie/test_sensor.py new file mode 100644 index 00000000000..5a55b89ad21 --- /dev/null +++ b/tests/components/mealie/test_sensor.py @@ -0,0 +1,27 @@ +"""Tests for the Mealie sensors.""" + +from unittest.mock import AsyncMock, patch + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the sensor entities.""" + with patch("homeassistant.components.mealie.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/mealie/test_services.py b/tests/components/mealie/test_services.py new file mode 100644 index 00000000000..1c8c6f19de7 --- /dev/null +++ b/tests/components/mealie/test_services.py @@ -0,0 +1,446 @@ +"""Tests for the Mealie services.""" + +from datetime import date +from unittest.mock import AsyncMock + +from aiomealie import ( + MealieConnectionError, + MealieNotFoundError, + MealieValidationError, + MealplanEntryType, +) +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.mealie.const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_END_DATE, + ATTR_ENTRY_TYPE, + ATTR_INCLUDE_TAGS, + ATTR_NOTE_TEXT, + ATTR_NOTE_TITLE, + ATTR_RECIPE_ID, + ATTR_START_DATE, + ATTR_URL, + DOMAIN, +) +from homeassistant.components.mealie.services import ( + SERVICE_GET_MEALPLAN, + SERVICE_GET_RECIPE, + SERVICE_IMPORT_RECIPE, + SERVICE_SET_MEALPLAN, + SERVICE_SET_RANDOM_MEALPLAN, +) +from homeassistant.const import ATTR_DATE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_service_mealplan( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the get_mealplan service.""" + + await setup_integration(hass, mock_config_entry) + + freezer.move_to("2023-10-21") + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id}, + blocking=True, + return_response=True, + ) + assert mock_mealie_client.get_mealplans.call_args_list[1][0] == ( + date(2023, 10, 21), + date(2023, 10, 21), + ) + assert response == snapshot + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_START_DATE: "2023-10-22", + ATTR_END_DATE: "2023-10-25", + }, + blocking=True, + return_response=True, + ) + assert response + assert mock_mealie_client.get_mealplans.call_args_list[2][0] == ( + date(2023, 10, 22), + date(2023, 10, 25), + ) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_START_DATE: "2023-10-19", + }, + blocking=True, + return_response=True, + ) + assert response + assert mock_mealie_client.get_mealplans.call_args_list[3][0] == ( + date(2023, 10, 19), + date(2023, 10, 21), + ) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_END_DATE: "2023-10-22", + }, + blocking=True, + return_response=True, + ) + assert response + assert mock_mealie_client.get_mealplans.call_args_list[4][0] == ( + date(2023, 10, 21), + date(2023, 10, 22), + ) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + 
ATTR_START_DATE: "2023-10-22", + ATTR_END_DATE: "2023-10-19", + }, + blocking=True, + return_response=True, + ) + + +async def test_service_recipe( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the get_recipe service.""" + + await setup_integration(hass, mock_config_entry) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_RECIPE, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, ATTR_RECIPE_ID: "recipe_id"}, + blocking=True, + return_response=True, + ) + assert response == snapshot + + +async def test_service_import_recipe( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the import_recipe service.""" + + await setup_integration(hass, mock_config_entry) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_IMPORT_RECIPE, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_URL: "http://example.com", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + mock_mealie_client.import_recipe.assert_called_with( + "http://example.com", include_tags=False + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_IMPORT_RECIPE, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_URL: "http://example.com", + ATTR_INCLUDE_TAGS: True, + }, + blocking=True, + return_response=False, + ) + mock_mealie_client.import_recipe.assert_called_with( + "http://example.com", include_tags=True + ) + + +async def test_service_set_random_mealplan( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the set_random_mealplan service.""" + + await setup_integration(hass, mock_config_entry) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_SET_RANDOM_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: 
mock_config_entry.entry_id, + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + mock_mealie_client.random_mealplan.assert_called_with( + date(2023, 10, 21), MealplanEntryType.LUNCH + ) + + mock_mealie_client.random_mealplan.reset_mock() + await hass.services.async_call( + DOMAIN, + SERVICE_SET_RANDOM_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + }, + blocking=True, + return_response=False, + ) + mock_mealie_client.random_mealplan.assert_called_with( + date(2023, 10, 21), MealplanEntryType.LUNCH + ) + + +@pytest.mark.parametrize( + ("payload", "kwargs"), + [ + ( + { + ATTR_RECIPE_ID: "recipe_id", + }, + {"recipe_id": "recipe_id", "note_title": None, "note_text": None}, + ), + ( + { + ATTR_NOTE_TITLE: "Note Title", + ATTR_NOTE_TEXT: "Note Text", + }, + {"recipe_id": None, "note_title": "Note Title", "note_text": "Note Text"}, + ), + ], +) +async def test_service_set_mealplan( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + payload: dict[str, str], + kwargs: dict[str, str], +) -> None: + """Test the set_mealplan service.""" + + await setup_integration(hass, mock_config_entry) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_SET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + } + | payload, + blocking=True, + return_response=True, + ) + assert response == snapshot + mock_mealie_client.set_mealplan.assert_called_with( + date(2023, 10, 21), MealplanEntryType.LUNCH, **kwargs + ) + + mock_mealie_client.random_mealplan.reset_mock() + await hass.services.async_call( + DOMAIN, + SERVICE_SET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + } + | payload, + blocking=True, + 
return_response=False, + ) + mock_mealie_client.set_mealplan.assert_called_with( + date(2023, 10, 21), MealplanEntryType.LUNCH, **kwargs + ) + + +@pytest.mark.parametrize( + ("service", "payload", "function", "exception", "raised_exception", "message"), + [ + ( + SERVICE_GET_MEALPLAN, + {}, + "get_mealplans", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ( + SERVICE_GET_RECIPE, + {ATTR_RECIPE_ID: "recipe_id"}, + "get_recipe", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ( + SERVICE_GET_RECIPE, + {ATTR_RECIPE_ID: "recipe_id"}, + "get_recipe", + MealieNotFoundError, + ServiceValidationError, + "Recipe with ID or slug `recipe_id` not found", + ), + ( + SERVICE_IMPORT_RECIPE, + {ATTR_URL: "http://example.com"}, + "import_recipe", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ( + SERVICE_IMPORT_RECIPE, + {ATTR_URL: "http://example.com"}, + "import_recipe", + MealieValidationError, + ServiceValidationError, + "Mealie could not import the recipe from the URL", + ), + ( + SERVICE_SET_RANDOM_MEALPLAN, + {ATTR_DATE: "2023-10-21", ATTR_ENTRY_TYPE: "lunch"}, + "random_mealplan", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ( + SERVICE_SET_MEALPLAN, + { + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + ATTR_RECIPE_ID: "recipe_id", + }, + "set_mealplan", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ], +) +async def test_services_connection_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + service: str, + payload: dict[str, str], + function: str, + exception: Exception, + raised_exception: type[Exception], + message: str, +) -> None: + """Test a connection error in the services.""" + + await setup_integration(hass, mock_config_entry) + + getattr(mock_mealie_client, 
function).side_effect = exception + + with pytest.raises(raised_exception, match=message): + await hass.services.async_call( + DOMAIN, + service, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id} | payload, + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize( + ("service", "payload"), + [ + (SERVICE_GET_MEALPLAN, {}), + (SERVICE_GET_RECIPE, {ATTR_RECIPE_ID: "recipe_id"}), + (SERVICE_IMPORT_RECIPE, {ATTR_URL: "http://example.com"}), + ( + SERVICE_SET_RANDOM_MEALPLAN, + {ATTR_DATE: "2023-10-21", ATTR_ENTRY_TYPE: "lunch"}, + ), + ( + SERVICE_SET_MEALPLAN, + { + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + ATTR_RECIPE_ID: "recipe_id", + }, + ), + ], +) +async def test_service_entry_availability( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + service: str, + payload: dict[str, str], +) -> None: + """Test the services without valid entry.""" + mock_config_entry.add_to_hass(hass) + mock_config_entry2 = MockConfigEntry(domain=DOMAIN) + mock_config_entry2.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises(ServiceValidationError, match="Mock Title is not loaded"): + await hass.services.async_call( + DOMAIN, + service, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry2.entry_id} | payload, + blocking=True, + return_response=True, + ) + + with pytest.raises( + ServiceValidationError, match='Integration "mealie" not found in registry' + ): + await hass.services.async_call( + DOMAIN, + service, + {ATTR_CONFIG_ENTRY_ID: "bad-config_id"} | payload, + blocking=True, + return_response=True, + ) diff --git a/tests/components/mealie/test_todo.py b/tests/components/mealie/test_todo.py new file mode 100644 index 00000000000..920cfc47397 --- /dev/null +++ b/tests/components/mealie/test_todo.py @@ -0,0 +1,197 @@ +"""Tests for the Mealie todo.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, 
patch + +from aiomealie import MealieError, ShoppingListsResponse +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.mealie import DOMAIN +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + + +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test todo entities.""" + with patch("homeassistant.components.mealie.PLATFORMS", [Platform.TODO]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_add_todo_list_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for adding a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + mock_mealie_client.add_shopping_item.assert_called_once() + + +async def test_add_todo_list_item_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for failing to add a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + mock_mealie_client.add_shopping_item.side_effect = MealieError + + with 
pytest.raises(HomeAssistantError): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + +async def test_update_todo_list_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for updating a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + mock_mealie_client.update_shopping_item.assert_called_once() + + +async def test_update_todo_list_item_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for failing to update a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + mock_mealie_client.update_shopping_item.side_effect = MealieError + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + +async def test_delete_todo_list_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for deleting a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "aubergine"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + mock_mealie_client.delete_shopping_item.assert_called_once() + + +async def test_delete_todo_list_item_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test 
for failing to delete a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + mock_mealie_client.delete_shopping_item = AsyncMock() + mock_mealie_client.delete_shopping_item.side_effect = MealieError + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "aubergine"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + +async def test_runtime_management( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test for creating and deleting shopping lists.""" + response = ShoppingListsResponse.from_json( + load_fixture("get_shopping_lists.json", DOMAIN) + ).items + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=[response[0]] + ) + await setup_integration(hass, mock_config_entry) + assert hass.states.get("todo.mealie_supermarket") is not None + assert hass.states.get("todo.mealie_special_groceries") is None + + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=response[0:2] + ) + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("todo.mealie_special_groceries") is not None + + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=[response[0]] + ) + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("todo.mealie_special_groceries") is None diff --git a/tests/components/media_extractor/conftest.py b/tests/components/media_extractor/conftest.py index 1d198681f3f..58d51f1cb2e 100644 --- a/tests/components/media_extractor/conftest.py +++ b/tests/components/media_extractor/conftest.py @@ -1,20 +1,18 @@ """Common fixtures for the Media Extractor tests.""" +from collections.abc import Generator from typing import 
Any from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.media_extractor import DOMAIN -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . import MockYoutubeDL from .const import AUDIO_QUERY -from tests.common import async_mock_service - @pytest.fixture(autouse=True) async def setup_homeassistant(hass: HomeAssistant): @@ -31,12 +29,6 @@ async def setup_media_player(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "media_player", "play_media") - - @pytest.fixture(name="mock_youtube_dl") async def setup_mock_yt_dlp(hass: HomeAssistant) -> MockYoutubeDL: """Mock YoutubeDL.""" diff --git a/tests/components/media_extractor/snapshots/test_init.ambr b/tests/components/media_extractor/snapshots/test_init.ambr index ed56f40af73..9731a415c00 100644 --- a/tests/components/media_extractor/snapshots/test_init.ambr +++ b/tests/components/media_extractor/snapshots/test_init.ambr @@ -30,15 +30,6 @@ 'media_content_type': 'VIDEO', }) # --- -# name: test_play_media_service - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 
'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694794256/ei/sC0EZYCPHbuZx_AP3bGz0Ac/ip/84.31.234.146/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr2---sn-5hnekn7k.googlevideo.com/mh/7c/mm/31,29/mn/sn-5hnekn7k,sn-5hne6nzy/ms/au,rdu/mv/m/mvi/2/pl/14/initcwndbps/2267500/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694772337/fvip/3/short_key/1/keepalive/yes/fexp/24007246,24362685/beids/24350018/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRgIhAIC0iobMnRschmQ3QaYsytXg9eg7l9B_-UNvMciis4bmAiEAg-3jr6SwOfAGCCU-JyTyxcXmraug-hPcjjJzm__43ug%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRQIhAOlqbgmuueNhIuGENYKCsdwiNAUPheXw-RMUqsiaB7YuAiANN43FxJl14Ve_H_c9K-aDoXG4sI7PDCqKDhov6Qro_g%3D%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-AUDIO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -57,24 +48,6 @@ 'media_content_type': 'AUDIO', }) # --- -# name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-VIDEO-audio_media_extractor_config] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 
'https://cf-media.sndcdn.com/50remGX1OqRY.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vNTByZW1HWDFPcVJZLjEyOC5tcDMqIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNjk0Nzk5MTc5fX19XX0_&Signature=JtF8BXxTCElhjCrhnSAq3W6z960VmdVXx7BPhQvI0MCxr~J43JFGO8CVw9-VBM2oEf14mqWo63-C0FO29DvUuBZnmLD3dhDfryVfWJsrix7voimoRDaNFE~3zntDbg7O2S8uWYyZK8OZC9anzwokvjH7jbmviWqK4~2IM9dwgejGgzrQU1aadV2Yro7NJZnF7SD~7tVjkM-hBg~X5zDYVxmGrdzN3tFoLwRmUch6RNDL~1DcWBk0AveBKQFAdBrFBjDDUeIyDz9Idhw2aG9~fjfckcf95KwqrVQxz1N5XEzfNDDo8xkUgDt0eb9dtXdwxLJ0swC6e5VLS8bsH91GMg__&Key-Pair-Id=APKAI6TU7MMXM5DG6EPQ', - 'media_content_type': 'VIDEO', - }) -# --- -# name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-VIDEO-empty_media_extractor_config] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 'https://cf-media.sndcdn.com/50remGX1OqRY.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vNTByZW1HWDFPcVJZLjEyOC5tcDMqIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNjk0Nzk4NTkzfX19XX0_&Signature=flALJvEBnzS0ZOOhf0-07Ap~NURw2Gn2OqkeKKTTMX5HRGJw9eXFay79tcC4GsMMXWUgWoCx-n3yelpyilE2MOEIufBNUbjqRfMSJaX5YhYxjQdoDYuiU~gqBzJyPw9pKzr6P8~5HNKL3Idr0CNhUzdV6FQLaUPKMMibq9ghV833mUmdyvdk1~GZBc8MOg9GrTdcigGgpPzd-vrIMICMvFzFnwBOeOotxX2Vfqf9~wVekBKGlvB9A~7TlZ71lv9Fl9u4m8rse9E-mByweVc1M784ehJV3~tRPjuF~FXXWKP8x0nGJmoq7RAnG7iFIt~fQFmsfOq2o~PG7dHMRPh7hw__&Key-Pair-Id=APKAI6TU7MMXM5DG6EPQ', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://test.com/abc-AUDIO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -93,15 +66,6 @@ 'media_content_type': 'AUDIO', }) # --- -# name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-audio_media_extractor_config-] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 
'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694805268/ei/tFgEZcu0DoOD-gaqg47wBA/ip/45.93.75.130/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr3---sn-5hne6nzy.googlevideo.com/mh/7c/mm/31,29/mn/sn-5hne6nzy,sn-5hnekn7k/ms/au,rdu/mv/m/mvi/3/pl/22/initcwndbps/1957500/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694783146/fvip/2/short_key/1/keepalive/yes/fexp/24007246/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRQIhALAASH0_ZDQQoMA82qWNCXSHPZ0bb9TQldIs7AAxktiiAiASA5bQy7IAa6NwdGIOpfye5OgcY_BNuo0WgSdh84tosw%3D%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRgIhAIsDcLGH8KJpQpBgyJ5VWlDxfr75HyO8hMSVS9v7nRu4AiEA2xjtLZOzeNFoJlxwCsH3YqsUQt-BF_4gikhi_P4FbBc%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -111,15 +75,6 @@ 'media_content_type': 'VIDEO', }) # --- -# name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-empty_media_extractor_config-] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 
'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694805294/ei/zlgEZcCPFpqOx_APj42f2Ao/ip/45.93.75.130/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr3---sn-5hne6nzy.googlevideo.com/mh/7c/mm/31,26/mn/sn-5hne6nzy,sn-aigzrnld/ms/au,onr/mv/m/mvi/3/pl/22/initcwndbps/2095000/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694783390/fvip/1/short_key/1/keepalive/yes/fexp/24007246,24362685/beids/24350017/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRgIhANCPwWNfq6wBp1Xo1L8bRJpDrzOyv7kfH_J65cZ_PRZLAiEAwo-0wQgeIjPe7OgyAAvMCx_A9wd1h8Qyh7VntKwGJUs%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRQIgIqS9Ub_6L9ScKXr0T9bkeu6TZsEsyNApYfF_MqeukqECIQCMSeJ1sSEw5QGMgHAW8Fhsir4TYHEK5KVg-PzJbrT6hw%3D%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-empty_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', diff --git a/tests/components/media_extractor/test_init.py b/tests/components/media_extractor/test_init.py index 8c8a1407ccc..bc80e063697 100644 --- a/tests/components/media_extractor/test_init.py +++ b/tests/components/media_extractor/test_init.py @@ -100,7 +100,7 @@ async def test_extracting_playlist_no_entries( async def test_play_media_service( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, - calls: list[ServiceCall], + service_calls: list[ServiceCall], snapshot: SnapshotAssertion, request: pytest.FixtureRequest, config_fixture: str, @@ -123,13 +123,14 @@ async def test_play_media_service( ) await hass.async_block_till_done() - assert calls[0].data == snapshot + assert len(service_calls) == 2 + assert service_calls[1].data == snapshot async def test_download_error( hass: HomeAssistant, 
empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test handling DownloadError.""" @@ -152,7 +153,7 @@ async def test_download_error( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 assert f"Could not retrieve data for the URL: {YOUTUBE_VIDEO}" in caplog.text @@ -160,7 +161,7 @@ async def test_no_target_entity( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], snapshot: SnapshotAssertion, ) -> None: """Test having no target entity.""" @@ -179,14 +180,15 @@ async def test_no_target_entity( ) await hass.async_block_till_done() - assert calls[0].data == snapshot + assert len(service_calls) == 2 + assert service_calls[1].data == snapshot async def test_playlist( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], snapshot: SnapshotAssertion, ) -> None: """Test extracting a playlist.""" @@ -205,14 +207,15 @@ async def test_playlist( ) await hass.async_block_till_done() - assert calls[0].data == snapshot + assert len(service_calls) == 2 + assert service_calls[1].data == snapshot async def test_playlist_no_entries( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test extracting a playlist without entries.""" @@ -231,7 +234,7 @@ async def test_playlist_no_entries( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 assert ( f"Could not retrieve data for the URL: {YOUTUBE_EMPTY_PLAYLIST}" in caplog.text ) @@ -240,7 +243,7 @@ async def test_playlist_no_entries( async def test_query_error( hass: 
HomeAssistant, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test handling error with query.""" @@ -270,15 +273,13 @@ async def test_query_error( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_cookiefile_detection( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], - snapshot: SnapshotAssertion, caplog: pytest.LogCaptureFixture, ) -> None: """Test cookie file detection.""" @@ -289,16 +290,19 @@ async def test_cookiefile_detection( cookies_dir = os.path.join(hass.config.config_dir, "media_extractor") cookies_file = os.path.join(cookies_dir, "cookies.txt") - if not os.path.exists(cookies_dir): - os.makedirs(cookies_dir) + def _write_cookies_file() -> None: + if not os.path.exists(cookies_dir): + os.makedirs(cookies_dir) - with open(cookies_file, "w+", encoding="utf-8") as f: - f.write( - """# Netscape HTTP Cookie File + with open(cookies_file, "w+", encoding="utf-8") as f: + f.write( + """# Netscape HTTP Cookie File - .youtube.com TRUE / TRUE 1701708706 GPS 1 - """ - ) + .youtube.com TRUE / TRUE 1701708706 GPS 1 + """ + ) + + await hass.async_add_executor_job(_write_cookies_file) await hass.services.async_call( DOMAIN, @@ -313,7 +317,7 @@ async def test_cookiefile_detection( assert "Media extractor loaded cookies file" in caplog.text - os.remove(cookies_file) + await hass.async_add_executor_job(os.remove, cookies_file) await hass.services.async_call( DOMAIN, diff --git a/tests/components/media_player/test_async_helpers.py b/tests/components/media_player/test_async_helpers.py index 783846d8857..750d2861f21 100644 --- a/tests/components/media_player/test_async_helpers.py +++ b/tests/components/media_player/test_async_helpers.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant class SimpleMediaPlayer(mp.MediaPlayerEntity): """Media player test 
class.""" - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Initialize the test media player.""" self.hass = hass self._volume = 0 diff --git a/tests/components/media_player/test_device_condition.py b/tests/components/media_player/test_device_condition.py index 186cd674b39..78d30e2ca6e 100644 --- a/tests/components/media_player/test_device_condition.py +++ b/tests/components/media_player/test_device_condition.py @@ -20,11 +20,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -32,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -136,7 +126,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -274,8 +264,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on - event - test_event1" 
hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") @@ -285,8 +275,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off - event - test_event2" hass.states.async_set(entry.entity_id, STATE_IDLE) hass.bus.async_fire("test_event1") @@ -296,8 +286,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_idle - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_idle - event - test_event3" hass.states.async_set(entry.entity_id, STATE_PAUSED) hass.bus.async_fire("test_event1") @@ -307,8 +297,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "is_paused - event - test_event4" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "is_paused - event - test_event4" hass.states.async_set(entry.entity_id, STATE_PLAYING) hass.bus.async_fire("test_event1") @@ -318,8 +308,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 5 - assert calls[4].data["some"] == "is_playing - event - test_event5" + assert len(service_calls) == 5 + assert service_calls[4].data["some"] == "is_playing - event - test_event5" hass.states.async_set(entry.entity_id, STATE_BUFFERING) hass.bus.async_fire("test_event1") @@ -329,15 +319,15 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert 
len(calls) == 6 - assert calls[5].data["some"] == "is_buffering - event - test_event6" + assert len(service_calls) == 6 + assert service_calls[5].data["some"] == "is_buffering - event - test_event6" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -380,5 +370,5 @@ async def test_if_state_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on - event - test_event1" diff --git a/tests/components/media_player/test_device_trigger.py b/tests/components/media_player/test_device_trigger.py index e9d5fbd646e..4bb27b73f24 100644 --- a/tests/components/media_player/test_device_trigger.py +++ b/tests/components/media_player/test_device_trigger.py @@ -28,7 +28,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -37,12 +36,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -209,7 +202,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -265,8 +258,8 
@@ async def test_if_fires_on_state_change( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { "turned_on - device - media_player.test_5678 - off - on - None", "changed_states - device - media_player.test_5678 - off - on - None", } @@ -274,8 +267,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning off. hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { "turned_off - device - media_player.test_5678 - on - off - None", "changed_states - device - media_player.test_5678 - on - off - None", } @@ -283,8 +276,8 @@ async def test_if_fires_on_state_change( # Fake that the entity becomes idle. hass.states.async_set(entry.entity_id, STATE_IDLE) await hass.async_block_till_done() - assert len(calls) == 6 - assert {calls[4].data["some"], calls[5].data["some"]} == { + assert len(service_calls) == 6 + assert {service_calls[4].data["some"], service_calls[5].data["some"]} == { "idle - device - media_player.test_5678 - off - idle - None", "changed_states - device - media_player.test_5678 - off - idle - None", } @@ -292,8 +285,8 @@ async def test_if_fires_on_state_change( # Fake that the entity starts playing. 
hass.states.async_set(entry.entity_id, STATE_PLAYING) await hass.async_block_till_done() - assert len(calls) == 8 - assert {calls[6].data["some"], calls[7].data["some"]} == { + assert len(service_calls) == 8 + assert {service_calls[6].data["some"], service_calls[7].data["some"]} == { "playing - device - media_player.test_5678 - idle - playing - None", "changed_states - device - media_player.test_5678 - idle - playing - None", } @@ -301,8 +294,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is paused. hass.states.async_set(entry.entity_id, STATE_PAUSED) await hass.async_block_till_done() - assert len(calls) == 10 - assert {calls[8].data["some"], calls[9].data["some"]} == { + assert len(service_calls) == 10 + assert {service_calls[8].data["some"], service_calls[9].data["some"]} == { "paused - device - media_player.test_5678 - playing - paused - None", "changed_states - device - media_player.test_5678 - playing - paused - None", } @@ -310,8 +303,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is buffering. hass.states.async_set(entry.entity_id, STATE_BUFFERING) await hass.async_block_till_done() - assert len(calls) == 12 - assert {calls[10].data["some"], calls[11].data["some"]} == { + assert len(service_calls) == 12 + assert {service_calls[10].data["some"], service_calls[11].data["some"]} == { "buffering - device - media_player.test_5678 - paused - buffering - None", "changed_states - device - media_player.test_5678 - paused - buffering - None", } @@ -321,7 +314,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -369,9 +362,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is turning on. 
hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "turned_on - device - media_player.test_5678 - off - on - None" ) @@ -380,7 +373,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -426,16 +419,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - off - on - 0:00:05" ) diff --git a/tests/components/media_source/test_local_source.py b/tests/components/media_source/test_local_source.py index 4c7fbd06edc..de90f229a85 100644 --- a/tests/components/media_source/test_local_source.py +++ b/tests/components/media_source/test_local_source.py @@ -1,5 +1,6 @@ """Test Local Media Source.""" +from collections.abc import AsyncGenerator from http import HTTPStatus import io from pathlib import Path @@ -7,7 +8,6 @@ from tempfile import TemporaryDirectory from unittest.mock import patch import pytest -from typing_extensions import AsyncGenerator from homeassistant.components import media_source, websocket_api from homeassistant.components.media_source import const diff --git 
a/tests/components/melcloud/test_diagnostics.py b/tests/components/melcloud/test_diagnostics.py index cbb35eadfd4..32ec94a54d1 100644 --- a/tests/components/melcloud/test_diagnostics.py +++ b/tests/components/melcloud/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.melcloud.const import DOMAIN from homeassistant.core import HomeAssistant @@ -36,4 +37,4 @@ async def test_get_config_entry_diagnostics( diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) - assert diagnostics == snapshot + assert diagnostics == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/melnor/conftest.py b/tests/components/melnor/conftest.py index 38bc1a62d51..f30213c4efd 100644 --- a/tests/components/melnor/conftest.py +++ b/tests/components/melnor/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from datetime import UTC, datetime, time, timedelta from unittest.mock import AsyncMock, _patch, patch from melnor_bluetooth.device import Device import pytest -from typing_extensions import Generator from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak from homeassistant.components.melnor.const import DOMAIN diff --git a/tests/components/melnor/test_sensor.py b/tests/components/melnor/test_sensor.py index d04494d44ad..a2ba23d9e61 100644 --- a/tests/components/melnor/test_sensor.py +++ b/tests/components/melnor/test_sensor.py @@ -2,6 +2,8 @@ from __future__ import annotations +from datetime import timedelta + from freezegun import freeze_time from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass @@ -51,7 +53,7 @@ async def test_minutes_remaining_sensor(hass: HomeAssistant) -> None: entry = mock_config_entry(hass) device = mock_melnor_device() - end_time = now + dt_util.dt.timedelta(minutes=10) + end_time = now 
+ timedelta(minutes=10) # we control this mock @@ -76,7 +78,7 @@ async def test_minutes_remaining_sensor(hass: HomeAssistant) -> None: # Turn valve on device.zone1._is_watering = True - async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) + async_fire_time_changed(hass, now + timedelta(seconds=10)) await hass.async_block_till_done() # Valve is on, report 10 @@ -94,7 +96,7 @@ async def test_schedule_next_cycle_sensor(hass: HomeAssistant) -> None: entry = mock_config_entry(hass) device = mock_melnor_device() - next_cycle = now + dt_util.dt.timedelta(minutes=10) + next_cycle = now + timedelta(minutes=10) # we control this mock device.zone1.frequency._next_run_time = next_cycle @@ -118,7 +120,7 @@ async def test_schedule_next_cycle_sensor(hass: HomeAssistant) -> None: # Turn valve on device.zone1._schedule_enabled = True - async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) + async_fire_time_changed(hass, now + timedelta(seconds=10)) await hass.async_block_till_done() # Valve is on, report 10 diff --git a/tests/components/melnor/test_time.py b/tests/components/melnor/test_time.py index 1d12c3b47f8..50b51d31ff8 100644 --- a/tests/components/melnor/test_time.py +++ b/tests/components/melnor/test_time.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import time +from datetime import time, timedelta from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util @@ -46,7 +46,7 @@ async def test_schedule_start_time(hass: HomeAssistant) -> None: blocking=True, ) - async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) + async_fire_time_changed(hass, now + timedelta(seconds=10)) await hass.async_block_till_done() time_entity = hass.states.get("time.zone_1_schedule_start_time") diff --git a/tests/components/met_eireann/snapshots/test_weather.ambr b/tests/components/met_eireann/snapshots/test_weather.ambr index 90f36d09d25..de8b69de18a 100644 --- 
a/tests/components/met_eireann/snapshots/test_weather.ambr +++ b/tests/components/met_eireann/snapshots/test_weather.ambr @@ -1,104 +1,4 @@ # serializer version: 1 -# name: test_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: test_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: test_forecast_service[forecast] - dict({ - 'weather.somewhere': dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[forecast].1 - dict({ - 'weather.somewhere': dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 
10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.somewhere': dict({ diff --git a/tests/components/metoffice/snapshots/test_weather.ambr b/tests/components/metoffice/snapshots/test_weather.ambr index a6991a8631b..0bbc0e06a0a 100644 --- a/tests/components/metoffice/snapshots/test_weather.ambr +++ b/tests/components/metoffice/snapshots/test_weather.ambr @@ -1,658 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 13.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-25T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 19.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T18:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 17.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 14.0, - 
'wind_bearing': 'NW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T00:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 13.0, - 'wind_bearing': 'WSW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T03:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T09:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T15:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T18:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T00:00:00+00:00', - 'precipitation_probability': 11, - 'temperature': 9.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T03:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 8.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': 
'2020-04-27T06:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 8.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 4, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T18:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-27T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T00:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 8.0, - 'wind_bearing': 'NNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 7.0, - 'wind_bearing': 'W', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-28T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 6.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-28T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 
'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T15:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T18:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NNE', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T00:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'E', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-29T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 8.0, - 'wind_bearing': 'SSE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T06:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 8.0, - 'wind_bearing': 'SE', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T09:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 10.0, - 'wind_bearing': 'SE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 47, - 'temperature': 12.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'pouring', - 'datetime': '2020-04-29T15:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T18:00:00+00:00', - 'precipitation_probability': 39, - 'temperature': 12.0, - 'wind_bearing': 'SSE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T21:00:00+00:00', - 
'precipitation_probability': 19, - 'temperature': 11.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 13.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].3 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-25T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 19.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T18:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 17.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 14.0, - 'wind_bearing': 'NW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T00:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 13.0, - 'wind_bearing': 'WSW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T03:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 
'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T09:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T15:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T18:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T00:00:00+00:00', - 'precipitation_probability': 11, - 'temperature': 9.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T03:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 8.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T06:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 8.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 
'precipitation_probability': 4, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T18:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-27T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T00:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 8.0, - 'wind_bearing': 'NNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 7.0, - 'wind_bearing': 'W', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-28T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 6.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-28T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T15:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T18:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 
'condition': 'cloudy', - 'datetime': '2020-04-28T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NNE', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T00:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'E', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-29T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 8.0, - 'wind_bearing': 'SSE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T06:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 8.0, - 'wind_bearing': 'SE', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T09:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 10.0, - 'wind_bearing': 'SE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 47, - 'temperature': 12.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'pouring', - 'datetime': '2020-04-29T15:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T18:00:00+00:00', - 'precipitation_probability': 39, - 'temperature': 12.0, - 'wind_bearing': 'SSE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T21:00:00+00:00', - 'precipitation_probability': 19, - 'temperature': 11.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].4 - dict({ - 'forecast': list([ - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.met_office_wavertree_daily': dict({ diff --git a/tests/components/mfi/test_sensor.py b/tests/components/mfi/test_sensor.py index 
49efdd5dc71..37512ca78f8 100644 --- a/tests/components/mfi/test_sensor.py +++ b/tests/components/mfi/test_sensor.py @@ -116,13 +116,13 @@ async def test_setup_adds_proper_devices(hass: HomeAssistant) -> None: @pytest.fixture(name="port") -def port_fixture(): +def port_fixture() -> mock.MagicMock: """Port fixture.""" return mock.MagicMock() @pytest.fixture(name="sensor") -def sensor_fixture(hass, port): +def sensor_fixture(hass: HomeAssistant, port: mock.MagicMock) -> mfi.MfiSensor: """Sensor fixture.""" sensor = mfi.MfiSensor(port, hass) sensor.hass = hass diff --git a/tests/components/microsoft/test_tts.py b/tests/components/microsoft/test_tts.py index 082def901c5..0f11501843e 100644 --- a/tests/components/microsoft/test_tts.py +++ b/tests/components/microsoft/test_tts.py @@ -8,32 +8,20 @@ from pycsspeechtts import pycsspeechtts import pytest from homeassistant.components import tts -from homeassistant.components.media_player import ( - ATTR_MEDIA_CONTENT_ID, - DOMAIN as DOMAIN_MP, - SERVICE_PLAY_MEDIA, -) +from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID from homeassistant.components.microsoft.tts import SUPPORTED_LANGUAGES from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ServiceNotFound from homeassistant.setup import async_setup_component -from tests.common import async_mock_service from tests.components.tts.common import retrieve_media from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Mock media player calls.""" - return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) @pytest.fixture(autouse=True) @@ -58,7 +46,7 @@ 
async def test_service_say( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say.""" @@ -77,9 +65,11 @@ async def test_service_say( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -102,7 +92,7 @@ async def test_service_say_en_gb_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with en-gb code in the config.""" @@ -130,9 +120,11 @@ async def test_service_say_en_gb_config( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -154,7 +146,7 @@ async def test_service_say_en_gb_service( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with en-gb code in the service.""" @@ -177,9 +169,11 @@ async def test_service_say_en_gb_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -201,7 +195,7 @@ async def test_service_say_fa_ir_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with fa-ir code in the config.""" @@ 
-229,9 +223,11 @@ async def test_service_say_fa_ir_config( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -253,7 +249,7 @@ async def test_service_say_fa_ir_service( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with fa-ir code in the service.""" @@ -280,9 +276,11 @@ async def test_service_say_fa_ir_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -317,9 +315,7 @@ def test_supported_languages() -> None: assert len(SUPPORTED_LANGUAGES) > 100 -async def test_invalid_language( - hass: HomeAssistant, mock_tts, calls: list[ServiceCall] -) -> None: +async def test_invalid_language(hass: HomeAssistant, mock_tts) -> None: """Test setup component with invalid language.""" await async_setup_component( hass, @@ -339,7 +335,6 @@ async def test_invalid_language( blocking=True, ) - assert len(calls) == 0 assert len(mock_tts.mock_calls) == 0 @@ -347,7 +342,7 @@ async def test_service_say_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with http error.""" mock_tts.return_value.speak.side_effect = pycsspeechtts.requests.HTTPError @@ -366,9 +361,11 @@ async def test_service_say_error( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + 
hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.NOT_FOUND ) diff --git a/tests/components/mjpeg/conftest.py b/tests/components/mjpeg/conftest.py index 00eaf946113..12e0b4c0faf 100644 --- a/tests/components/mjpeg/conftest.py +++ b/tests/components/mjpeg/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from requests_mock import Mocker -from typing_extensions import Generator from homeassistant.components.mjpeg.const import ( CONF_MJPEG_URL, diff --git a/tests/components/mobile_app/conftest.py b/tests/components/mobile_app/conftest.py index 657b80a759a..53e90cb61ae 100644 --- a/tests/components/mobile_app/conftest.py +++ b/tests/components/mobile_app/conftest.py @@ -1,6 +1,7 @@ """Tests for mobile_app component.""" from http import HTTPStatus +from typing import Any from aiohttp.test_utils import TestClient import pytest @@ -15,7 +16,9 @@ from tests.typing import ClientSessionGenerator @pytest.fixture -async def create_registrations(hass, webhook_client): +async def create_registrations( + hass: HomeAssistant, webhook_client: TestClient +) -> tuple[dict[str, Any], dict[str, Any]]: """Return two new registrations.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -37,7 +40,7 @@ async def create_registrations(hass, webhook_client): @pytest.fixture -async def push_registration(hass, webhook_client): +async def push_registration(hass: HomeAssistant, webhook_client: TestClient): """Return registration with push notifications enabled.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -67,7 +70,7 @@ async def webhook_client( @pytest.fixture(autouse=True) -async def setup_ws(hass): +async def setup_ws(hass: HomeAssistant) -> None: """Configure the websocket_api component.""" assert await async_setup_component(hass, "repairs", {}) assert await async_setup_component(hass, "websocket_api", {}) diff --git 
a/tests/components/mobile_app/test_binary_sensor.py b/tests/components/mobile_app/test_binary_sensor.py index acebd8796b7..9ffb61f92ab 100644 --- a/tests/components/mobile_app/test_binary_sensor.py +++ b/tests/components/mobile_app/test_binary_sensor.py @@ -1,7 +1,9 @@ """Entity tests for mobile_app.""" from http import HTTPStatus +from typing import Any +from aiohttp.test_utils import TestClient import pytest from homeassistant.const import STATE_UNKNOWN @@ -12,8 +14,8 @@ from homeassistant.helpers import device_registry as dr async def test_sensor( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = create_registrations[1]["webhook_id"] @@ -98,7 +100,9 @@ async def test_sensor( async def test_sensor_must_register( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors must be registered before updating.""" webhook_id = create_registrations[1]["webhook_id"] @@ -122,8 +126,8 @@ async def test_sensor_must_register( async def test_sensor_id_no_dupes( hass: HomeAssistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test that a duplicate unique ID in registration updates the sensor.""" @@ -185,7 +189,9 @@ async def test_sensor_id_no_dupes( async def test_register_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered, when there is no (unknown) state.""" webhook_id = 
create_registrations[1]["webhook_id"] @@ -244,7 +250,9 @@ async def test_register_sensor_no_state( async def test_update_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be updated, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mobile_app/test_device_tracker.py b/tests/components/mobile_app/test_device_tracker.py index e3e2ce3227a..d1cbc21c36b 100644 --- a/tests/components/mobile_app/test_device_tracker.py +++ b/tests/components/mobile_app/test_device_tracker.py @@ -1,12 +1,17 @@ """Test mobile app device tracker.""" from http import HTTPStatus +from typing import Any + +from aiohttp.test_utils import TestClient from homeassistant.core import HomeAssistant async def test_sending_location( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( @@ -76,7 +81,9 @@ async def test_sending_location( async def test_restoring_location( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( diff --git a/tests/components/mobile_app/test_init.py b/tests/components/mobile_app/test_init.py index 15380a0d8d7..e1c7ed27cf9 100644 --- a/tests/components/mobile_app/test_init.py +++ b/tests/components/mobile_app/test_init.py @@ -89,6 +89,7 @@ async def _test_create_cloud_hook( "homeassistant.components.cloud.async_active_subscription", return_value=async_active_subscription_return_value, ), + 
patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), patch("homeassistant.components.cloud.async_is_connected", return_value=True), patch( "homeassistant.components.cloud.async_get_or_create_cloudhook", @@ -187,3 +188,41 @@ async def test_create_cloud_hook_after_connection( ) await _test_create_cloud_hook(hass, hass_admin_user, {}, False, additional_steps) + + +@pytest.mark.parametrize( + ("cloud_logged_in", "should_cloudhook_exist"), + [(True, True), (False, False)], +) +async def test_delete_cloud_hook( + hass: HomeAssistant, + hass_admin_user: MockUser, + cloud_logged_in: bool, + should_cloudhook_exist: bool, +) -> None: + """Test deleting the cloud hook only when logged out of the cloud.""" + + config_entry = MockConfigEntry( + data={ + **REGISTER_CLEARTEXT, + CONF_WEBHOOK_ID: "test-webhook-id", + ATTR_DEVICE_NAME: "Test", + ATTR_DEVICE_ID: "Test", + CONF_USER_ID: hass_admin_user.id, + CONF_CLOUDHOOK_URL: "https://hook-url-already-exists", + }, + domain=DOMAIN, + title="Test", + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.cloud.async_is_logged_in", + return_value=cloud_logged_in, + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + assert (CONF_CLOUDHOOK_URL in config_entry.data) == should_cloudhook_exist diff --git a/tests/components/mobile_app/test_sensor.py b/tests/components/mobile_app/test_sensor.py index a7fb0ffc183..6411274fc4e 100644 --- a/tests/components/mobile_app/test_sensor.py +++ b/tests/components/mobile_app/test_sensor.py @@ -1,8 +1,10 @@ """Entity tests for mobile_app.""" from http import HTTPStatus +from typing import Any from unittest.mock import patch +from aiohttp.test_utils import TestClient import pytest from homeassistant.components.sensor import SensorDeviceClass @@ -14,7 +16,11 @@ from homeassistant.const import ( ) from homeassistant.core import 
HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) @pytest.mark.parametrize( @@ -28,12 +34,12 @@ async def test_sensor( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, - unit_system, - state_unit, - state1, - state2, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + unit_system: UnitSystem, + state_unit: UnitOfTemperature, + state1: str, + state2: str, ) -> None: """Test that sensors can be registered and updated.""" hass.config.units = unit_system @@ -149,13 +155,13 @@ async def test_sensor( ) async def test_sensor_migration( hass: HomeAssistant, - create_registrations, - webhook_client, - unique_id, - unit_system, - state_unit, - state1, - state2, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + unique_id: str, + unit_system: UnitSystem, + state_unit: UnitOfTemperature, + state1: str, + state2: str, ) -> None: """Test migration to RestoreSensor.""" hass.config.units = unit_system @@ -243,7 +249,9 @@ async def test_sensor_migration( async def test_sensor_must_register( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors must be registered before updating.""" webhook_id = create_registrations[1]["webhook_id"] @@ -265,8 +273,8 @@ async def test_sensor_must_register( async def test_sensor_id_no_dupes( hass: HomeAssistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test that a 
duplicate unique ID in registration updates the sensor.""" @@ -331,7 +339,9 @@ async def test_sensor_id_no_dupes( async def test_register_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -390,7 +400,9 @@ async def test_register_sensor_no_state( async def test_update_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be updated, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -464,11 +476,11 @@ async def test_update_sensor_no_state( ) async def test_sensor_datetime( hass: HomeAssistant, - create_registrations, - webhook_client, - device_class, - native_value, - state_value, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + device_class: SensorDeviceClass, + native_value: str, + state_value: str, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = create_registrations[1]["webhook_id"] @@ -505,8 +517,8 @@ async def test_sensor_datetime( async def test_default_disabling_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be disabled by default upon registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -543,8 +555,8 @@ async def test_default_disabling_entity( async def test_updating_disabled_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + 
create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors return error if disabled in instance.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mobile_app/test_timers.py b/tests/components/mobile_app/test_timers.py new file mode 100644 index 00000000000..9f7d4cebc58 --- /dev/null +++ b/tests/components/mobile_app/test_timers.py @@ -0,0 +1,70 @@ +"""Test mobile app timers.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.mobile_app import DATA_DEVICES, DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent as intent_helper + + +@pytest.mark.parametrize( + ("intent_args", "message"), + [ + ( + {}, + "0:02:00 timer finished", + ), + ( + {"name": {"value": "pizza"}}, + "pizza finished", + ), + ], +) +async def test_timer_events( + hass: HomeAssistant, push_registration, intent_args: dict, message: str +) -> None: + """Test for timer events.""" + webhook_id = push_registration["webhook_id"] + device_id = hass.data[DOMAIN][DATA_DEVICES][webhook_id].id + + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "minutes": {"value": 2}, + } + | intent_args, + device_id=device_id, + ) + + with patch( + "homeassistant.components.mobile_app.notify.MobileAppNotificationService.async_send_message" + ) as mock_send_message: + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_DECREASE_TIMER, + { + "minutes": {"value": 2}, + }, + device_id=device_id, + ) + await hass.async_block_till_done() + + assert mock_send_message.mock_calls[0][2] == { + "target": [webhook_id], + "message": message, + "data": { + "channel": "Timers", + "group": "timers", + "importance": "high", + "ttl": 0, + "priority": "high", + "push": { + "interruption-level": "time-sensitive", + }, + }, + } diff --git a/tests/components/mobile_app/test_webhook.py 
b/tests/components/mobile_app/test_webhook.py index ca5c9936409..61e342a45ce 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -1,16 +1,19 @@ """Webhook tests for mobile_app.""" from binascii import unhexlify +from collections.abc import Callable from http import HTTPStatus import json +from typing import Any from unittest.mock import ANY, patch +from aiohttp.test_utils import TestClient from nacl.encoding import Base64Encoder from nacl.secret import SecretBox import pytest from homeassistant.components.camera import CameraEntityFeature -from homeassistant.components.mobile_app.const import CONF_SECRET, DOMAIN +from homeassistant.components.mobile_app.const import CONF_SECRET, DATA_DEVICES, DOMAIN from homeassistant.components.tag import EVENT_TAG_SCANNED from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN from homeassistant.const import ( @@ -31,7 +34,7 @@ from tests.components.conversation import MockAgent @pytest.fixture -async def homeassistant(hass): +async def homeassistant(hass: HomeAssistant) -> None: """Load the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) @@ -93,7 +96,8 @@ def decrypt_payload_legacy(secret_key, encrypted_data): async def test_webhook_handle_render_template( - create_registrations, webhook_client + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we render templates properly.""" resp = await webhook_client.post( @@ -121,7 +125,9 @@ async def test_webhook_handle_render_template( async def test_webhook_handle_call_services( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we call services properly.""" calls = async_mock_service(hass, "test", "mobile_app") @@ -137,7 +143,9 @@ async def test_webhook_handle_call_services( 
async def test_webhook_handle_fire_event( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can fire events.""" events = [] @@ -161,7 +169,7 @@ async def test_webhook_handle_fire_event( assert events[0].data["hello"] == "yo world" -async def test_webhook_update_registration(webhook_client) -> None: +async def test_webhook_update_registration(webhook_client: TestClient) -> None: """Test that a we can update an existing registration via webhook.""" register_resp = await webhook_client.post( "/api/mobile_app/registrations", json=REGISTER_CLEARTEXT @@ -186,7 +194,9 @@ async def test_webhook_update_registration(webhook_client) -> None: async def test_webhook_handle_get_zones( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can get zones properly.""" # Zone is already loaded as part of the fixture, @@ -238,11 +248,14 @@ async def test_webhook_handle_get_zones( async def test_webhook_handle_get_config( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can get config properly.""" webhook_id = create_registrations[1]["webhook_id"] webhook_url = f"/api/webhook/{webhook_id}" + device: dr.DeviceEntry = hass.data[DOMAIN][DATA_DEVICES][webhook_id] # Create two entities for sensor in ( @@ -280,6 +293,7 @@ async def test_webhook_handle_get_config( "latitude": hass_config["latitude"], "longitude": hass_config["longitude"], "elevation": hass_config["elevation"], + "hass_device_id": device.id, "unit_system": hass_config["unit_system"], "location_name": hass_config["location_name"], "time_zone": hass_config["time_zone"], @@ -297,7 
+311,9 @@ async def test_webhook_handle_get_config( async def test_webhook_returns_error_incorrect_json( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that an error is returned when JSON is invalid.""" resp = await webhook_client.post( @@ -321,7 +337,11 @@ async def test_webhook_returns_error_incorrect_json( ], ) async def test_webhook_handle_decryption( - hass: HomeAssistant, webhook_client, create_registrations, msg, generate_response + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + msg: dict[str, Any], + generate_response: Callable[[HomeAssistant], dict[str, Any]], ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -344,7 +364,8 @@ async def test_webhook_handle_decryption( async def test_webhook_handle_decryption_legacy( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -367,7 +388,9 @@ async def test_webhook_handle_decryption_legacy( async def test_webhook_handle_decryption_fail( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -410,7 +433,9 @@ async def test_webhook_handle_decryption_fail( async def test_webhook_handle_decryption_legacy_fail( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: 
pytest.LogCaptureFixture, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -453,7 +478,8 @@ async def test_webhook_handle_decryption_legacy_fail( async def test_webhook_handle_decryption_legacy_upgrade( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -508,7 +534,8 @@ async def test_webhook_handle_decryption_legacy_upgrade( async def test_webhook_requires_encryption( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that encrypted registrations only accept encrypted data.""" resp = await webhook_client.post( @@ -525,7 +552,9 @@ async def test_webhook_requires_encryption( async def test_webhook_update_location_without_locations( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" @@ -562,7 +591,9 @@ async def test_webhook_update_location_without_locations( async def test_webhook_update_location_with_gps( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( @@ -584,7 +615,9 @@ async def test_webhook_update_location_with_gps( async def test_webhook_update_location_with_gps_without_accuracy( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( @@ 
-602,7 +635,9 @@ async def test_webhook_update_location_with_gps_without_accuracy( async def test_webhook_update_location_with_location_name( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" @@ -664,7 +699,9 @@ async def test_webhook_update_location_with_location_name( async def test_webhook_enable_encryption( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that encryption can be added to a reg initially created without.""" webhook_id = create_registrations[1]["webhook_id"] @@ -715,7 +752,9 @@ async def test_webhook_enable_encryption( async def test_webhook_camera_stream_non_existent( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for a non-existent camera.""" webhook_id = create_registrations[1]["webhook_id"] @@ -734,7 +773,9 @@ async def test_webhook_camera_stream_non_existent( async def test_webhook_camera_stream_non_hls( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for a non-HLS/stream-supporting camera.""" hass.states.async_set("camera.non_stream_camera", "idle", {"supported_features": 0}) @@ -759,7 +800,9 @@ async def test_webhook_camera_stream_non_hls( async def test_webhook_camera_stream_stream_available( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: 
"""Test fetching camera stream URLs for an HLS/stream-supporting camera.""" hass.states.async_set( @@ -789,7 +832,9 @@ async def test_webhook_camera_stream_stream_available( async def test_webhook_camera_stream_stream_available_but_errors( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for an HLS/stream-supporting camera but that streaming errors.""" hass.states.async_set( @@ -821,8 +866,8 @@ async def test_webhook_camera_stream_stream_available_but_errors( async def test_webhook_handle_scan_tag( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can scan tags.""" device = device_registry.async_get_device(identifiers={(DOMAIN, "mock-device-id")}) @@ -845,7 +890,9 @@ async def test_webhook_handle_scan_tag( async def test_register_sensor_limits_state_class( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we limit state classes to sensors only.""" webhook_id = create_registrations[1]["webhook_id"] @@ -888,8 +935,8 @@ async def test_register_sensor_limits_state_class( async def test_reregister_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can add more info in re-registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -990,11 +1037,11 @@ async def test_reregister_sensor( assert entry.original_icon is None +@pytest.mark.usefixtures("homeassistant") async def 
test_webhook_handle_conversation_process( hass: HomeAssistant, - homeassistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, mock_conversation_agent: MockAgent, ) -> None: """Test that we can converse.""" @@ -1040,9 +1087,8 @@ async def test_webhook_handle_conversation_process( async def test_sending_sensor_state( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, - caplog: pytest.LogCaptureFixture, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can register and send sensor state as number and None.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mochad/test_light.py b/tests/components/mochad/test_light.py index 872bd3a9d61..49beebbaec6 100644 --- a/tests/components/mochad/test_light.py +++ b/tests/components/mochad/test_light.py @@ -18,7 +18,7 @@ def pymochad_mock(): @pytest.fixture -def light_mock(hass, brightness): +def light_mock(hass: HomeAssistant, brightness: int) -> mochad.MochadLight: """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} diff --git a/tests/components/mochad/test_switch.py b/tests/components/mochad/test_switch.py index 750dd48296e..9fea3b5c14c 100644 --- a/tests/components/mochad/test_switch.py +++ b/tests/components/mochad/test_switch.py @@ -21,7 +21,7 @@ def pymochad_mock(): @pytest.fixture -def switch_mock(hass): +def switch_mock(hass: HomeAssistant) -> mochad.MochadSwitch: """Mock switch.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_switch"} diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index 067fb2d123d..5c612f9f8ad 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -37,7 +37,7 @@ 
TEST_PORT_SERIAL = "usb01" class ReadResult: """Storage class for register read results.""" - def __init__(self, register_words): + def __init__(self, register_words) -> None: """Init.""" self.registers = register_words self.bits = register_words @@ -61,7 +61,7 @@ def register_words_fixture(): @pytest.fixture(name="config_addon") -def config_addon_fixture(): +def config_addon_fixture() -> dict[str, Any] | None: """Add extra configuration items.""" return None @@ -192,7 +192,9 @@ async def mock_test_state_fixture( @pytest.fixture(name="mock_modbus_ha") -async def mock_modbus_ha_fixture(hass, mock_modbus): +async def mock_modbus_ha_fixture( + hass: HomeAssistant, mock_modbus: mock.AsyncMock +) -> mock.AsyncMock: """Load homeassistant to allow service calls.""" assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() diff --git a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index a52285b22d7..5578234ee6e 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -766,7 +766,7 @@ async def test_service_climate_swing_update( ("temperature", "result", "do_config"), [ ( - 35, + 31, [0x00], { CONF_CLIMATES: [ @@ -781,7 +781,7 @@ async def test_service_climate_swing_update( }, ), ( - 36, + 32, [0x00, 0x00], { CONF_CLIMATES: [ @@ -796,7 +796,7 @@ async def test_service_climate_swing_update( }, ), ( - 37.5, + 33.5, [0x00, 0x00], { CONF_CLIMATES: [ @@ -811,7 +811,7 @@ async def test_service_climate_swing_update( }, ), ( - "39", + "34", [0x00, 0x00, 0x00, 0x00], { CONF_CLIMATES: [ diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 920003ad0c9..d4dc5b05fac 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -48,15 +48,7 @@ from homeassistant.components.modbus.const import ( CONF_FAN_MODE_HIGH, CONF_FAN_MODE_OFF, CONF_FAN_MODE_ON, - CONF_FAN_MODE_REGISTER, 
CONF_FAN_MODE_VALUES, - CONF_HVAC_MODE_COOL, - CONF_HVAC_MODE_DRY, - CONF_HVAC_MODE_HEAT, - CONF_HVAC_MODE_HEAT_COOL, - CONF_HVAC_MODE_REGISTER, - CONF_HVAC_MODE_VALUES, - CONF_HVAC_ONOFF_REGISTER, CONF_INPUT_TYPE, CONF_MSG_WAIT, CONF_PARITY, @@ -67,12 +59,10 @@ from homeassistant.components.modbus.const import ( CONF_SWAP_BYTE, CONF_SWAP_WORD, CONF_SWAP_WORD_BYTE, - CONF_SWING_MODE_REGISTER, CONF_SWING_MODE_SWING_BOTH, CONF_SWING_MODE_SWING_OFF, CONF_SWING_MODE_SWING_ON, CONF_SWING_MODE_VALUES, - CONF_TARGET_TEMP, CONF_VIRTUAL_COUNT, DEFAULT_SCAN_INTERVAL, MODBUS_DOMAIN as DOMAIN, @@ -88,7 +78,6 @@ from homeassistant.components.modbus.const import ( ) from homeassistant.components.modbus.validators import ( check_config, - check_hvac_target_temp_registers, duplicate_fan_mode_validator, duplicate_swing_mode_validator, hvac_fixedsize_reglist_validator, @@ -457,27 +446,6 @@ async def test_check_config(hass: HomeAssistant, do_config) -> None: ], } ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - ], - } - ], ], ) async def test_check_config_sensor(hass: HomeAssistant, do_config) -> None: @@ -510,225 +478,6 @@ async def test_check_config_sensor(hass: HomeAssistant, do_config) -> None: ], } ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - ], - } - ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, 
- CONF_ADDRESS: 118, - CONF_SLAVE: 0, - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 119, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 118, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - }, - }, - }, - ], - } - ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 118, - CONF_SLAVE: 0, - CONF_TARGET_TEMP: [99], - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - ], - } - ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_TARGET_TEMP: [117], - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: [121], - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - ], - } - ], - [ # Testing Swing modes - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_SWING_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_SWING_MODE_VALUES: { - 
CONF_SWING_MODE_SWING_ON: 0, - CONF_SWING_MODE_SWING_BOTH: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 119, - CONF_SLAVE: 0, - CONF_TARGET_TEMP: 118, - CONF_SWING_MODE_REGISTER: { - CONF_ADDRESS: [120], - CONF_SWING_MODE_VALUES: { - CONF_SWING_MODE_SWING_ON: 0, - CONF_SWING_MODE_SWING_BOTH: 1, - }, - }, - }, - ], - } - ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_TARGET_TEMP: [130, 131, 132, 133, 134, 135, 136], - CONF_SLAVE: 0, - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 118, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 122, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 118, - CONF_TARGET_TEMP: [130, 131, 132, 133, 134, 135, 136], - CONF_SLAVE: 0, - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 130, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 122, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - ], - } - ], ], ) async def test_check_config_climate(hass: HomeAssistant, do_config) -> None: @@ -737,83 +486,6 @@ async def test_check_config_climate(hass: HomeAssistant, do_config) -> None: assert len(do_config[0][CONF_CLIMATES]) == 1 -@pytest.mark.parametrize( - "do_config", - [ - [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1, - CONF_TARGET_TEMP: [117, 121, 119, 150, 151, 152, 156], - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 119, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - 
CONF_HVAC_MODE_HEAT_COOL: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 117, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 121, - }, - }, - ], - [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1, - CONF_TARGET_TEMP: [117], - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 117, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - CONF_HVAC_MODE_HEAT_COOL: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 117, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 117, - }, - CONF_SWING_MODE_REGISTER: { - CONF_ADDRESS: 117, - }, - }, - ], - [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1, - CONF_TARGET_TEMP: [117], - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 117, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - CONF_HVAC_MODE_HEAT_COOL: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 117, - CONF_SWING_MODE_REGISTER: { - CONF_ADDRESS: [117], - }, - }, - ], - ], -) -async def test_climate_conflict_addresses(do_config) -> None: - """Test conflicts among the addresses of target temp and other climate addresses.""" - check_hvac_target_temp_registers(do_config[0]) - assert CONF_HVAC_MODE_REGISTER not in do_config[0] - assert CONF_HVAC_ONOFF_REGISTER not in do_config[0] - assert CONF_FAN_MODE_REGISTER not in do_config[0] - assert CONF_SWING_MODE_REGISTER not in do_config[0] - - @pytest.mark.parametrize( "do_config", [ @@ -852,157 +524,6 @@ async def test_duplicate_swing_mode_validator(do_config) -> None: assert len(do_config[CONF_SWING_MODE_VALUES]) == 2 -@pytest.mark.parametrize( - ("do_config", "sensor_cnt"), - [ - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + "1", - CONF_ADDRESS: 119, - CONF_SLAVE: 0, - }, - ], - }, - ], - 2, - ), - ( - [ - { - 
CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + "1", - CONF_ADDRESS: 117, - CONF_SLAVE: 1, - }, - ], - }, - ], - 2, - ), - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + "1", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - ], - }, - ], - 1, - ), - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + "1", - CONF_ADDRESS: 119, - CONF_SLAVE: 0, - }, - ], - }, - { - CONF_NAME: TEST_MODBUS_NAME + "1", - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 119, - CONF_SLAVE: 0, - }, - ], - }, - ], - 2, - ), - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1179, - CONF_SLAVE: 0, - }, - ], - }, - ], - 1, - ), - ], -) -async def test_duplicate_addresses(hass: HomeAssistant, do_config, sensor_cnt) -> None: - """Test duplicate entity validator.""" - check_config(hass, do_config) - use_inx = len(do_config) - 1 - assert len(do_config[use_inx][CONF_SENSORS]) == sensor_cnt - - @pytest.mark.parametrize( "do_config", [ diff --git 
a/tests/components/modbus/test_sensor.py b/tests/components/modbus/test_sensor.py index 20ff558fce6..87015fa634c 100644 --- a/tests/components/modbus/test_sensor.py +++ b/tests/components/modbus/test_sensor.py @@ -1335,7 +1335,7 @@ async def test_wrap_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None @pytest.fixture(name="mock_restore") -async def mock_restore(hass): +async def mock_restore(hass: HomeAssistant) -> None: """Mock restore cache.""" mock_restore_cache_with_extra_data( hass, diff --git a/tests/components/mold_indicator/test_sensor.py b/tests/components/mold_indicator/test_sensor.py index 760d82dfedc..2de1d34b403 100644 --- a/tests/components/mold_indicator/test_sensor.py +++ b/tests/components/mold_indicator/test_sensor.py @@ -18,7 +18,7 @@ from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) -def init_sensors_fixture(hass): +def init_sensors_fixture(hass: HomeAssistant) -> None: """Set up things to be run when tests are started.""" hass.states.async_set( "test.indoortemp", "20", {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS} diff --git a/tests/components/monoprice/test_media_player.py b/tests/components/monoprice/test_media_player.py index f7d88692cf5..c4ba998261b 100644 --- a/tests/components/monoprice/test_media_player.py +++ b/tests/components/monoprice/test_media_player.py @@ -58,7 +58,7 @@ class AttrDict(dict): class MockMonoprice: """Mock for pymonoprice object.""" - def __init__(self): + def __init__(self) -> None: """Init mock object.""" self.zones = defaultdict( lambda: AttrDict(power=True, volume=0, mute=True, source=1) diff --git a/tests/components/moon/conftest.py b/tests/components/moon/conftest.py index 6fa54fcb603..3cf0eb1afc3 100644 --- a/tests/components/moon/conftest.py +++ b/tests/components/moon/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator 
from homeassistant.components.moon.const import DOMAIN diff --git a/tests/components/mopeka/test_config_flow.py b/tests/components/mopeka/test_config_flow.py index 826fe8db2aa..7a341052f22 100644 --- a/tests/components/mopeka/test_config_flow.py +++ b/tests/components/mopeka/test_config_flow.py @@ -2,8 +2,10 @@ from unittest.mock import patch +import voluptuous as vol + from homeassistant import config_entries -from homeassistant.components.mopeka.const import DOMAIN +from homeassistant.components.mopeka.const import CONF_MEDIUM_TYPE, DOMAIN, MediumType from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,13 +23,14 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "bluetooth_confirm" + with patch("homeassistant.components.mopeka.async_setup_entry", return_value=True): result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} + result["flow_id"], user_input={CONF_MEDIUM_TYPE: MediumType.PROPANE.value} ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {} + assert result2["data"] == {CONF_MEDIUM_TYPE: MediumType.PROPANE.value} assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" @@ -71,7 +74,10 @@ async def test_async_step_user_with_found_devices(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {} + assert CONF_MEDIUM_TYPE in result2["data"] + assert result2["data"][CONF_MEDIUM_TYPE] in [ + medium_type.value for medium_type in MediumType + ] assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" @@ -190,8 +196,44 @@ async def test_async_step_user_takes_precedence_over_discovery( ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert 
result2["data"] == {} + assert CONF_MEDIUM_TYPE in result2["data"] + assert result2["data"][CONF_MEDIUM_TYPE] in [ + medium_type.value for medium_type in MediumType + ] assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" # Verify the original one was aborted assert not hass.config_entries.flow.async_progress(DOMAIN) + + +async def test_async_step_reconfigure_options(hass: HomeAssistant) -> None: + """Test reconfig options: change MediumType from air to fresh water.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="aa:bb:cc:dd:75:10", + title="TD40/TD200 7510", + data={CONF_MEDIUM_TYPE: MediumType.AIR.value}, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.data[CONF_MEDIUM_TYPE] == MediumType.AIR.value + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + schema: vol.Schema = result["data_schema"] + medium_type_key = next( + iter(key for key in schema.schema if key == CONF_MEDIUM_TYPE) + ) + assert medium_type_key.default() == MediumType.AIR.value + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_MEDIUM_TYPE: MediumType.FRESH_WATER.value}, + ) + assert result2["type"] == FlowResultType.CREATE_ENTRY + + # Verify the new configuration + assert entry.data[CONF_MEDIUM_TYPE] == MediumType.FRESH_WATER.value diff --git a/tests/components/motionblinds_ble/__init__.py b/tests/components/motionblinds_ble/__init__.py index c2385555dbf..e1caef9f51f 100644 --- a/tests/components/motionblinds_ble/__init__.py +++ b/tests/components/motionblinds_ble/__init__.py @@ -1 +1,16 @@ """Tests for the Motionblinds Bluetooth integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> 
None: + """Mock a fully setup config entry.""" + + mock_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/motionblinds_ble/conftest.py b/tests/components/motionblinds_ble/conftest.py index 342e958eae4..f89cf4f305d 100644 --- a/tests/components/motionblinds_ble/conftest.py +++ b/tests/components/motionblinds_ble/conftest.py @@ -1,23 +1,142 @@ """Setup the Motionblinds Bluetooth tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch +from motionblindsble.const import MotionBlindType import pytest -from typing_extensions import Generator -TEST_MAC = "abcd" -TEST_NAME = f"MOTION_{TEST_MAC.upper()}" -TEST_ADDRESS = "test_adress" +from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak +from homeassistant.components.motionblinds_ble.const import ( + CONF_BLIND_TYPE, + CONF_LOCAL_NAME, + CONF_MAC_CODE, + DOMAIN, +) +from homeassistant.const import CONF_ADDRESS + +from tests.common import MockConfigEntry +from tests.components.bluetooth import generate_advertisement_data, generate_ble_device -@pytest.fixture(name="motionblinds_ble_connect", autouse=True) -def motion_blinds_connect_fixture( - enable_bluetooth: None, +@pytest.fixture +def address() -> str: + """Address fixture.""" + return "cc:cc:cc:cc:cc:cc" + + +@pytest.fixture +def mac_code(address: str) -> str: + """MAC code fixture.""" + return "".join(address.split(":")[-3:-1]).upper() + + +@pytest.fixture +def display_name(mac_code: str) -> str: + """Display name fixture.""" + return f"Motionblind {mac_code.upper()}" + + +@pytest.fixture +def name(display_name: str) -> str: + """Name fixture.""" + return display_name.lower().replace(" ", "_") + + +@pytest.fixture +def local_name(mac_code: str) -> str: + """Local name fixture.""" + return f"MOTION_{mac_code.upper()}" + + +@pytest.fixture +def blind_type() -> MotionBlindType: + 
"""Blind type fixture.""" + return MotionBlindType.ROLLER + + +@pytest.fixture +def service_info(local_name: str, address: str) -> BluetoothServiceInfoBleak: + """Service info fixture.""" + return BluetoothServiceInfoBleak( + name=local_name, + address=address, + device=generate_ble_device( + address=address, + name=local_name, + ), + rssi=-61, + manufacturer_data={000: b"test"}, + service_data={ + "test": bytearray(b"0000"), + }, + service_uuids=[ + "test", + ], + source="local", + advertisement=generate_advertisement_data( + manufacturer_data={000: b"test"}, + service_uuids=["test"], + ), + connectable=True, + time=0, + tx_power=-127, + ) + + +@pytest.fixture +def mock_motion_device( + blind_type: MotionBlindType, display_name: str +) -> Generator[AsyncMock]: + """Mock a MotionDevice.""" + + with patch( + "homeassistant.components.motionblinds_ble.MotionDevice", + autospec=True, + ) as mock_device: + device = mock_device.return_value + device.ble_device = Mock() + device.display_name = display_name + device.blind_type = blind_type + yield device + + +@pytest.fixture +def mock_config_entry( + blind_type: MotionBlindType, address: str, display_name: str, mac_code: str +) -> MockConfigEntry: + """Config entry fixture.""" + return MockConfigEntry( + title="mock_title", + domain=DOMAIN, + unique_id=address, + data={ + CONF_ADDRESS: address, + CONF_LOCAL_NAME: display_name, + CONF_MAC_CODE: mac_code, + CONF_BLIND_TYPE: blind_type.name.lower(), + }, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.motionblinds_ble.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def motionblinds_ble_connect( + enable_bluetooth: None, local_name: str, address: str ) -> Generator[tuple[AsyncMock, Mock]]: """Mock motion blinds ble connection and entry setup.""" device = Mock() - device.name = TEST_NAME - device.address = 
TEST_ADDRESS + device.name = local_name + device.address = address bleak_scanner = AsyncMock() bleak_scanner.discover.return_value = [device] @@ -31,9 +150,5 @@ def motion_blinds_connect_fixture( "homeassistant.components.motionblinds_ble.config_flow.bluetooth.async_get_scanner", return_value=bleak_scanner, ), - patch( - "homeassistant.components.motionblinds_ble.async_setup_entry", - return_value=True, - ), ): yield bleak_scanner, device diff --git a/tests/components/motionblinds_ble/test_button.py b/tests/components/motionblinds_ble/test_button.py new file mode 100644 index 00000000000..9c27056c929 --- /dev/null +++ b/tests/components/motionblinds_ble/test_button.py @@ -0,0 +1,47 @@ +"""Tests for Motionblinds BLE buttons.""" + +from unittest.mock import Mock + +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.motionblinds_ble.const import ( + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize( + ("button"), + [ + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, + ], +) +async def test_button( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + button: str, +) -> None: + """Test states of the button.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: f"button.{name}_{button}"}, + blocking=True, + ) + getattr(mock_motion_device, button).assert_called_once() diff --git a/tests/components/motionblinds_ble/test_config_flow.py b/tests/components/motionblinds_ble/test_config_flow.py index 4cab12269dd..05d3077ceb1 100644 --- a/tests/components/motionblinds_ble/test_config_flow.py +++ b/tests/components/motionblinds_ble/test_config_flow.py @@ -12,41 +12,19 @@ from homeassistant.const import CONF_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import TEST_ADDRESS, TEST_MAC, TEST_NAME - from tests.common import MockConfigEntry -from tests.components.bluetooth import generate_advertisement_data, generate_ble_device - -TEST_BLIND_TYPE = MotionBlindType.ROLLER.name.lower() - -BLIND_SERVICE_INFO = BluetoothServiceInfoBleak( - name=TEST_NAME, - address=TEST_ADDRESS, - device=generate_ble_device( - address="cc:cc:cc:cc:cc:cc", - name=TEST_NAME, - ), - rssi=-61, - manufacturer_data={000: b"test"}, - service_data={ - "test": bytearray(b"0000"), - }, - service_uuids=[ - "test", - ], - source="local", - advertisement=generate_advertisement_data( - manufacturer_data={000: b"test"}, - service_uuids=["test"], - ), - connectable=True, - time=0, - tx_power=-127, -) @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_manual_success(hass: HomeAssistant) -> None: 
+@pytest.mark.usefixtures("mock_setup_entry") +async def test_config_flow_manual_success( + hass: HomeAssistant, + blind_type: MotionBlindType, + mac_code: str, + address: str, + local_name: str, + display_name: str, +) -> None: """Successful flow manually initialized by the user.""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -57,28 +35,36 @@ async def test_config_flow_manual_success(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_config_flow_manual_error_invalid_mac( + hass: HomeAssistant, + mac_code: str, + address: str, + local_name: str, + display_name: str, + blind_type: MotionBlindType, +) -> None: """Invalid MAC code error flow manually initialized by the user.""" # Initialize @@ -101,7 +87,7 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None # Recover result = 
await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -109,15 +95,15 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None # Finish flow result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} @@ -125,6 +111,7 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None @pytest.mark.usefixtures("motionblinds_ble_connect") async def test_config_flow_manual_error_no_bluetooth_adapter( hass: HomeAssistant, + mac_code: str, ) -> None: """No Bluetooth adapter error flow manually initialized by the user.""" @@ -153,14 +140,21 @@ async def test_config_flow_manual_error_no_bluetooth_adapter( ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == const.ERROR_NO_BLUETOOTH_ADAPTER +@pytest.mark.usefixtures("mock_setup_entry") async def test_config_flow_manual_error_could_not_find_motor( - hass: HomeAssistant, motionblinds_ble_connect: tuple[AsyncMock, Mock] + hass: HomeAssistant, + motionblinds_ble_connect: tuple[AsyncMock, Mock], + mac_code: str, + 
local_name: str, + display_name: str, + address: str, + blind_type: MotionBlindType, ) -> None: """Could not find motor error flow manually initialized by the user.""" @@ -176,17 +170,17 @@ async def test_config_flow_manual_error_could_not_find_motor( motionblinds_ble_connect[1].name = "WRONG_NAME" result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {"base": const.ERROR_COULD_NOT_FIND_MOTOR} # Recover - motionblinds_ble_connect[1].name = TEST_NAME + motionblinds_ble_connect[1].name = local_name result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -194,21 +188,23 @@ async def test_config_flow_manual_error_could_not_find_motor( # Finish flow result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} async def test_config_flow_manual_error_no_devices_found( - hass: HomeAssistant, motionblinds_ble_connect: tuple[AsyncMock, Mock] + hass: HomeAssistant, + motionblinds_ble_connect: tuple[AsyncMock, Mock], + mac_code: str, ) -> None: """No devices found error flow 
manually initialized by the user.""" @@ -224,19 +220,27 @@ async def test_config_flow_manual_error_no_devices_found( motionblinds_ble_connect[0].discover.return_value = [] result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == const.ERROR_NO_DEVICES_FOUND @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_bluetooth_success(hass: HomeAssistant) -> None: +async def test_config_flow_bluetooth_success( + hass: HomeAssistant, + mac_code: str, + service_info: BluetoothServiceInfoBleak, + address: str, + local_name: str, + display_name: str, + blind_type: MotionBlindType, +) -> None: """Successful bluetooth discovery flow.""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_BLUETOOTH}, - data=BLIND_SERVICE_INFO, + data=service_info, ) assert result["type"] is FlowResultType.FORM @@ -244,36 +248,32 @@ async def test_config_flow_bluetooth_success(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} -async def test_options_flow(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow( + hass: HomeAssistant, + 
mock_config_entry: MockConfigEntry, +) -> None: """Test the options flow.""" - entry = MockConfigEntry( - domain=const.DOMAIN, - unique_id="0123456789", - data={ - const.CONF_BLIND_TYPE: MotionBlindType.ROLLER, - }, - ) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.options.async_init(entry.entry_id) + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" diff --git a/tests/components/motionblinds_ble/test_cover.py b/tests/components/motionblinds_ble/test_cover.py new file mode 100644 index 00000000000..2f6b33b3017 --- /dev/null +++ b/tests/components/motionblinds_ble/test_cover.py @@ -0,0 +1,127 @@ +"""Tests for Motionblinds BLE covers.""" + +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import MotionBlindType, MotionRunningType +import pytest + +from homeassistant.components.cover import ( + ATTR_POSITION, + ATTR_TILT_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_CLOSE_COVER_TILT, + SERVICE_OPEN_COVER, + SERVICE_OPEN_COVER_TILT, + SERVICE_SET_COVER_POSITION, + SERVICE_SET_COVER_TILT_POSITION, + SERVICE_STOP_COVER, + SERVICE_STOP_COVER_TILT, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize("blind_type", [MotionBlindType.VENETIAN]) +@pytest.mark.parametrize( + ("service", "method", "kwargs"), + [ + (SERVICE_OPEN_COVER, "open", {}), + (SERVICE_CLOSE_COVER, "close", {}), + (SERVICE_OPEN_COVER_TILT, "open_tilt", {}), + (SERVICE_CLOSE_COVER_TILT, "close_tilt", {}), + (SERVICE_SET_COVER_POSITION, "position", {ATTR_POSITION: 5}), + (SERVICE_SET_COVER_TILT_POSITION, "tilt", {ATTR_TILT_POSITION: 10}), + (SERVICE_STOP_COVER, "stop", {}), + (SERVICE_STOP_COVER_TILT, "stop", {}), + ], +) +async def test_cover_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + service: str, + method: str, + kwargs: dict[str, Any], +) -> None: + """Test cover service.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + service, + {ATTR_ENTITY_ID: f"cover.{name}", **kwargs}, + blocking=True, + ) + getattr(mock_motion_device, method).assert_called_once() + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize( + ("running_type", "state"), + [ + (None, "unknown"), + (MotionRunningType.STILL, "unknown"), + (MotionRunningType.OPENING, STATE_OPENING), + (MotionRunningType.CLOSING, STATE_CLOSING), + ], +) +async def test_cover_update_running( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + running_type: str | None, + state: str, +) -> None: + """Test updating running status.""" + + await setup_integration(hass, mock_config_entry) + + async_update_running = mock_motion_device.register_running_callback.call_args[0][0] + + async_update_running(running_type) + assert hass.states.get(f"cover.{name}").state == state + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize( + ("position", "tilt", "state"), + [ + 
(None, None, "unknown"), + (0, 0, STATE_OPEN), + (50, 90, STATE_OPEN), + (100, 180, STATE_CLOSED), + ], +) +async def test_cover_update_position( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + position: int, + tilt: int, + state: str, +) -> None: + """Test updating cover position and tilt.""" + + await setup_integration(hass, mock_config_entry) + + async_update_position = mock_motion_device.register_position_callback.call_args[0][ + 0 + ] + + async_update_position(position, tilt) + assert hass.states.get(f"cover.{name}").state == state diff --git a/tests/components/motionblinds_ble/test_entity.py b/tests/components/motionblinds_ble/test_entity.py new file mode 100644 index 00000000000..1bfd3b185e5 --- /dev/null +++ b/tests/components/motionblinds_ble/test_entity.py @@ -0,0 +1,54 @@ +"""Tests for Motionblinds BLE entities.""" + +from unittest.mock import Mock + +import pytest + +from homeassistant.components.homeassistant import ( + DOMAIN as HA_DOMAIN, + SERVICE_UPDATE_ENTITY, +) +from homeassistant.components.motionblinds_ble.const import ( + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, + ATTR_SPEED, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("platform", "entity"), + [ + (Platform.BUTTON, ATTR_CONNECT), + (Platform.BUTTON, ATTR_DISCONNECT), + (Platform.BUTTON, ATTR_FAVORITE), + (Platform.SELECT, ATTR_SPEED), + ], +) +async def test_entity_update( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + platform: Platform, + entity: str, +) -> None: + """Test updating entity using homeassistant.update_entity.""" + + await async_setup_component(hass, HA_DOMAIN, {}) + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + HA_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: f"{platform.name.lower()}.{name}_{entity}"}, + blocking=True, + ) + getattr(mock_motion_device, "status_query").assert_called_once_with() diff --git a/tests/components/motionblinds_ble/test_init.py b/tests/components/motionblinds_ble/test_init.py new file mode 100644 index 00000000000..09596bd8d5e --- /dev/null +++ b/tests/components/motionblinds_ble/test_init.py @@ -0,0 +1,49 @@ +"""Tests for Motionblinds BLE init.""" + +from unittest.mock import patch + +from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak +from homeassistant.components.motionblinds_ble import options_update_listener +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + + +async def test_options_update_listener( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test options_update_listener.""" + + await setup_integration(hass, mock_config_entry) + + with ( + patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_custom_disconnect_time" + ) as mock_set_custom_disconnect_time, + patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_permanent_connection" + ) as set_permanent_connection, + ): + await options_update_listener(hass, mock_config_entry) + mock_set_custom_disconnect_time.assert_called_once() + set_permanent_connection.assert_called_once() + + +async def test_update_ble_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + service_info: BluetoothServiceInfoBleak, +) -> None: + """Test async_update_ble_device.""" + + await setup_integration(hass, mock_config_entry) + + with patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_ble_device" + ) as mock_set_ble_device: + inject_bluetooth_service_info(hass, service_info) + mock_set_ble_device.assert_called_once() diff --git a/tests/components/motionblinds_ble/test_select.py b/tests/components/motionblinds_ble/test_select.py new file mode 100644 index 00000000000..2bd1bb30ec2 --- /dev/null +++ b/tests/components/motionblinds_ble/test_select.py @@ -0,0 +1,76 @@ +"""Tests for Motionblinds BLE selects.""" + +from collections.abc import Callable +from enum import Enum +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import MotionSpeedLevel +from motionblindsble.device import MotionDevice +import pytest + +from homeassistant.components.motionblinds_ble.const import ATTR_SPEED +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, 
ATTR_OPTION +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize(("select", "args"), [(ATTR_SPEED, MotionSpeedLevel.HIGH)]) +async def test_select( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + select: str, + args: Any, +) -> None: + """Test select.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: f"select.{name}_{select}", + ATTR_OPTION: MotionSpeedLevel.HIGH.value, + }, + blocking=True, + ) + getattr(mock_motion_device, select).assert_called_once_with(args) + + +@pytest.mark.parametrize( + ("select", "register_callback", "value"), + [ + ( + ATTR_SPEED, + lambda device: device.register_speed_callback, + MotionSpeedLevel.HIGH, + ) + ], +) +async def test_select_update( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + select: str, + register_callback: Callable[[MotionDevice], Callable[..., None]], + value: type[Enum], +) -> None: + """Test select state update.""" + + await setup_integration(hass, mock_config_entry) + + update_func = register_callback(mock_motion_device).call_args[0][0] + + update_func(value) + assert hass.states.get(f"select.{name}_{select}").state == str(value.value) diff --git a/tests/components/motionblinds_ble/test_sensor.py b/tests/components/motionblinds_ble/test_sensor.py new file mode 100644 index 00000000000..54d2fbcb064 --- /dev/null +++ b/tests/components/motionblinds_ble/test_sensor.py @@ -0,0 +1,107 @@ +"""Tests for Motionblinds BLE sensors.""" + +from collections.abc import Callable +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import ( + MotionBlindType, + MotionCalibrationType, + MotionConnectionType, +) +from motionblindsble.device import MotionDevice +import pytest + +from 
homeassistant.components.motionblinds_ble.const import ( + ATTR_BATTERY, + ATTR_SIGNAL_STRENGTH, +) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize("blind_type", [MotionBlindType.CURTAIN]) +@pytest.mark.parametrize( + ("sensor", "register_callback", "initial_value", "args", "expected_value"), + [ + ( + "connection_status", + lambda device: device.register_connection_callback, + MotionConnectionType.DISCONNECTED.value, + [MotionConnectionType.CONNECTING], + MotionConnectionType.CONNECTING.value, + ), + ( + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [25, True, False], + "25", + ), + ( # Battery unknown + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [None, False, False], + "unknown", + ), + ( # Wired + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [255, False, True], + "255", + ), + ( # Almost full + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [99, False, False], + "99", + ), + ( # Almost empty + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [1, False, False], + "1", + ), + ( + "calibration_status", + lambda device: device.register_calibration_callback, + "unknown", + [MotionCalibrationType.CALIBRATING], + MotionCalibrationType.CALIBRATING.value, + ), + ( + ATTR_SIGNAL_STRENGTH, + lambda device: device.register_signal_strength_callback, + "unknown", + [-50], + "-50", + ), + ], +) +async def test_sensor( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + sensor: str, + register_callback: Callable[[MotionDevice], Callable[..., None]], + initial_value: str, + args: list[Any], + expected_value: str, +) -> None: + """Test sensors.""" + + await setup_integration(hass, 
mock_config_entry) + + assert hass.states.get(f"{SENSOR_DOMAIN}.{name}_{sensor}").state == initial_value + update_func = register_callback(mock_motion_device).call_args[0][0] + update_func(*args) + assert hass.states.get(f"{SENSOR_DOMAIN}.{name}_{sensor}").state == expected_value diff --git a/tests/components/motioneye/test_camera.py b/tests/components/motioneye/test_camera.py index ccbdc022495..0f3a7d6f904 100644 --- a/tests/components/motioneye/test_camera.py +++ b/tests/components/motioneye/test_camera.py @@ -339,7 +339,7 @@ async def test_device_info( device = device_registry.async_get_device(identifiers={device_identifier}) assert device - assert device.config_entries == [TEST_CONFIG_ENTRY_ID] + assert device.config_entries == {TEST_CONFIG_ENTRY_ID} assert device.identifiers == {device_identifier} assert device.manufacturer == MOTIONEYE_MANUFACTURER assert device.model == MOTIONEYE_MANUFACTURER diff --git a/tests/components/motionmount/conftest.py b/tests/components/motionmount/conftest.py index 9e5b0355387..49f624b5266 100644 --- a/tests/components/motionmount/conftest.py +++ b/tests/components/motionmount/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Vogel's MotionMount integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.motionmount.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT diff --git a/tests/components/mpd/conftest.py b/tests/components/mpd/conftest.py index 818f085decc..a73a529cd0b 100644 --- a/tests/components/mpd/conftest.py +++ b/tests/components/mpd/conftest.py @@ -1,7 +1,7 @@ """Fixtures for Music Player Daemon integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -22,7 +22,7 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def 
mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.mpd.async_setup_entry", return_value=True @@ -31,7 +31,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_mpd_client() -> Generator[AsyncMock, None, None]: +def mock_mpd_client() -> Generator[MagicMock]: """Return a mock for Music Player Daemon client.""" with patch( diff --git a/tests/components/mqtt/conftest.py b/tests/components/mqtt/conftest.py index bc4fa2e6634..7395767aeae 100644 --- a/tests/components/mqtt/conftest.py +++ b/tests/components/mqtt/conftest.py @@ -1,12 +1,31 @@ """Test fixtures for mqtt component.""" +import asyncio +from collections.abc import AsyncGenerator, Generator from random import getrandbits +from typing import Any from unittest.mock import patch import pytest -from typing_extensions import Generator -from tests.components.light.conftest import mock_light_profiles # noqa: F401 +from homeassistant.components import mqtt +from homeassistant.components.mqtt.models import MessageCallbackType, ReceiveMessage +from homeassistant.components.mqtt.util import EnsureJobAfterCooldown +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED +from homeassistant.core import HomeAssistant, callback + +from tests.common import MockConfigEntry +from tests.typing import MqttMockPahoClient + +ENTRY_DEFAULT_BIRTH_MESSAGE = { + mqtt.CONF_BROKER: "mock-broker", + mqtt.CONF_BIRTH_MESSAGE: { + mqtt.ATTR_TOPIC: "homeassistant/status", + mqtt.ATTR_PAYLOAD: "online", + mqtt.ATTR_QOS: 0, + mqtt.ATTR_RETAIN: False, + }, +} @pytest.fixture(autouse=True) @@ -29,3 +48,76 @@ def mock_temp_dir(temp_dir_prefix: str) -> Generator[str]: f"home-assistant-mqtt-{temp_dir_prefix}-{getrandbits(10):03x}", ) as mocked_temp_dir: yield mocked_temp_dir + + +@pytest.fixture +def mock_debouncer(hass: HomeAssistant) -> Generator[asyncio.Event]: + """Mock 
EnsureJobAfterCooldown. + + Returns an asyncio.Event that allows to await the debouncer task to be finished. + """ + task_done = asyncio.Event() + + class MockDeboncer(EnsureJobAfterCooldown): + """Mock the MQTT client (un)subscribe debouncer.""" + + async def _async_job(self) -> None: + """Execute after a cooldown period.""" + await super()._async_job() + task_done.set() + + # We mock the import of EnsureJobAfterCooldown in client.py + with patch( + "homeassistant.components.mqtt.client.EnsureJobAfterCooldown", MockDeboncer + ): + yield task_done + + +@pytest.fixture +async def setup_with_birth_msg_client_mock( + hass: HomeAssistant, + mqtt_config_entry_data: dict[str, Any] | None, + mqtt_client_mock: MqttMockPahoClient, +) -> AsyncGenerator[MqttMockPahoClient]: + """Test sending birth message.""" + birth = asyncio.Event() + with ( + patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0), + patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0), + patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0), + ): + entry = MockConfigEntry( + domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"} + ) + entry.add_to_hass(hass) + hass.config.components.add(mqtt.DOMAIN) + assert await hass.config_entries.async_setup(entry.entry_id) + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + + @callback + def wait_birth(msg: ReceiveMessage) -> None: + """Handle birth message.""" + birth.set() + + await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) + await hass.async_block_till_done() + await birth.wait() + yield mqtt_client_mock + + +@pytest.fixture +def recorded_calls() -> list[ReceiveMessage]: + """Fixture to hold recorded calls.""" + return [] + + +@pytest.fixture +def record_calls(recorded_calls: list[ReceiveMessage]) -> MessageCallbackType: + """Fixture to record calls.""" + + @callback + def record_calls(msg: ReceiveMessage) -> None: + """Record calls.""" + recorded_calls.append(msg) + + return 
record_calls diff --git a/tests/components/mqtt/test_alarm_control_panel.py b/tests/components/mqtt/test_alarm_control_panel.py index a90e71cebe5..07ebb671e37 100644 --- a/tests/components/mqtt/test_alarm_control_panel.py +++ b/tests/components/mqtt/test_alarm_control_panel.py @@ -192,7 +192,7 @@ def does_not_raise(): ], ) async def test_fail_setup_without_state_or_command_topic( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, valid + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, valid: bool ) -> None: """Test for failing setup with no state or command topic.""" assert await mqtt_mock_entry() @@ -351,8 +351,8 @@ async def test_supported_features( async def test_publish_mqtt_no_code( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - service, - payload, + service: str, + payload: str, ) -> None: """Test publishing of MQTT messages when no code is configured.""" mqtt_mock = await mqtt_mock_entry() @@ -853,10 +853,7 @@ async def test_availability_without_topic( ) -> None: """Test availability without defined availability topic.""" await help_test_availability_without_topic( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG_CODE, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE ) @@ -865,10 +862,7 @@ async def test_default_availability_payload( ) -> None: """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG_CODE, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE ) @@ -877,10 +871,7 @@ async def test_custom_availability_payload( ) -> None: """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -889,10 +880,7 @@ async def 
test_setting_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -914,10 +902,7 @@ async def test_setting_attribute_with_template( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -928,11 +913,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -943,26 +924,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -997,21 +968,17 @@ async def test_unique_id( async def test_discovery_removal_alarm( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: 
pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered alarm_control_panel.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][alarm_control_panel.DOMAIN]) await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, alarm_control_panel.DOMAIN, data + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, data ) async def test_discovery_update_alarm_topic_and_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered alarm_control_panel.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][alarm_control_panel.DOMAIN]) @@ -1036,7 +1003,6 @@ async def test_discovery_update_alarm_topic_and_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, alarm_control_panel.DOMAIN, config1, config2, @@ -1046,9 +1012,7 @@ async def test_discovery_update_alarm_topic_and_template( async def test_discovery_update_alarm_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered alarm_control_panel.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][alarm_control_panel.DOMAIN]) @@ -1071,7 +1035,6 @@ async def test_discovery_update_alarm_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, alarm_control_panel.DOMAIN, config1, config2, @@ -1081,9 +1044,7 @@ async def test_discovery_update_alarm_template( async def test_discovery_update_unchanged_alarm( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered alarm_control_panel.""" config1 = 
copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][alarm_control_panel.DOMAIN]) @@ -1096,7 +1057,6 @@ async def test_discovery_update_unchanged_alarm( await help_test_discovery_update_unchanged( hass, mqtt_mock_entry, - caplog, alarm_control_panel.DOMAIN, data1, discovery_update, @@ -1105,9 +1065,7 @@ async def test_discovery_update_unchanged_alarm( @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' @@ -1117,12 +1075,7 @@ async def test_discovery_broken( ' "command_topic": "test_topic" }' ) await help_test_discovery_broken( - hass, - mqtt_mock_entry, - caplog, - alarm_control_panel.DOMAIN, - data1, - data2, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, data1, data2 ) @@ -1191,10 +1144,7 @@ async def test_entity_device_info_remove( ) -> None: """Test device registry remove.""" await help_test_entity_device_info_remove( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -1212,10 +1162,7 @@ async def test_entity_id_update_discovery_update( ) -> None: """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -1289,8 +1236,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = alarm_control_panel.DOMAIN @@ -1313,8 +1259,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - 
hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = alarm_control_panel.DOMAIN diff --git a/tests/components/mqtt/test_binary_sensor.py b/tests/components/mqtt/test_binary_sensor.py index 995aadd7dba..e2c168bd46e 100644 --- a/tests/components/mqtt/test_binary_sensor.py +++ b/tests/components/mqtt/test_binary_sensor.py @@ -90,7 +90,6 @@ DEFAULT_CONFIG = { async def test_setting_sensor_value_expires_availability_topic( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, ) -> None: """Test the expiration of the value.""" await mqtt_mock_entry() @@ -759,10 +758,7 @@ async def test_setting_attribute_with_template( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( - hass, - mqtt_mock_entry, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -773,11 +769,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -788,26 +780,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered 
MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -840,21 +822,15 @@ async def test_unique_id( async def test_discovery_removal_binary_sensor( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered binary_sensor.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][binary_sensor.DOMAIN]) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, binary_sensor.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, binary_sensor.DOMAIN, data) async def test_discovery_update_binary_sensor_topic_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered binary_sensor.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][binary_sensor.DOMAIN]) @@ -881,7 +857,6 @@ async def test_discovery_update_binary_sensor_topic_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, binary_sensor.DOMAIN, config1, config2, @@ -891,9 +866,7 @@ async def test_discovery_update_binary_sensor_topic_template( async def test_discovery_update_binary_sensor_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered binary_sensor.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][binary_sensor.DOMAIN]) @@ -918,7 +891,6 @@ async def test_discovery_update_binary_sensor_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, binary_sensor.DOMAIN, config1, config2, @@ -962,9 +934,7 @@ async def 
test_encoding_subscribable_topics( async def test_discovery_update_unchanged_binary_sensor( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered binary_sensor.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][binary_sensor.DOMAIN]) @@ -975,31 +945,19 @@ async def test_discovery_update_unchanged_binary_sensor( "homeassistant.components.mqtt.binary_sensor.MqttBinarySensor.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - binary_sensor.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer", "off_delay": -1 }' data2 = '{ "name": "Milk", "state_topic": "test_topic" }' await help_test_discovery_broken( - hass, - mqtt_mock_entry, - caplog, - binary_sensor.DOMAIN, - data1, - data2, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, data1, data2 ) @@ -1008,10 +966,7 @@ async def test_entity_device_info_with_connection( ) -> None: """Test MQTT binary sensor device registry integration.""" await help_test_entity_device_info_with_connection( - hass, - mqtt_mock_entry, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -1020,10 +975,7 @@ async def test_entity_device_info_with_identifier( ) -> None: """Test MQTT binary sensor device registry integration.""" await help_test_entity_device_info_with_identifier( - hass, - mqtt_mock_entry, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, 
binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -1032,10 +984,7 @@ async def test_entity_device_info_update( ) -> None: """Test device registry update.""" await help_test_entity_device_info_update( - hass, - mqtt_mock_entry, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -1044,10 +993,7 @@ async def test_entity_device_info_remove( ) -> None: """Test device registry remove.""" await help_test_entity_device_info_remove( - hass, - mqtt_mock_entry, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -1065,10 +1011,7 @@ async def test_entity_id_update_discovery_update( ) -> None: """Test MQTT discovery update when entity_id is updated.""" await help_test_entity_id_update_discovery_update( - hass, - mqtt_mock_entry, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -1077,17 +1020,12 @@ async def test_entity_debug_info_message( ) -> None: """Test MQTT debug info.""" await help_test_entity_debug_info_message( - hass, - mqtt_mock_entry, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, - None, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG, None ) async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = binary_sensor.DOMAIN @@ -1135,10 +1073,10 @@ async def test_cleanup_triggers_and_restoring_state( tmp_path: Path, freezer: FrozenDateTimeFactory, hass_config: ConfigType, - payload1, - state1, - payload2, - state2, + payload1: str, + state1: str, + payload2: str, + state2: str, ) -> None: """Test cleanup old triggers at reloading and restoring the state.""" freezer.move_to("2022-02-02 12:01:00+01:00") @@ -1173,6 +1111,8 @@ async def test_cleanup_triggers_and_restoring_state( state = hass.states.get("binary_sensor.test2") assert state.state == state2 
+ await hass.async_block_till_done(wait_background_tasks=True) + @pytest.mark.parametrize( "hass_config", @@ -1225,8 +1165,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = binary_sensor.DOMAIN diff --git a/tests/components/mqtt/test_button.py b/tests/components/mqtt/test_button.py index 3d5d295d4d4..d85ead6ecee 100644 --- a/tests/components/mqtt/test_button.py +++ b/tests/components/mqtt/test_button.py @@ -159,13 +159,7 @@ async def test_default_availability_payload( } } await help_test_default_availability_payload( - hass, - mqtt_mock_entry, - button.DOMAIN, - config, - True, - "state-topic", - "1", + hass, mqtt_mock_entry, button.DOMAIN, config, True, "state-topic", "1" ) @@ -184,13 +178,7 @@ async def test_custom_availability_payload( } await help_test_custom_availability_payload( - hass, - mqtt_mock_entry, - button.DOMAIN, - config, - True, - "state-topic", - "1", + hass, mqtt_mock_entry, button.DOMAIN, config, True, "state-topic", "1" ) @@ -228,11 +216,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - button.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, button.DOMAIN, DEFAULT_CONFIG ) @@ -243,26 +227,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - button.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, button.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, 
mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - button.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, button.DOMAIN, DEFAULT_CONFIG ) @@ -295,21 +269,15 @@ async def test_unique_id( async def test_discovery_removal_button( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered button.""" data = '{ "name": "test", "command_topic": "test_topic" }' - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, button.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, button.DOMAIN, data) async def test_discovery_update_button( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered button.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][button.DOMAIN]) @@ -318,19 +286,12 @@ async def test_discovery_update_button( config2["name"] = "Milk" await help_test_discovery_update( - hass, - mqtt_mock_entry, - caplog, - button.DOMAIN, - config1, - config2, + hass, mqtt_mock_entry, button.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_button( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered button.""" data1 = ( @@ -342,27 +303,18 @@ async def test_discovery_update_unchanged_button( "homeassistant.components.mqtt.button.MqttButton.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - button.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, 
button.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "command_topic": "test_topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, button.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, button.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -528,8 +480,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = button.DOMAIN @@ -552,8 +503,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = button.DOMAIN diff --git a/tests/components/mqtt/test_camera.py b/tests/components/mqtt/test_camera.py index fb0107d6780..cda536dc19e 100644 --- a/tests/components/mqtt/test_camera.py +++ b/tests/components/mqtt/test_camera.py @@ -222,11 +222,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - camera.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, camera.DOMAIN, DEFAULT_CONFIG ) @@ -237,26 +233,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - 
mqtt_mock_entry, - caplog, - camera.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, camera.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - camera.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, camera.DOMAIN, DEFAULT_CONFIG ) @@ -289,35 +275,28 @@ async def test_unique_id( async def test_discovery_removal_camera( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered camera.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][camera.DOMAIN]) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, camera.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, camera.DOMAIN, data) async def test_discovery_update_camera( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, ) -> None: """Test update of discovered camera.""" config1 = {"name": "Beer", "topic": "test_topic"} config2 = {"name": "Milk", "topic": "test_topic"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, camera.DOMAIN, config1, config2 + hass, mqtt_mock_entry, camera.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_camera( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered camera.""" data1 = '{ "name": "Beer", "topic": "test_topic"}' @@ -325,28 +304,19 @@ async def test_discovery_update_unchanged_camera( 
"homeassistant.components.mqtt.camera.MqttCamera.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - camera.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, camera.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "topic": "test_topic"}' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, camera.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, camera.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -390,11 +360,7 @@ async def test_entity_id_update_subscriptions( ) -> None: """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( - hass, - mqtt_mock_entry, - camera.DOMAIN, - DEFAULT_CONFIG, - ["test_topic"], + hass, mqtt_mock_entry, camera.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) @@ -423,8 +389,7 @@ async def test_entity_debug_info_message( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = camera.DOMAIN @@ -447,8 +412,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = camera.DOMAIN diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py new file mode 100644 index 00000000000..c5887016f2e --- /dev/null +++ 
b/tests/components/mqtt/test_client.py @@ -0,0 +1,1997 @@ +"""The tests for the MQTT client.""" + +import asyncio +from datetime import datetime, timedelta +import socket +import ssl +from typing import Any +from unittest.mock import MagicMock, Mock, call, patch + +import certifi +import paho.mqtt.client as paho_mqtt +import pytest + +from homeassistant.components import mqtt +from homeassistant.components.mqtt.client import RECONNECT_INTERVAL_SECONDS +from homeassistant.components.mqtt.models import MessageCallbackType, ReceiveMessage +from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState +from homeassistant.const import ( + CONF_PROTOCOL, + EVENT_HOMEASSISTANT_STARTED, + EVENT_HOMEASSISTANT_STOP, + UnitOfTemperature, +) +from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.util.dt import utcnow + +from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE +from .test_common import help_all_subscribe_calls + +from tests.common import ( + MockConfigEntry, + async_fire_mqtt_message, + async_fire_time_changed, +) +from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, MqttMockPahoClient + + +@pytest.fixture(autouse=True) +def mock_storage(hass_storage: dict[str, Any]) -> None: + """Autouse hass_storage for the TestCase tests.""" + + +def help_assert_message( + msg: ReceiveMessage, + topic: str | None = None, + payload: str | None = None, + qos: int | None = None, + retain: bool | None = None, +) -> bool: + """Return True if all of the given attributes match with the message.""" + match: bool = True + if topic is not None: + match &= msg.topic == topic + if payload is not None: + match &= msg.payload == payload + if qos is not None: + match &= msg.qos == qos + if retain is not None: + match &= msg.retain == retain + return match + + +async def test_mqtt_connects_on_home_assistant_mqtt_setup( + hass: HomeAssistant, 
setup_with_birth_msg_client_mock: MqttMockPahoClient +) -> None: + """Test if client is connected after mqtt init on bootstrap.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + assert mqtt_client_mock.connect.call_count == 1 + + +async def test_mqtt_does_not_disconnect_on_home_assistant_stop( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test if client is not disconnected on HA stop.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + hass.bus.fire(EVENT_HOMEASSISTANT_STOP) + await mock_debouncer.wait() + assert mqtt_client_mock.disconnect.call_count == 0 + + +async def test_mqtt_await_ack_at_disconnect(hass: HomeAssistant) -> None: + """Test if ACK is awaited correctly when disconnecting.""" + + class FakeInfo: + """Returns a simulated client publish response.""" + + mid = 100 + rc = 0 + + with patch( + "homeassistant.components.mqtt.async_client.AsyncMQTTClient" + ) as mock_client: + mqtt_client = mock_client.return_value + mqtt_client.connect = MagicMock( + return_value=0, + side_effect=lambda *args, **kwargs: hass.loop.call_soon_threadsafe( + mqtt_client.on_connect, mqtt_client, None, 0, 0, 0 + ), + ) + mqtt_client.publish = MagicMock(return_value=FakeInfo()) + entry = MockConfigEntry( + domain=mqtt.DOMAIN, + data={ + "certificate": "auto", + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_DISCOVERY: False, + }, + ) + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + + mqtt_client = mock_client.return_value + + # publish from MQTT client without awaiting + hass.async_create_task( + mqtt.async_publish(hass, "test-topic", "some-payload", 0, False) + ) + await asyncio.sleep(0) + # Simulate late ACK callback from client with mid 100 + mqtt_client.on_publish(0, 0, 100) + # disconnect the MQTT client + await hass.async_stop() + await hass.async_block_till_done() + # assert the payload was sent through the client + assert 
mqtt_client.publish.called + assert mqtt_client.publish.call_args[0] == ( + "test-topic", + "some-payload", + 0, + False, + ) + await hass.async_block_till_done(wait_background_tasks=True) + + +@pytest.mark.parametrize("mqtt_config_entry_data", [ENTRY_DEFAULT_BIRTH_MESSAGE]) +async def test_publish( + hass: HomeAssistant, setup_with_birth_msg_client_mock: MqttMockPahoClient +) -> None: + """Test the publish function.""" + publish_mock: MagicMock = setup_with_birth_msg_client_mock.publish + await mqtt.async_publish(hass, "test-topic", "test-payload") + await hass.async_block_till_done() + assert publish_mock.called + assert publish_mock.call_args[0] == ( + "test-topic", + "test-payload", + 0, + False, + ) + publish_mock.reset_mock() + + await mqtt.async_publish(hass, "test-topic", "test-payload", 2, True) + await hass.async_block_till_done() + assert publish_mock.called + assert publish_mock.call_args[0] == ( + "test-topic", + "test-payload", + 2, + True, + ) + publish_mock.reset_mock() + + mqtt.publish(hass, "test-topic2", "test-payload2") + await hass.async_block_till_done() + assert publish_mock.called + assert publish_mock.call_args[0] == ( + "test-topic2", + "test-payload2", + 0, + False, + ) + publish_mock.reset_mock() + + mqtt.publish(hass, "test-topic2", "test-payload2", 2, True) + await hass.async_block_till_done() + assert publish_mock.called + assert publish_mock.call_args[0] == ( + "test-topic2", + "test-payload2", + 2, + True, + ) + publish_mock.reset_mock() + + # test binary pass-through + mqtt.publish( + hass, + "test-topic3", + b"\xde\xad\xbe\xef", + 0, + False, + ) + await hass.async_block_till_done() + assert publish_mock.called + assert publish_mock.call_args[0] == ( + "test-topic3", + b"\xde\xad\xbe\xef", + 0, + False, + ) + publish_mock.reset_mock() + + # test null payload + mqtt.publish( + hass, + "test-topic3", + None, + 0, + False, + ) + await hass.async_block_till_done() + assert publish_mock.called + assert publish_mock.call_args[0] == ( + 
"test-topic3", + None, + 0, + False, + ) + + publish_mock.reset_mock() + + +async def test_convert_outgoing_payload(hass: HomeAssistant) -> None: + """Test the converting of outgoing MQTT payloads without template.""" + command_template = mqtt.MqttCommandTemplate(None) + assert command_template.async_render(b"\xde\xad\xbe\xef") == b"\xde\xad\xbe\xef" + assert ( + command_template.async_render("b'\\xde\\xad\\xbe\\xef'") + == "b'\\xde\\xad\\xbe\\xef'" + ) + assert command_template.async_render(1234) == 1234 + assert command_template.async_render(1234.56) == 1234.56 + assert command_template.async_render(None) is None + + +async def test_all_subscriptions_run_when_decode_fails( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test all other subscriptions still run when decode fails for one.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "test-topic", record_calls, encoding="ascii") + await mqtt.async_subscribe(hass, "test-topic", record_calls) + + async_fire_mqtt_message(hass, "test-topic", UnitOfTemperature.CELSIUS) + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + + +async def test_subscribe_topic( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of a topic.""" + await mqtt_mock_entry() + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + + async_fire_mqtt_message(hass, "test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "test-topic" + assert recorded_calls[0].payload == "test-payload" + + unsub() + + async_fire_mqtt_message(hass, "test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + + # Cannot unsubscribe twice + with 
pytest.raises(HomeAssistantError): + unsub() + + +@pytest.mark.usefixtures("mqtt_mock_entry") +async def test_subscribe_topic_not_initialize( + hass: HomeAssistant, record_calls: MessageCallbackType +) -> None: + """Test the subscription of a topic when MQTT was not initialized.""" + with pytest.raises( + HomeAssistantError, match=r".*make sure MQTT is set up correctly" + ): + await mqtt.async_subscribe(hass, "test-topic", record_calls) + + +async def test_subscribe_mqtt_config_entry_disabled( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, record_calls: MessageCallbackType +) -> None: + """Test the subscription of a topic when MQTT config entry is disabled.""" + mqtt_mock.connected = True + + mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + assert mqtt_config_entry.state is ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(mqtt_config_entry.entry_id) + assert mqtt_config_entry.state is ConfigEntryState.NOT_LOADED + + await hass.config_entries.async_set_disabled_by( + mqtt_config_entry.entry_id, ConfigEntryDisabler.USER + ) + mqtt_mock.connected = False + + with pytest.raises(HomeAssistantError, match=r".*MQTT is not enabled"): + await mqtt.async_subscribe(hass, "test-topic", record_calls) + + +async def test_subscribe_and_resubscribe( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test resubscribing within the debounce time.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + with ( + patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.4), + patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.4), + ): + mock_debouncer.clear() + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + # This unsub will be un-done with the following subscribe + # unsubscribe should not be called at the broker + unsub() + 
unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + await mock_debouncer.wait() + mock_debouncer.clear() + + async_fire_mqtt_message(hass, "test-topic", "test-payload") + + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "test-topic" + assert recorded_calls[0].payload == "test-payload" + # assert unsubscribe was not called + mqtt_client_mock.unsubscribe.assert_not_called() + + mock_debouncer.clear() + unsub() + + await mock_debouncer.wait() + mqtt_client_mock.unsubscribe.assert_called_once_with(["test-topic"]) + + +async def test_subscribe_topic_non_async( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of a topic using the non-async function.""" + await mqtt_mock_entry() + await mock_debouncer.wait() + mock_debouncer.clear() + unsub = await hass.async_add_executor_job( + mqtt.subscribe, hass, "test-topic", record_calls + ) + await mock_debouncer.wait() + + async_fire_mqtt_message(hass, "test-topic", "test-payload") + + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "test-topic" + assert recorded_calls[0].payload == "test-payload" + + mock_debouncer.clear() + await hass.async_add_executor_job(unsub) + await mock_debouncer.wait() + + async_fire_mqtt_message(hass, "test-topic", "test-payload") + + assert len(recorded_calls) == 1 + + +async def test_subscribe_bad_topic( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of a topic.""" + await mqtt_mock_entry() + with pytest.raises(HomeAssistantError): + await mqtt.async_subscribe(hass, 55, record_calls) # type: ignore[arg-type] + + +async def test_subscribe_topic_not_match( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: 
MessageCallbackType, +) -> None: + """Test if subscribed topic is not a match.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "test-topic", record_calls) + + async_fire_mqtt_message(hass, "another-test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 0 + + +async def test_subscribe_topic_level_wildcard( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "test-topic/+/on", record_calls) + + async_fire_mqtt_message(hass, "test-topic/bier/on", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "test-topic/bier/on" + assert recorded_calls[0].payload == "test-payload" + + +async def test_subscribe_topic_level_wildcard_no_subtree_match( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "test-topic/+/on", record_calls) + + async_fire_mqtt_message(hass, "test-topic/bier", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 0 + + +async def test_subscribe_topic_level_wildcard_root_topic_no_subtree_match( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "test-topic-123", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 0 + + +async def 
test_subscribe_topic_subtree_wildcard_subtree_topic( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "test-topic/bier/on", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "test-topic/bier/on" + assert recorded_calls[0].payload == "test-payload" + + +async def test_subscribe_topic_subtree_wildcard_root_topic( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "test-topic" + assert recorded_calls[0].payload == "test-payload" + + +async def test_subscribe_topic_subtree_wildcard_no_match( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "another-test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 0 + + +async def test_subscribe_topic_level_wildcard_and_wildcard_root_topic( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard 
topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "hi/test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "hi/test-topic" + assert recorded_calls[0].payload == "test-payload" + + +async def test_subscribe_topic_level_wildcard_and_wildcard_subtree_topic( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "hi/test-topic/here-iam", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "hi/test-topic/here-iam" + assert recorded_calls[0].payload == "test-payload" + + +async def test_subscribe_topic_level_wildcard_and_wildcard_level_no_match( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "hi/here-iam/test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 0 + + +async def test_subscribe_topic_level_wildcard_and_wildcard_no_match( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "hi/another-test-topic", "test-payload") + + await 
hass.async_block_till_done() + assert len(recorded_calls) == 0 + + +async def test_subscribe_topic_sys_root( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of $ root topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "$test-topic/subtree/on", record_calls) + + async_fire_mqtt_message(hass, "$test-topic/subtree/on", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "$test-topic/subtree/on" + assert recorded_calls[0].payload == "test-payload" + + +async def test_subscribe_topic_sys_root_and_wildcard_topic( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of $ root and wildcard topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "$test-topic/#", record_calls) + + async_fire_mqtt_message(hass, "$test-topic/some-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "$test-topic/some-topic" + assert recorded_calls[0].payload == "test-payload" + + +async def test_subscribe_topic_sys_root_and_wildcard_subtree_topic( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription of $ root and wildcard subtree topics.""" + await mqtt_mock_entry() + await mqtt.async_subscribe(hass, "$test-topic/subtree/#", record_calls) + + async_fire_mqtt_message(hass, "$test-topic/subtree/some-topic", "test-payload") + + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == "$test-topic/subtree/some-topic" + assert recorded_calls[0].payload == "test-payload" + 
+ +async def test_subscribe_special_characters( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test the subscription to topics with special characters.""" + await mqtt_mock_entry() + topic = "/test-topic/$(.)[^]{-}" + payload = "p4y.l[]a|> ?" + + await mqtt.async_subscribe(hass, topic, record_calls) + + async_fire_mqtt_message(hass, topic, payload) + await hass.async_block_till_done() + assert len(recorded_calls) == 1 + assert recorded_calls[0].topic == topic + assert recorded_calls[0].payload == payload + + +async def test_subscribe_same_topic( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test subscribing to same topic twice and simulate retained messages. + + When subscribing to the same topic again, SUBSCRIBE must be sent to the broker again + for it to resend any retained messages. + """ + mqtt_client_mock = setup_with_birth_msg_client_mock + calls_a: list[ReceiveMessage] = [] + calls_b: list[ReceiveMessage] = [] + + @callback + def _callback_a(msg: ReceiveMessage) -> None: + calls_a.append(msg) + + @callback + def _callback_b(msg: ReceiveMessage) -> None: + calls_b.append(msg) + + mqtt_client_mock.reset_mock() + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "test/state", _callback_a, qos=0) + # Simulate a non retained message after the first subscription + async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=False) + await mock_debouncer.wait() + assert len(calls_a) == 1 + mqtt_client_mock.subscribe.assert_called() + calls_a = [] + mqtt_client_mock.reset_mock() + + await hass.async_block_till_done() + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "test/state", _callback_b, qos=1) + # Simulate an other non retained message after the second subscription + async_fire_mqtt_message(hass, "test/state", "online", qos=0, 
retain=False) + await mock_debouncer.wait() + # Both subscriptions should receive updates + assert len(calls_a) == 1 + assert len(calls_b) == 1 + mqtt_client_mock.subscribe.assert_called() + + +async def test_replaying_payload_same_topic( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test replaying retained messages. + + When subscribing to the same topic again, SUBSCRIBE must be sent to the broker again + for it to resend any retained messages for new subscriptions. + Retained messages must only be replayed for new subscriptions, except + when the MQTT client is reconnecting. + """ + mqtt_client_mock = setup_with_birth_msg_client_mock + calls_a: list[ReceiveMessage] = [] + calls_b: list[ReceiveMessage] = [] + + @callback + def _callback_a(msg: ReceiveMessage) -> None: + calls_a.append(msg) + + @callback + def _callback_b(msg: ReceiveMessage) -> None: + calls_b.append(msg) + + mqtt_client_mock.reset_mock() + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "test/state", _callback_a) + await mock_debouncer.wait() + async_fire_mqtt_message( + hass, "test/state", "online", qos=0, retain=True + ) # Simulate a (retained) message played back + assert len(calls_a) == 1 + mqtt_client_mock.subscribe.assert_called() + calls_a = [] + mqtt_client_mock.reset_mock() + + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "test/state", _callback_b) + await mock_debouncer.wait() + + # Simulate edge case where non retained message was received + # after subscription at HA but before the debouncer delay was passed. + # The message without retain flag directly after a subscription should + # be processed by both subscriptions. 
+ async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=False) + + # Simulate a (retained) message played back on new subscriptions + async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=True) + + # The current subscription only received the message without retain flag + assert len(calls_a) == 1 + assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=False) + # The retained message playback should only be processed by the new subscription. + # The existing subscription already got the latest update, hence the existing + # subscription should not receive the replayed (retained) message. + # Messages without retain flag are received on both subscriptions. + assert len(calls_b) == 2 + assert help_assert_message(calls_b[0], "test/state", "online", qos=0, retain=False) + assert help_assert_message(calls_b[1], "test/state", "online", qos=0, retain=True) + mqtt_client_mock.subscribe.assert_called() + + calls_a = [] + calls_b = [] + mqtt_client_mock.reset_mock() + + # Simulate new message played back on new subscriptions + # After connecting the retain flag will not be set, even if the + # payload published was retained, we cannot see that + async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=False) + assert len(calls_a) == 1 + assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=False) + assert len(calls_b) == 1 + assert help_assert_message(calls_b[0], "test/state", "online", qos=0, retain=False) + + # Now simulate the broker was disconnected shortly + calls_a = [] + calls_b = [] + mqtt_client_mock.reset_mock() + mqtt_client_mock.on_disconnect(None, None, 0) + + mock_debouncer.clear() + mqtt_client_mock.on_connect(None, None, None, 0) + await mock_debouncer.wait() + mqtt_client_mock.subscribe.assert_called() + # Simulate a (retained) message played back after reconnecting + async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=True) + # Both subscriptions now should 
replay the retained message
+    assert len(calls_a) == 1
+    assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=True)
+    assert len(calls_b) == 1
+    assert help_assert_message(calls_b[0], "test/state", "online", qos=0, retain=True)
+
+
+async def test_replaying_payload_after_resubscribing(
+    hass: HomeAssistant,
+    mock_debouncer: asyncio.Event,
+    setup_with_birth_msg_client_mock: MqttMockPahoClient,
+) -> None:
+    """Test replaying and filtering retained messages after resubscribing.
+
+    When subscribing to the same topic again, SUBSCRIBE must be sent to the broker again
+    for it to resend any retained messages for new subscriptions.
+    Retained messages must only be replayed for new subscriptions, except
+    when the MQTT client is reconnecting.
+    """
+    mqtt_client_mock = setup_with_birth_msg_client_mock
+    calls_a: list[ReceiveMessage] = []
+
+    @callback
+    def _callback_a(msg: ReceiveMessage) -> None:
+        calls_a.append(msg)
+
+    mqtt_client_mock.reset_mock()
+    mock_debouncer.clear()
+    unsub = await mqtt.async_subscribe(hass, "test/state", _callback_a)
+    await mock_debouncer.wait()
+    mqtt_client_mock.subscribe.assert_called()
+
+    # Simulate a (retained) message played back
+    async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=True)
+    assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=True)
+    calls_a.clear()
+
+    # Test we get updates
+    async_fire_mqtt_message(hass, "test/state", "offline", qos=0, retain=False)
+    assert help_assert_message(calls_a[0], "test/state", "offline", qos=0, retain=False)
+    calls_a.clear()
+
+    # Test we filter new retained updates
+    async_fire_mqtt_message(hass, "test/state", "offline", qos=0, retain=True)
+    await hass.async_block_till_done()
+    assert len(calls_a) == 0
+
+    # Unsubscribe and resubscribe again
+    mock_debouncer.clear()
+    unsub()
+    unsub = await mqtt.async_subscribe(hass, "test/state", _callback_a)
+    await mock_debouncer.wait()
+    mqtt_client_mock.subscribe.assert_called()
+
+    
# Simulate we can receive a (retained) played back message again
+    async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=True)
+    assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=True)
+
+
+async def test_replaying_payload_wildcard_topic(
+    hass: HomeAssistant,
+    mock_debouncer: asyncio.Event,
+    setup_with_birth_msg_client_mock: MqttMockPahoClient,
+) -> None:
+    """Test replaying retained messages.
+
+    When we have multiple subscriptions to the same wildcard topic,
+    SUBSCRIBE must be sent to the broker again
+    for it to resend any retained messages for new subscriptions.
+    Retained messages should only be replayed for new subscriptions, except
+    when the MQTT client is reconnecting.
+    """
+    mqtt_client_mock = setup_with_birth_msg_client_mock
+    calls_a: list[ReceiveMessage] = []
+    calls_b: list[ReceiveMessage] = []
+
+    @callback
+    def _callback_a(msg: ReceiveMessage) -> None:
+        calls_a.append(msg)
+
+    @callback
+    def _callback_b(msg: ReceiveMessage) -> None:
+        calls_b.append(msg)
+
+    mqtt_client_mock.reset_mock()
+    mock_debouncer.clear()
+    await mqtt.async_subscribe(hass, "test/#", _callback_a)
+    await mock_debouncer.wait()
+    # Simulate (retained) messages being played back on new subscriptions
+    async_fire_mqtt_message(hass, "test/state1", "new_value_1", qos=0, retain=True)
+    async_fire_mqtt_message(hass, "test/state2", "new_value_2", qos=0, retain=True)
+    assert len(calls_a) == 2
+    mqtt_client_mock.subscribe.assert_called()
+    calls_a = []
+    mqtt_client_mock.reset_mock()
+
+    # resubscribe to the wildcard topic again
+    mock_debouncer.clear()
+    await mqtt.async_subscribe(hass, "test/#", _callback_b)
+    await mock_debouncer.wait()
+    # Simulate (retained) messages being played back on new subscriptions
+    async_fire_mqtt_message(hass, "test/state1", "initial_value_1", qos=0, retain=True)
+    async_fire_mqtt_message(hass, "test/state2", "initial_value_2", qos=0, retain=True)
+    # The retained messages playback should only be 
processed for the new subscriptions + assert len(calls_a) == 0 + assert len(calls_b) == 2 + mqtt_client_mock.subscribe.assert_called() + + calls_a = [] + calls_b = [] + mqtt_client_mock.reset_mock() + + # Simulate new messages being received + async_fire_mqtt_message(hass, "test/state1", "update_value_1", qos=0, retain=False) + async_fire_mqtt_message(hass, "test/state2", "update_value_2", qos=0, retain=False) + assert len(calls_a) == 2 + assert len(calls_b) == 2 + + # Now simulate the broker was disconnected shortly + calls_a = [] + calls_b = [] + mqtt_client_mock.reset_mock() + mqtt_client_mock.on_disconnect(None, None, 0) + + mock_debouncer.clear() + mqtt_client_mock.on_connect(None, None, None, 0) + await mock_debouncer.wait() + + mqtt_client_mock.subscribe.assert_called() + # Simulate the (retained) messages are played back after reconnecting + # for all subscriptions + async_fire_mqtt_message(hass, "test/state1", "update_value_1", qos=0, retain=True) + async_fire_mqtt_message(hass, "test/state2", "update_value_2", qos=0, retain=True) + # Both subscriptions should replay + assert len(calls_a) == 2 + assert len(calls_b) == 2 + + +async def test_not_calling_unsubscribe_with_active_subscribers( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + record_calls: MessageCallbackType, +) -> None: + """Test not calling unsubscribe() when other subscribers are active.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + mqtt_client_mock.reset_mock() + mock_debouncer.clear() + unsub = await mqtt.async_subscribe(hass, "test/state", record_calls, 2) + await mqtt.async_subscribe(hass, "test/state", record_calls, 1) + await mock_debouncer.wait() + assert mqtt_client_mock.subscribe.called + + mock_debouncer.clear() + unsub() + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown + assert 
not mqtt_client_mock.unsubscribe.called
+    assert not mock_debouncer.is_set()
+
+
+async def test_not_calling_subscribe_when_unsubscribed_within_cooldown(
+    hass: HomeAssistant,
+    mock_debouncer: asyncio.Event,
+    mqtt_mock_entry: MqttMockHAClientGenerator,
+    record_calls: MessageCallbackType,
+) -> None:
+    """Test not calling subscribe() when it is unsubscribed.
+
+    Make sure subscriptions are cleared if unsubscribed before
+    the subscribe cool down period has ended.
+    """
+    mqtt_mock = await mqtt_mock_entry()
+    mqtt_client_mock = mqtt_mock._mqttc
+    await mock_debouncer.wait()
+
+    mock_debouncer.clear()
+    mqtt_client_mock.subscribe.reset_mock()
+    unsub = await mqtt.async_subscribe(hass, "test/state", record_calls)
+    unsub()
+    await mock_debouncer.wait()
+    # The debouncer executes without any pending subscribes
+    assert not mqtt_client_mock.subscribe.called
+
+
+async def test_unsubscribe_race(
+    hass: HomeAssistant,
+    mock_debouncer: asyncio.Event,
+    setup_with_birth_msg_client_mock: MqttMockPahoClient,
+) -> None:
+    """Test the unsubscribe race when unsubscribing and resubscribing quickly."""
+    mqtt_client_mock = setup_with_birth_msg_client_mock
+    calls_a: list[ReceiveMessage] = []
+    calls_b: list[ReceiveMessage] = []
+
+    @callback
+    def _callback_a(msg: ReceiveMessage) -> None:
+        calls_a.append(msg)
+
+    @callback
+    def _callback_b(msg: ReceiveMessage) -> None:
+        calls_b.append(msg)
+
+    mqtt_client_mock.reset_mock()
+
+    mock_debouncer.clear()
+    unsub = await mqtt.async_subscribe(hass, "test/state", _callback_a)
+    unsub()
+    await mqtt.async_subscribe(hass, "test/state", _callback_b)
+    await mock_debouncer.wait()
+
+    async_fire_mqtt_message(hass, "test/state", "online")
+    assert not calls_a
+    assert calls_b
+
+    # We allow either calls [subscribe, unsubscribe, subscribe], [subscribe, subscribe] or
+    # when both subscriptions were combined [subscribe]
+    expected_calls_1 = [
+        call.subscribe([("test/state", 0)]),
+        call.unsubscribe("test/state"),
+        
call.subscribe([("test/state", 0)]), + ] + expected_calls_2 = [ + call.subscribe([("test/state", 0)]), + call.subscribe([("test/state", 0)]), + ] + expected_calls_3 = [ + call.subscribe([("test/state", 0)]), + ] + assert mqtt_client_mock.mock_calls in ( + expected_calls_1, + expected_calls_2, + expected_calls_3, + ) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_DISCOVERY: False}], +) +async def test_restore_subscriptions_on_reconnect( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + record_calls: MessageCallbackType, +) -> None: + """Test subscriptions are restored on reconnect.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + + mqtt_client_mock.reset_mock() + + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "test/state", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown + await mock_debouncer.wait() + assert ("test/state", 0) in help_all_subscribe_calls(mqtt_client_mock) + + mqtt_client_mock.reset_mock() + mqtt_client_mock.on_disconnect(None, None, 0) + + mock_debouncer.clear() + mqtt_client_mock.on_connect(None, None, None, 0) + await mock_debouncer.wait() + assert ("test/state", 0) in help_all_subscribe_calls(mqtt_client_mock) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_DISCOVERY: False}], +) +async def test_restore_all_active_subscriptions_on_reconnect( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + record_calls: MessageCallbackType, +) -> None: + """Test active subscriptions are restored correctly on reconnect.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + mqtt_client_mock.reset_mock() + mock_debouncer.clear() + unsub = await mqtt.async_subscribe(hass, "test/state", record_calls, qos=2) + await mqtt.async_subscribe(hass, "test/state", 
record_calls, qos=1) + await mqtt.async_subscribe(hass, "test/state", record_calls, qos=0) + # cooldown + await mock_debouncer.wait() + + # the subscription with the highest QoS should survive + expected = [ + call([("test/state", 2)]), + ] + assert mqtt_client_mock.subscribe.mock_calls == expected + + unsub() + assert mqtt_client_mock.unsubscribe.call_count == 0 + + mqtt_client_mock.on_disconnect(None, None, 0) + + mock_debouncer.clear() + mqtt_client_mock.on_connect(None, None, None, 0) + # wait for cooldown + await mock_debouncer.wait() + + expected.append(call([("test/state", 1)])) + for expected_call in expected: + assert mqtt_client_mock.subscribe.hass_call(expected_call) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_DISCOVERY: False}], +) +async def test_subscribed_at_highest_qos( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + record_calls: MessageCallbackType, +) -> None: + """Test the highest qos as assigned when subscribing to the same topic.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + mqtt_client_mock.reset_mock() + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "test/state", record_calls, qos=0) + await hass.async_block_till_done() + # cooldown + await mock_debouncer.wait() + assert ("test/state", 0) in help_all_subscribe_calls(mqtt_client_mock) + mqtt_client_mock.reset_mock() + + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "test/state", record_calls, qos=1) + await mqtt.async_subscribe(hass, "test/state", record_calls, qos=2) + # cooldown + await mock_debouncer.wait() + + # the subscription with the highest QoS should survive + assert help_all_subscribe_calls(mqtt_client_mock) == [("test/state", 2)] + + +async def test_initial_setup_logs_error( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mqtt_client_mock: MqttMockPahoClient, +) -> None: + """Test for setup failure if 
initial client connection fails.""" + entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) + entry.add_to_hass(hass) + mqtt_client_mock.connect.side_effect = MagicMock(return_value=1) + try: + assert await hass.config_entries.async_setup(entry.entry_id) + except HomeAssistantError: + assert True + assert "Failed to connect to MQTT server:" in caplog.text + + +async def test_logs_error_if_no_connect_broker( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test for setup failure if connection to broker is missing.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + # test with rc = 3 -> broker unavailable + mqtt_client_mock.on_disconnect(Mock(), None, 0) + mqtt_client_mock.on_connect(Mock(), None, None, 3) + await hass.async_block_till_done() + assert ( + "Unable to connect to the MQTT broker: Connection Refused: broker unavailable." + in caplog.text + ) + + +@pytest.mark.parametrize("return_code", [4, 5]) +async def test_triggers_reauth_flow_if_auth_fails( + hass: HomeAssistant, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + return_code: int, +) -> None: + """Test re-auth is triggered if authentication is failing.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + # test with rc = 4 -> CONNACK_REFUSED_NOT_AUTHORIZED and 5 -> CONNACK_REFUSED_BAD_USERNAME_PASSWORD + mqtt_client_mock.on_disconnect(Mock(), None, 0) + mqtt_client_mock.on_connect(Mock(), None, None, return_code) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["source"] == "reauth" + + +@patch("homeassistant.components.mqtt.client.TIMEOUT_ACK", 0.3) +async def test_handle_mqtt_on_callback( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test receiving an ACK callback before waiting for it.""" 
+ mqtt_client_mock = setup_with_birth_msg_client_mock + with patch.object(mqtt_client_mock, "get_mid", return_value=100): + # Simulate an ACK for mid == 100, this will call mqtt_mock._async_get_mid_future(mid) + mqtt_client_mock.on_publish(mqtt_client_mock, None, 100) + await hass.async_block_till_done() + # Make sure the ACK has been received + await hass.async_block_till_done() + # Now call publish without call back, this will call _async_async_wait_for_mid(msg_info.mid) + await mqtt.async_publish(hass, "no_callback/test-topic", "test-payload") + # Since the mid event was already set, we should not see any timeout warning in the log + await hass.async_block_till_done() + assert "No ACK from MQTT server" not in caplog.text + + +async def test_handle_mqtt_on_callback_after_cancellation( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_client_mock: MqttMockPahoClient, +) -> None: + """Test receiving an ACK after a cancellation.""" + mqtt_mock = await mqtt_mock_entry() + # Simulate the mid future getting a cancellation + mqtt_mock()._async_get_mid_future(101).cancel() + # Simulate an ACK for mid == 101, being received after the cancellation + mqtt_client_mock.on_publish(mqtt_client_mock, None, 101) + await hass.async_block_till_done() + assert "No ACK from MQTT server" not in caplog.text + assert "InvalidStateError" not in caplog.text + + +async def test_handle_mqtt_on_callback_after_timeout( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_client_mock: MqttMockPahoClient, +) -> None: + """Test receiving an ACK after a timeout.""" + mqtt_mock = await mqtt_mock_entry() + # Simulate the mid future getting a timeout + mqtt_mock()._async_get_mid_future(101).set_exception(asyncio.TimeoutError) + # Simulate an ACK for mid == 101, being received after the timeout + mqtt_client_mock.on_publish(mqtt_client_mock, None, 101) + await 
hass.async_block_till_done()
+    assert "No ACK from MQTT server" not in caplog.text
+    assert "InvalidStateError" not in caplog.text
+
+
+async def test_publish_error(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test publish error."""
+    entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"})
+    entry.add_to_hass(hass)
+
+    # simulate an Out of memory error
+    with patch(
+        "homeassistant.components.mqtt.async_client.AsyncMQTTClient"
+    ) as mock_client:
+        mock_client().connect = lambda *args: 1
+        mock_client().publish().rc = 1
+        assert await hass.config_entries.async_setup(entry.entry_id)
+        with pytest.raises(HomeAssistantError):
+            await mqtt.async_publish(
+                hass, "some-topic", b"test-payload", qos=0, retain=False, encoding=None
+            )
+        assert "Failed to connect to MQTT server: Out of memory." in caplog.text
+
+
+async def test_subscribe_error(
+    hass: HomeAssistant,
+    setup_with_birth_msg_client_mock: MqttMockPahoClient,
+    record_calls: MessageCallbackType,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test subscribe error."""
+    mqtt_client_mock = setup_with_birth_msg_client_mock
+    mqtt_client_mock.reset_mock()
+    # simulate client is not connected error before subscribing
+    mqtt_client_mock.subscribe.side_effect = lambda *args: (4, None)
+    await mqtt.async_subscribe(hass, "some-topic", record_calls)
+    while mqtt_client_mock.subscribe.call_count == 0:
+        await hass.async_block_till_done()
+    await hass.async_block_till_done()
+    assert (
+        "Error talking to MQTT: The client is not currently connected." 
in caplog.text + ) + + +async def test_handle_message_callback( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test for handling an incoming message callback.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + callbacks = [] + + @callback + def _callback(args) -> None: + callbacks.append(args) + + msg = ReceiveMessage( + "some-topic", b"test-payload", 1, False, "some-topic", datetime.now() + ) + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "some-topic", _callback) + await mock_debouncer.wait() + mqtt_client_mock.reset_mock() + mqtt_client_mock.on_message(None, None, msg) + + assert len(callbacks) == 1 + assert callbacks[0].topic == "some-topic" + assert callbacks[0].qos == 1 + assert callbacks[0].payload == "test-payload" + + +@pytest.mark.parametrize( + ("mqtt_config_entry_data", "protocol"), + [ + ( + { + mqtt.CONF_BROKER: "mock-broker", + CONF_PROTOCOL: "3.1", + }, + 3, + ), + ( + { + mqtt.CONF_BROKER: "mock-broker", + CONF_PROTOCOL: "3.1.1", + }, + 4, + ), + ( + { + mqtt.CONF_BROKER: "mock-broker", + CONF_PROTOCOL: "5", + }, + 5, + ), + ], +) +async def test_setup_mqtt_client_protocol( + mqtt_mock_entry: MqttMockHAClientGenerator, protocol: int +) -> None: + """Test MQTT client protocol setup.""" + with patch( + "homeassistant.components.mqtt.async_client.AsyncMQTTClient" + ) as mock_client: + await mqtt_mock_entry() + + # check if protocol setup was correctly + assert mock_client.call_args[1]["protocol"] == protocol + + +@patch("homeassistant.components.mqtt.client.TIMEOUT_ACK", 0.2) +async def test_handle_mqtt_timeout_on_callback( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_debouncer: asyncio.Event +) -> None: + """Test publish without receiving an ACK callback.""" + mid = 0 + + class FakeInfo: + """Returns a simulated client publish response.""" + + mid = 102 + rc = 0 + + with patch( + 
"homeassistant.components.mqtt.async_client.AsyncMQTTClient"
+    ) as mock_client:
+
+        def _mock_ack(topic: str, qos: int = 0) -> tuple[int, int]:
+            # Handle ACK for subscribe normally
+            nonlocal mid
+            mid += 1
+            mock_client.on_subscribe(0, 0, mid)
+            return (0, mid)
+
+        # We want to simulate the publish behaviour of the MQTT client
+        mock_client = mock_client.return_value
+        mock_client.publish.return_value = FakeInfo()
+        # Mock we get a mid and rc=0
+        mock_client.subscribe.side_effect = _mock_ack
+        mock_client.unsubscribe.side_effect = _mock_ack
+        mock_client.connect = MagicMock(
+            return_value=0,
+            side_effect=lambda *args, **kwargs: hass.loop.call_soon_threadsafe(
+                mock_client.on_connect, mock_client, None, 0, 0, 0
+            ),
+        )
+
+        entry = MockConfigEntry(
+            domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}
+        )
+        entry.add_to_hass(hass)
+
+        # Set up the integration
+        mock_debouncer.clear()
+        assert await hass.config_entries.async_setup(entry.entry_id)
+
+        # Now we call publish without simulating an ACK callback
+        await mqtt.async_publish(hass, "no_callback/test-topic", "test-payload")
+        await hass.async_block_till_done()
+        # There is no ACK so we should see a timeout in the log after publishing
+        assert len(mock_client.publish.mock_calls) == 1
+        assert "No ACK from MQTT server" in caplog.text
+        # Ensure we stop lingering background tasks
+        await hass.config_entries.async_unload(entry.entry_id)
+        # Assert we did not have any completed subscribes,
+        # because the debouncer subscribe job failed to receive an ACK,
+        # and the timeout caused the debouncer job to fail. 
+ assert not mock_debouncer.is_set() + + +async def test_setup_raises_config_entry_not_ready_if_no_connect_broker( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test for setup failure if connection to broker is missing.""" + entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) + entry.add_to_hass(hass) + + with patch( + "homeassistant.components.mqtt.async_client.AsyncMQTTClient" + ) as mock_client: + mock_client().connect = MagicMock(side_effect=OSError("Connection error")) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert "Failed to connect to MQTT server due to exception:" in caplog.text + + +@pytest.mark.parametrize( + ("mqtt_config_entry_data", "insecure_param"), + [ + ({"broker": "test-broker", "certificate": "auto"}, "not set"), + ( + {"broker": "test-broker", "certificate": "auto", "tls_insecure": False}, + False, + ), + ({"broker": "test-broker", "certificate": "auto", "tls_insecure": True}, True), + ], +) +async def test_setup_uses_certificate_on_certificate_set_to_auto_and_insecure( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + insecure_param: bool | str, +) -> None: + """Test setup uses bundled certs when certificate is set to auto and insecure.""" + calls = [] + insecure_check = {"insecure": "not set"} + + def mock_tls_set( + certificate, certfile=None, keyfile=None, tls_version=None + ) -> None: + calls.append((certificate, certfile, keyfile, tls_version)) + + def mock_tls_insecure_set(insecure_param) -> None: + insecure_check["insecure"] = insecure_param + + with patch( + "homeassistant.components.mqtt.async_client.AsyncMQTTClient" + ) as mock_client: + mock_client().tls_set = mock_tls_set + mock_client().tls_insecure_set = mock_tls_insecure_set + await mqtt_mock_entry() + await hass.async_block_till_done() + + assert calls + + expected_certificate = certifi.where() + assert calls[0][0] == expected_certificate 
+ + # test if insecure is set + assert insecure_check["insecure"] == insecure_param + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ + { + mqtt.CONF_BROKER: "mock-broker", + mqtt.CONF_CERTIFICATE: "auto", + } + ], +) +async def test_tls_version( + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test setup defaults for tls.""" + await mqtt_mock_entry() + await hass.async_block_till_done() + assert ( + mqtt_client_mock.tls_set.mock_calls[0][2]["tls_version"] + == ssl.PROTOCOL_TLS_CLIENT + ) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ + { + mqtt.CONF_BROKER: "mock-broker", + mqtt.CONF_BIRTH_MESSAGE: { + mqtt.ATTR_TOPIC: "birth", + mqtt.ATTR_PAYLOAD: "birth", + mqtt.ATTR_QOS: 0, + mqtt.ATTR_RETAIN: False, + }, + } + ], +) +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_custom_birth_message( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + mqtt_config_entry_data: dict[str, Any], + mqtt_client_mock: MqttMockPahoClient, +) -> None: + """Test sending birth message.""" + + entry = MockConfigEntry(domain=mqtt.DOMAIN, data=mqtt_config_entry_data) + entry.add_to_hass(hass) + hass.config.components.add(mqtt.DOMAIN) + assert await hass.config_entries.async_setup(entry.entry_id) + mock_debouncer.clear() + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + # discovery cooldown + await mock_debouncer.wait() + # Wait for publish call to finish + await hass.async_block_till_done(wait_background_tasks=True) + mqtt_client_mock.publish.assert_called_with("birth", "birth", 0, False) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ENTRY_DEFAULT_BIRTH_MESSAGE], +) +async def test_default_birth_message( + hass: HomeAssistant, setup_with_birth_msg_client_mock: 
MqttMockPahoClient +) -> None: + """Test sending birth message.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + await hass.async_block_till_done(wait_background_tasks=True) + mqtt_client_mock.publish.assert_called_with( + "homeassistant/status", "online", 0, False + ) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_BIRTH_MESSAGE: {}}], +) +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_no_birth_message( + hass: HomeAssistant, + record_calls: MessageCallbackType, + mock_debouncer: asyncio.Event, + mqtt_config_entry_data: dict[str, Any], + mqtt_client_mock: MqttMockPahoClient, +) -> None: + """Test disabling birth message.""" + entry = MockConfigEntry(domain=mqtt.DOMAIN, data=mqtt_config_entry_data) + entry.add_to_hass(hass) + hass.config.components.add(mqtt.DOMAIN) + mock_debouncer.clear() + assert await hass.config_entries.async_setup(entry.entry_id) + # Wait for discovery cooldown + await mock_debouncer.wait() + # Ensure any publishing could have been processed + await hass.async_block_till_done(wait_background_tasks=True) + mqtt_client_mock.publish.assert_not_called() + + mqtt_client_mock.reset_mock() + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "homeassistant/some-topic", record_calls) + # Wait for discovery cooldown + await mock_debouncer.wait() + mqtt_client_mock.subscribe.assert_called() + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ENTRY_DEFAULT_BIRTH_MESSAGE], +) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.2) +async def test_delayed_birth_message( + hass: HomeAssistant, + mqtt_config_entry_data: dict[str, Any], + mqtt_client_mock: MqttMockPahoClient, +) -> None: + """Test sending birth message does not happen until Home Assistant starts.""" + 
hass.set_state(CoreState.starting) + await hass.async_block_till_done() + birth = asyncio.Event() + entry = MockConfigEntry(domain=mqtt.DOMAIN, data=mqtt_config_entry_data) + entry.add_to_hass(hass) + hass.config.components.add(mqtt.DOMAIN) + assert await hass.config_entries.async_setup(entry.entry_id) + + @callback + def wait_birth(msg: ReceiveMessage) -> None: + """Handle birth message.""" + birth.set() + + await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) + with pytest.raises(TimeoutError): + await asyncio.wait_for(birth.wait(), 0.05) + assert not mqtt_client_mock.publish.called + assert not birth.is_set() + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await birth.wait() + mqtt_client_mock.publish.assert_called_with( + "homeassistant/status", "online", 0, False + ) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ENTRY_DEFAULT_BIRTH_MESSAGE], +) +async def test_subscription_done_when_birth_message_is_sent( + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test sending birth message until initial subscription has been completed.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) + assert ("homeassistant/+/+/config", 0) in subscribe_calls + assert ("homeassistant/+/+/+/config", 0) in subscribe_calls + mqtt_client_mock.publish.assert_called_with( + "homeassistant/status", "online", 0, False + ) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ + { + mqtt.CONF_BROKER: "mock-broker", + mqtt.CONF_WILL_MESSAGE: { + mqtt.ATTR_TOPIC: "death", + mqtt.ATTR_PAYLOAD: "death", + mqtt.ATTR_QOS: 0, + mqtt.ATTR_RETAIN: False, + }, + } + ], +) +async def test_custom_will_message( + hass: HomeAssistant, + mqtt_config_entry_data: dict[str, Any], + mqtt_client_mock: MqttMockPahoClient, +) -> None: + """Test will message.""" + entry = MockConfigEntry(domain=mqtt.DOMAIN, data=mqtt_config_entry_data) + entry.add_to_hass(hass) + 
hass.config.components.add(mqtt.DOMAIN) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + mqtt_client_mock.will_set.assert_called_with( + topic="death", payload="death", qos=0, retain=False + ) + + +async def test_default_will_message( + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test will message.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + mqtt_client_mock.will_set.assert_called_with( + topic="homeassistant/status", payload="offline", qos=0, retain=False + ) + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_WILL_MESSAGE: {}}], +) +async def test_no_will_message( + hass: HomeAssistant, + mqtt_config_entry_data: dict[str, Any], + mqtt_client_mock: MqttMockPahoClient, +) -> None: + """Test will message.""" + entry = MockConfigEntry(domain=mqtt.DOMAIN, data=mqtt_config_entry_data) + entry.add_to_hass(hass) + hass.config.components.add(mqtt.DOMAIN) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + mqtt_client_mock.will_set.assert_not_called() + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ENTRY_DEFAULT_BIRTH_MESSAGE | {mqtt.CONF_DISCOVERY: False}], +) +async def test_mqtt_subscribes_topics_on_connect( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + record_calls: MessageCallbackType, +) -> None: + """Test subscription to topic on connect.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "topic/test", record_calls) + await mqtt.async_subscribe(hass, "home/sensor", record_calls, 2) + await mqtt.async_subscribe(hass, "still/pending", record_calls) + await mqtt.async_subscribe(hass, "still/pending", record_calls, 1) + await mock_debouncer.wait() + + mqtt_client_mock.on_disconnect(Mock(), None, 0) + + 
mqtt_client_mock.reset_mock() + + mock_debouncer.clear() + mqtt_client_mock.on_connect(Mock(), None, 0, 0) + await mock_debouncer.wait() + + subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) + assert ("topic/test", 0) in subscribe_calls + assert ("home/sensor", 2) in subscribe_calls + assert ("still/pending", 1) in subscribe_calls + + +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ENTRY_DEFAULT_BIRTH_MESSAGE], +) +async def test_mqtt_subscribes_in_single_call( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + record_calls: MessageCallbackType, +) -> None: + """Test bundled client subscription to topic.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + mqtt_client_mock.subscribe.reset_mock() + mock_debouncer.clear() + await mqtt.async_subscribe(hass, "topic/test", record_calls) + await mqtt.async_subscribe(hass, "home/sensor", record_calls) + # Make sure the debouncer finishes + await mock_debouncer.wait() + + assert mqtt_client_mock.subscribe.call_count == 1 + # Assert we have a single subscription call with both subscriptions + assert mqtt_client_mock.subscribe.mock_calls[0][1][0] in [ + [("topic/test", 0), ("home/sensor", 0)], + [("home/sensor", 0), ("topic/test", 0)], + ] + + +@pytest.mark.parametrize("mqtt_config_entry_data", [ENTRY_DEFAULT_BIRTH_MESSAGE]) +@patch("homeassistant.components.mqtt.client.MAX_SUBSCRIBES_PER_CALL", 2) +@patch("homeassistant.components.mqtt.client.MAX_UNSUBSCRIBES_PER_CALL", 2) +async def test_mqtt_subscribes_and_unsubscribes_in_chunks( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + record_calls: MessageCallbackType, +) -> None: + """Test chunked client subscriptions.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + + mqtt_client_mock.subscribe.reset_mock() + unsub_tasks: list[CALLBACK_TYPE] = [] + mock_debouncer.clear() + unsub_tasks.append(await 
mqtt.async_subscribe(hass, "topic/test1", record_calls)) + unsub_tasks.append(await mqtt.async_subscribe(hass, "home/sensor1", record_calls)) + unsub_tasks.append(await mqtt.async_subscribe(hass, "topic/test2", record_calls)) + unsub_tasks.append(await mqtt.async_subscribe(hass, "home/sensor2", record_calls)) + # Make sure the debouncer finishes + await mock_debouncer.wait() + + assert mqtt_client_mock.subscribe.call_count == 2 + # Assert we have a 2 subscription calls with both 2 subscriptions + assert len(mqtt_client_mock.subscribe.mock_calls[0][1][0]) == 2 + assert len(mqtt_client_mock.subscribe.mock_calls[1][1][0]) == 2 + + # Unsubscribe all topics + mock_debouncer.clear() + for task in unsub_tasks: + task() + # Make sure the debouncer finishes + await mock_debouncer.wait() + + assert mqtt_client_mock.unsubscribe.call_count == 2 + # Assert we have a 2 unsubscribe calls with both 2 topic + assert len(mqtt_client_mock.unsubscribe.mock_calls[0][1][0]) == 2 + assert len(mqtt_client_mock.unsubscribe.mock_calls[1][1][0]) == 2 + + +async def test_auto_reconnect( + hass: HomeAssistant, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test reconnection is automatically done.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + assert mqtt_client_mock.connect.call_count == 1 + mqtt_client_mock.reconnect.reset_mock() + + mqtt_client_mock.disconnect() + mqtt_client_mock.on_disconnect(None, None, 0) + await hass.async_block_till_done() + + mqtt_client_mock.reconnect.side_effect = OSError("foo") + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + await hass.async_block_till_done() + assert len(mqtt_client_mock.reconnect.mock_calls) == 1 + assert "Error re-connecting to MQTT server due to exception: foo" in caplog.text + + mqtt_client_mock.reconnect.side_effect = None + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + 
await hass.async_block_till_done() + assert len(mqtt_client_mock.reconnect.mock_calls) == 2 + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + + mqtt_client_mock.disconnect() + mqtt_client_mock.on_disconnect(None, None, 0) + await hass.async_block_till_done() + + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + await hass.async_block_till_done() + # Should not reconnect after stop + assert len(mqtt_client_mock.reconnect.mock_calls) == 2 + + +async def test_server_sock_connect_and_disconnect( + hass: HomeAssistant, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + assert mqtt_client_mock.connect.call_count == 1 + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) + await hass.async_block_till_done() + + server.close() # mock the server closing the connection on us + + mock_debouncer.clear() + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + await mock_debouncer.wait() + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + mqtt_client_mock.on_socket_unregister_write(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_close(mqtt_client_mock, None, client) + mqtt_client_mock.on_disconnect(mqtt_client_mock, None, client) + await hass.async_block_till_done() + mock_debouncer.clear() + unsub() + await hass.async_block_till_done() + assert not mock_debouncer.is_set() + + # Should have failed + assert len(recorded_calls) 
== 0 + + +async def test_server_sock_buffer_size( + hass: HomeAssistant, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test handling the socket buffer size fails.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + assert mqtt_client_mock.connect.call_count == 1 + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + with patch.object(client, "setsockopt", side_effect=OSError("foo")): + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) + await hass.async_block_till_done() + assert "Unable to increase the socket buffer size" in caplog.text + + +async def test_server_sock_buffer_size_with_websocket( + hass: HomeAssistant, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test handling the socket buffer size fails.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + assert mqtt_client_mock.connect.call_count == 1 + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + + class FakeWebsocket(paho_mqtt.WebsocketWrapper): + def _do_handshake(self, *args, **kwargs): + pass + + wrapped_socket = FakeWebsocket(client, "127.0.01", 1, False, "/", None) + + with patch.object(client, "setsockopt", side_effect=OSError("foo")): + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, wrapped_socket) + mqtt_client_mock.on_socket_register_write( + mqtt_client_mock, None, wrapped_socket + ) + await hass.async_block_till_done() + assert "Unable to increase the socket buffer size" in caplog.text + + +async def 
test_client_sock_failure_after_connect( + hass: HomeAssistant, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + recorded_calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + assert mqtt_client_mock.connect.call_count == 1 + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) + await hass.async_block_till_done() + + mqtt_client_mock.loop_write.side_effect = OSError("foo") + client.close() # close the client socket out from under the client + + assert mqtt_client_mock.connect.call_count == 1 + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) + await hass.async_block_till_done() + + unsub() + # Should have failed + assert len(recorded_calls) == 0 + + +async def test_loop_write_failure( + hass: HomeAssistant, + setup_with_birth_msg_client_mock: MqttMockPahoClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + assert mqtt_client_mock.connect.call_count == 1 + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) + mqtt_client_mock.loop_write.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + 
mqtt_client_mock.loop_read.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + + # Fill up the outgoing buffer to ensure that loop_write + # and loop_read are called that next time control is + # returned to the event loop + try: + for _ in range(1000): + server.send(b"long" * 100) + except BlockingIOError: + pass + + server.close() + # Once for the reader callback + await hass.async_block_till_done() + # Another for the writer callback + await hass.async_block_till_done() + # Final for the disconnect callback + await hass.async_block_till_done() + + assert "Disconnected from MQTT server test-broker:1883" in caplog.text diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py index 2bf78e59e42..13bd6b5feda 100644 --- a/tests/components/mqtt/test_climate.py +++ b/tests/components/mqtt/test_climate.py @@ -179,21 +179,19 @@ async def test_get_hvac_modes( state = hass.states.get(ENTITY_CLIMATE) modes = state.attributes.get("hvac_modes") - assert [ + assert modes == [ HVACMode.AUTO, HVACMode.OFF, HVACMode.COOL, HVACMode.HEAT, HVACMode.DRY, HVACMode.FAN_ONLY, - ] == modes + ] @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_set_operation_bad_attr_and_state( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting operation mode without required attribute. 
@@ -454,9 +452,7 @@ async def test_turn_on_and_off_without_power_command( @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_set_fan_mode_bad_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting fan mode without required attribute.""" await mqtt_mock_entry() @@ -551,9 +547,7 @@ async def test_set_fan_mode( @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_set_swing_mode_bad_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting swing mode without required attribute.""" await mqtt_mock_entry() @@ -660,11 +654,11 @@ async def test_set_target_temperature( assert state.state == "heat" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "heat", 0, False) mqtt_mock.async_publish.reset_mock() - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == 47 + assert state.attributes.get("temperature") == 35 mqtt_mock.async_publish.assert_called_once_with( - "temperature-topic", "47.0", 0, False + "temperature-topic", "35.0", 0, False ) # also test directly supplying the operation mode to set_temperature @@ -719,7 +713,7 @@ async def test_set_target_temperature_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) 
assert state.attributes.get("temperature") is None @@ -1023,7 +1017,16 @@ async def test_handle_action_received( # Cycle through valid modes # Redefine actions according to https://developers.home-assistant.io/docs/core/entity/climate/#hvac-action - actions = ["off", "preheating", "heating", "cooling", "drying", "idle", "fan"] + actions = [ + "off", + "preheating", + "defrosting", + "heating", + "cooling", + "drying", + "idle", + "fan", + ] assert all(elem in actions for elem in HVACAction) for action in actions: async_fire_mqtt_message(hass, "action", action) @@ -1046,9 +1049,7 @@ async def test_handle_action_received( @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_set_preset_mode_optimistic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting of the preset mode.""" mqtt_mock = await mqtt_mock_entry() @@ -1104,9 +1105,7 @@ async def test_set_preset_mode_optimistic( ], ) async def test_set_preset_mode_explicit_optimistic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting of the preset mode.""" mqtt_mock = await mqtt_mock_entry() @@ -1523,9 +1522,7 @@ async def test_get_with_templates( ], ) async def test_set_and_templates( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting various attributes with templates.""" mqtt_mock = await mqtt_mock_entry() @@ -1593,13 +1590,13 @@ async def test_set_and_templates( assert state.attributes.get("swing_mode") == "on" # Temperature - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, 
entity_id=ENTITY_CLIMATE) mqtt_mock.async_publish.assert_called_once_with( - "temperature-topic", "temp: 47.0", 0, False + "temperature-topic", "temp: 35.0", 0, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == 47 + assert state.attributes.get("temperature") == 35 # Temperature Low/High await common.async_set_temperature( @@ -1879,11 +1876,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - climate.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, climate.DOMAIN, DEFAULT_CONFIG ) @@ -1894,26 +1887,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - climate.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, climate.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - climate.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, climate.DOMAIN, DEFAULT_CONFIG ) @@ -1987,34 +1970,26 @@ async def test_encoding_subscribable_topics( async def test_discovery_removal_climate( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered climate.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][climate.DOMAIN]) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, climate.DOMAIN, data - ) + await 
help_test_discovery_removal(hass, mqtt_mock_entry, climate.DOMAIN, data) async def test_discovery_update_climate( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered climate.""" config1 = {"name": "Beer"} config2 = {"name": "Milk"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, climate.DOMAIN, config1, config2 + hass, mqtt_mock_entry, climate.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_climate( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered climate.""" data1 = '{ "name": "Beer" }' @@ -2022,26 +1997,19 @@ async def test_discovery_update_unchanged_climate( "homeassistant.components.mqtt.climate.MqttClimate.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - climate.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, climate.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer", "power_command_topic": "test_topic#" }' data2 = '{ "name": "Milk", "power_command_topic": "test_topic" }' await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, climate.DOMAIN, data1, data2 + hass, mqtt_mock_entry, climate.DOMAIN, data1, data2 ) @@ -2095,11 +2063,7 @@ async def test_entity_id_update_subscriptions( } } await help_test_entity_id_update_subscriptions( - hass, - mqtt_mock_entry, - climate.DOMAIN, - config, - ["test-topic", 
"avty-topic"], + hass, mqtt_mock_entry, climate.DOMAIN, config, ["test-topic", "avty-topic"] ) @@ -2191,20 +2155,8 @@ async def test_precision_whole( @pytest.mark.parametrize( ("service", "topic", "parameters", "payload", "template"), [ - ( - climate.SERVICE_TURN_ON, - "power_command_topic", - {}, - "ON", - None, - ), - ( - climate.SERVICE_TURN_OFF, - "power_command_topic", - {}, - "OFF", - None, - ), + (climate.SERVICE_TURN_ON, "power_command_topic", {}, "ON", None), + (climate.SERVICE_TURN_OFF, "power_command_topic", {}, "OFF", None), ( climate.SERVICE_SET_HVAC_MODE, "mode_command_topic", @@ -2367,9 +2319,7 @@ async def test_publishing_with_custom_encoding( ], ) async def test_humidity_configuration_validity( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - valid: bool, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, valid: bool ) -> None: """Test the validity of humidity configurations.""" assert await mqtt_mock_entry() @@ -2378,8 +2328,7 @@ async def test_humidity_configuration_validity( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = climate.DOMAIN @@ -2402,8 +2351,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = climate.DOMAIN diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index d196e1998fb..f7ebd039d1a 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -42,6 +42,7 @@ DEFAULT_CONFIG_DEVICE_INFO_ID = { "manufacturer": "Whatever", "name": "Beer", "model": "Glass", + "model_id": "XYZ001", "hw_version": "rev1", "serial_number": "1234deadbeef", "sw_version": 
"0.1-beta", @@ -54,6 +55,7 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { "manufacturer": "Whatever", "name": "Beer", "model": "Glass", + "model_id": "XYZ001", "hw_version": "rev1", "serial_number": "1234deadbeef", "sw_version": "0.1-beta", @@ -103,9 +105,7 @@ def help_custom_config( async def help_test_availability_when_connection_lost( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - domain: str, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, domain: str ) -> None: """Test availability after MQTT disconnection.""" mqtt_mock = await mqtt_mock_entry() @@ -251,8 +251,6 @@ async def help_test_default_availability_list_payload_all( domain: str, config: ConfigType, no_assumed_state: bool = False, - state_topic: str | None = None, - state_message: str | None = None, ) -> None: """Test availability by default payload with defined topic. @@ -314,8 +312,6 @@ async def help_test_default_availability_list_payload_any( domain: str, config: ConfigType, no_assumed_state: bool = False, - state_topic: str | None = None, - state_message: str | None = None, ) -> None: """Test availability by default payload with defined topic. 
@@ -657,7 +653,6 @@ async def help_test_update_with_json_attrs_bad_json( async def help_test_discovery_update_attr( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, domain: str, config: ConfigType, ) -> None: @@ -696,9 +691,7 @@ async def help_test_discovery_update_attr( async def help_test_unique_id( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - domain: str, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, domain: str ) -> None: """Test unique id option only creates one entity per unique_id.""" await mqtt_mock_entry() @@ -709,7 +702,6 @@ async def help_test_unique_id( async def help_test_discovery_removal( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, domain: str, data: str, ) -> None: @@ -735,8 +727,7 @@ async def help_test_discovery_removal( async def help_test_discovery_update( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog, - domain, + domain: str, discovery_config1: DiscoveryInfoType, discovery_config2: DiscoveryInfoType, state_data1: _StateDataType | None = None, @@ -800,7 +791,6 @@ async def help_test_discovery_update( async def help_test_discovery_update_unchanged( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, domain: str, data1: str, discovery_update: MagicMock, @@ -826,7 +816,6 @@ async def help_test_discovery_update_unchanged( async def help_test_discovery_broken( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, domain: str, data1: str, data2: str, @@ -1012,6 +1001,7 @@ async def help_test_entity_device_info_with_identifier( assert device.manufacturer == "Whatever" assert device.name == "Beer" assert device.model == "Glass" + assert device.model_id == "XYZ001" assert device.hw_version == "rev1" assert device.sw_version == "0.1-beta" assert device.suggested_area == 
"default_area" @@ -1048,6 +1038,7 @@ async def help_test_entity_device_info_with_connection( assert device.manufacturer == "Whatever" assert device.name == "Beer" assert device.model == "Glass" + assert device.model_id == "XYZ001" assert device.hw_version == "rev1" assert device.sw_version == "0.1-beta" assert device.suggested_area == "default_area" diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 8df5de8e2fb..2b4cb20ccf9 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -1,6 +1,6 @@ """Test config flow.""" -from collections.abc import Iterator +from collections.abc import Generator, Iterator from contextlib import contextmanager from pathlib import Path from ssl import SSLError @@ -9,7 +9,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from uuid import uuid4 import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant import config_entries @@ -187,11 +186,11 @@ def mock_process_uploaded_file( yield mock_upload +@pytest.mark.usefixtures("mqtt_client_mock") async def test_user_connection_works( hass: HomeAssistant, mock_try_connection: MagicMock, mock_finish_setup: MagicMock, - mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test we can finish a config flow.""" mock_try_connection.return_value = True @@ -217,11 +216,11 @@ async def test_user_connection_works( assert len(mock_finish_setup.mock_calls) == 1 +@pytest.mark.usefixtures("mqtt_client_mock") async def test_user_v5_connection_works( hass: HomeAssistant, mock_try_connection: MagicMock, mock_finish_setup: MagicMock, - mqtt_client_mock: MqttMockPahoClient, ) -> None: """Test we can finish a config flow.""" mock_try_connection.return_value = True @@ -664,11 +663,11 @@ async def test_bad_certificate( ("100", False), ], ) +@pytest.mark.usefixtures("mock_reload_after_entry_update") async def test_keepalive_validation( hass: HomeAssistant, 
mqtt_mock_entry: MqttMockHAClientGenerator, mock_try_connection: MagicMock, - mock_reload_after_entry_update: MagicMock, input_value: str, error: bool, ) -> None: @@ -851,16 +850,17 @@ async def test_invalid_discovery_prefix( assert mock_reload_after_entry_update.call_count == 0 -def get_default(schema: vol.Schema, key: str) -> Any: +def get_default(schema: vol.Schema, key: str) -> Any | None: """Get default value for key in voluptuous schema.""" for schema_key in schema: if schema_key == key: if schema_key.default == vol.UNDEFINED: return None return schema_key.default() + return None -def get_suggested(schema: vol.Schema, key: str) -> Any: +def get_suggested(schema: vol.Schema, key: str) -> Any | None: """Get suggested value for key in voluptuous schema.""" for schema_key in schema: if schema_key == key: @@ -870,13 +870,14 @@ def get_suggested(schema: vol.Schema, key: str) -> Any: ): return None return schema_key.description["suggested_value"] + return None +@pytest.mark.usefixtures("mock_reload_after_entry_update") async def test_option_flow_default_suggested_values( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, mock_try_connection_success: MqttMockPahoClient, - mock_reload_after_entry_update: MagicMock, ) -> None: """Test config flow options has default/suggested values.""" await mqtt_mock_entry() @@ -1030,11 +1031,11 @@ async def test_option_flow_default_suggested_values( @pytest.mark.parametrize( ("advanced_options", "step_id"), [(False, "options"), (True, "broker")] ) +@pytest.mark.usefixtures("mock_reload_after_entry_update") async def test_skipping_advanced_options( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, mock_try_connection: MagicMock, - mock_reload_after_entry_update: MagicMock, advanced_options: bool, step_id: str, ) -> None: @@ -1102,12 +1103,11 @@ async def test_skipping_advanced_options( ), ], ) +@pytest.mark.usefixtures("mock_reload_after_entry_update") async def test_step_reauth( hass: HomeAssistant, - 
mqtt_mock_entry: MqttMockHAClientGenerator, mqtt_client_mock: MqttMockPahoClient, mock_try_connection: MagicMock, - mock_reload_after_entry_update: MagicMock, test_input: dict[str, Any], user_input: dict[str, Any], new_password: str, @@ -1115,12 +1115,9 @@ async def test_step_reauth( """Test that the reauth step works.""" # Prepare the config entry - config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - hass.config_entries.async_update_entry( - config_entry, - data=test_input, - ) - await mqtt_mock_entry() + config_entry = MockConfigEntry(domain=mqtt.DOMAIN, data=test_input) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) # Start reauth flow config_entry.async_start_reauth(hass) @@ -1288,12 +1285,9 @@ async def test_options_bad_will_message_fails( @pytest.mark.parametrize( "hass_config", [{"mqtt": {"sensor": [{"state_topic": "some-topic"}]}}] ) +@pytest.mark.usefixtures("mock_ssl_context", "mock_process_uploaded_file") async def test_try_connection_with_advanced_parameters( - hass: HomeAssistant, - mock_try_connection_success: MqttMockPahoClient, - tmp_path: Path, - mock_ssl_context: dict[str, MagicMock], - mock_process_uploaded_file: MagicMock, + hass: HomeAssistant, mock_try_connection_success: MqttMockPahoClient ) -> None: """Test config flow with advanced parameters from config.""" config_entry = MockConfigEntry(domain=mqtt.DOMAIN) @@ -1406,10 +1400,10 @@ async def test_try_connection_with_advanced_parameters( await hass.async_block_till_done() +@pytest.mark.usefixtures("mock_ssl_context") async def test_setup_with_advanced_settings( hass: HomeAssistant, mock_try_connection: MagicMock, - mock_ssl_context: dict[str, MagicMock], mock_process_uploaded_file: MagicMock, ) -> None: """Test config flow setup with advanced parameters.""" @@ -1568,11 +1562,9 @@ async def test_setup_with_advanced_settings( } +@pytest.mark.usefixtures("mock_ssl_context", "mock_process_uploaded_file") async def 
test_change_websockets_transport_to_tcp( - hass: HomeAssistant, - mock_try_connection, - mock_ssl_context: dict[str, MagicMock], - mock_process_uploaded_file: MagicMock, + hass: HomeAssistant, mock_try_connection: MagicMock ) -> None: """Test option flow setup with websockets transport settings.""" config_entry = MockConfigEntry(domain=mqtt.DOMAIN) diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index 4b46f49c629..451665de96a 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -697,9 +697,7 @@ async def test_position_via_template_and_entity_id( ], ) async def test_optimistic_flag( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - assumed_state: bool, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, assumed_state: bool ) -> None: """Test assumed_state is set correctly.""" await mqtt_mock_entry() @@ -1073,10 +1071,9 @@ async def test_current_cover_position_inverted( } ], ) +@pytest.mark.usefixtures("hass") async def test_optimistic_position( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test optimistic position is not supported.""" assert await mqtt_mock_entry() @@ -1627,7 +1624,6 @@ async def test_tilt_via_invocation_defaults( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test tilt defaults on close/open.""" - await hass.async_block_till_done() mqtt_mock = await mqtt_mock_entry() await hass.services.async_call( @@ -2547,11 +2543,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - cover.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, cover.DOMAIN, DEFAULT_CONFIG ) @@ -2562,26 +2554,16 @@ async def 
test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - cover.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, cover.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - cover.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, cover.DOMAIN, DEFAULT_CONFIG ) @@ -2614,32 +2596,26 @@ async def test_unique_id( async def test_discovery_removal_cover( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered cover.""" data = '{ "name": "test", "command_topic": "test_topic" }' - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, cover.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, cover.DOMAIN, data) async def test_discovery_update_cover( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered cover.""" config1 = {"name": "Beer", "command_topic": "test_topic"} config2 = {"name": "Milk", "command_topic": "test_topic"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, cover.DOMAIN, config1, config2 + hass, mqtt_mock_entry, cover.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_cover( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: 
MqttMockHAClientGenerator ) -> None: """Test update of discovered cover.""" data1 = '{ "name": "Beer", "command_topic": "test_topic" }' @@ -2647,27 +2623,18 @@ async def test_discovery_update_unchanged_cover( "homeassistant.components.mqtt.cover.MqttCover.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - cover.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, cover.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer", "command_topic": "test_topic#" }' data2 = '{ "name": "Milk", "command_topic": "test_topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, cover.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, cover.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -3242,10 +3209,9 @@ async def test_position_via_position_topic_template_return_invalid_json( } ], ) +@pytest.mark.usefixtures("hass") async def test_set_position_topic_without_get_position_topic_error( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test error when set_position_topic is used without position_topic.""" assert await mqtt_mock_entry() @@ -3268,8 +3234,8 @@ async def test_set_position_topic_without_get_position_topic_error( } ], ) +@pytest.mark.usefixtures("hass") async def test_value_template_without_state_topic_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: @@ -3294,8 +3260,8 @@ async def 
test_value_template_without_state_topic_error( } ], ) +@pytest.mark.usefixtures("hass") async def test_position_template_without_position_topic_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: @@ -3321,10 +3287,9 @@ async def test_position_template_without_position_topic_error( } ], ) +@pytest.mark.usefixtures("hass") async def test_set_position_template_without_set_position_topic( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test error when set_position_template is used and set_position_topic is missing.""" assert await mqtt_mock_entry() @@ -3348,10 +3313,9 @@ async def test_set_position_template_without_set_position_topic( } ], ) +@pytest.mark.usefixtures("hass") async def test_tilt_command_template_without_tilt_command_topic( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test error when tilt_command_template is used and tilt_command_topic is missing.""" assert await mqtt_mock_entry() @@ -3375,10 +3339,9 @@ async def test_tilt_command_template_without_tilt_command_topic( } ], ) +@pytest.mark.usefixtures("hass") async def test_tilt_status_template_without_tilt_status_topic_topic( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test error when tilt_status_template is used and tilt_status_topic is missing.""" assert await mqtt_mock_entry() @@ -3444,8 +3407,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: 
MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = cover.DOMAIN @@ -3499,8 +3461,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = cover.DOMAIN diff --git a/tests/components/mqtt/test_device_tracker.py b/tests/components/mqtt/test_device_tracker.py index 254885919b0..00e88860299 100644 --- a/tests/components/mqtt/test_device_tracker.py +++ b/tests/components/mqtt/test_device_tracker.py @@ -240,6 +240,8 @@ async def test_device_tracker_discovery_update( # Entity was not updated as the state was not changed assert state.last_updated == datetime(2023, 8, 22, 19, 16, tzinfo=UTC) + await hass.async_block_till_done(wait_background_tasks=True) + async def test_cleanup_device_tracker( hass: HomeAssistant, @@ -332,9 +334,7 @@ async def test_setting_device_tracker_value_via_mqtt_message( async def test_setting_device_tracker_value_via_mqtt_message_and_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the setting of the value via MQTT.""" await mqtt_mock_entry() @@ -359,9 +359,7 @@ async def test_setting_device_tracker_value_via_mqtt_message_and_template( async def test_setting_device_tracker_value_via_mqtt_message_and_template2( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the setting of the value via MQTT.""" await mqtt_mock_entry() @@ -389,9 +387,7 @@ async def test_setting_device_tracker_value_via_mqtt_message_and_template2( async def test_setting_device_tracker_location_via_mqtt_message( - hass: HomeAssistant, - mqtt_mock_entry: 
MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the setting of the location via MQTT.""" await mqtt_mock_entry() @@ -413,9 +409,7 @@ async def test_setting_device_tracker_location_via_mqtt_message( async def test_setting_device_tracker_location_via_lat_lon_message( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the setting of the latitude and longitude via MQTT without state topic.""" await mqtt_mock_entry() @@ -470,9 +464,7 @@ async def test_setting_device_tracker_location_via_lat_lon_message( async def test_setting_device_tracker_location_via_reset_message( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the automatic inference of zones via MQTT via reset.""" await mqtt_mock_entry() @@ -546,9 +538,7 @@ async def test_setting_device_tracker_location_via_reset_message( async def test_setting_device_tracker_location_via_abbr_reset_message( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the setting of reset via abbreviated names and custom payloads via MQTT.""" await mqtt_mock_entry() @@ -594,11 +584,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, - mqtt_mock_entry, - device_tracker.DOMAIN, - DEFAULT_CONFIG, - None, + hass, mqtt_mock_entry, device_tracker.DOMAIN, DEFAULT_CONFIG, None ) @@ -623,8 +609,7 @@ async def test_setup_with_modern_schema( async def test_reloadable( - hass: 
HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = device_tracker.DOMAIN diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 9e75ea5168b..10322dd9046 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -1,6 +1,7 @@ """The tests for MQTT device triggers.""" import json +from typing import Any import pytest from pytest_unordered import unordered @@ -16,11 +17,7 @@ from homeassistant.setup import async_setup_component from .test_common import help_test_unload_config_entry -from tests.common import ( - async_fire_mqtt_message, - async_get_device_automations, - async_mock_service, -) +from tests.common import async_fire_mqtt_message, async_get_device_automations from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, WebSocketGenerator @@ -29,12 +26,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -194,7 +185,6 @@ async def test_update_remove_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, ) -> None: """Test triggers can be updated and removed.""" await mqtt_mock_entry() @@ -284,7 +274,7 @@ async def test_update_remove_triggers( async def test_if_fires_on_mqtt_message( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing.""" @@ -350,20 
+340,20 @@ async def test_if_fires_on_mqtt_message( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "long_press" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "long_press" async def test_if_discovery_id_is_prefered( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test if discovery is preferred over referencing by type/subtype. @@ -437,21 +427,21 @@ async def test_if_discovery_id_is_prefered( # Fake short press, matching on type and subtype async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press, matching on discovery_id - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "long_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "long_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "long_press" async def test_non_unique_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -528,20 +518,20 @@ async def test_non_unique_triggers( # and triggers both attached instances. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 2 - all_calls = {calls[0].data["some"], calls[1].data["some"]} + assert len(service_calls) == 2 + all_calls = {service_calls[0].data["some"], service_calls[1].data["some"]} assert all_calls == {"press1", "press2"} # Trigger second config references to same trigger # and triggers both attached instances. async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(calls) == 2 - all_calls = {calls[0].data["some"], calls[1].data["some"]} + assert len(service_calls) == 2 + all_calls = {service_calls[0].data["some"], service_calls[1].data["some"]} assert all_calls == {"press1", "press2"} # Removing the first trigger will clean up - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", "") await hass.async_block_till_done() await hass.async_block_till_done() @@ -549,13 +539,13 @@ async def test_non_unique_triggers( "Device trigger ('device_automation', 'bla1') has been removed" in caplog.text ) async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_mqtt_message_template( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing with a message template and a shared topic.""" @@ -623,20 +613,20 @@ async def test_if_fires_on_mqtt_message_template( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button4", '{"button":"short_press"}') await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press. 
async_fire_mqtt_message(hass, "foobar/triggers/button4", '{"button":"long_press"}') await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "long_press" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "long_press" async def test_if_fires_on_mqtt_message_late_discover( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing of MQTT device triggers discovered after setup.""" @@ -710,20 +700,20 @@ async def test_if_fires_on_mqtt_message_late_discover( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "long_press" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "long_press" async def test_if_fires_on_mqtt_message_after_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -782,7 +772,7 @@ async def test_if_fires_on_mqtt_message_after_update( # Fake short press. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Update the trigger with existing type/subtype change async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", data1) @@ -793,29 +783,29 @@ async def test_if_fires_on_mqtt_message_after_update( async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data3) await hass.async_block_till_done() - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/buttonOne", "") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Update the trigger with same topic async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data3) await hass.async_block_till_done() - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/buttonOne", "") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_no_resubscribe_same_topic( @@ -868,7 +858,7 @@ async def test_no_resubscribe_same_topic( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers not firing after removal.""" @@ -911,7 +901,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( # Fake short press. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Remove the trigger async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", "") @@ -919,7 +909,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Rediscover the trigger async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data1) @@ -927,14 +917,14 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers not firing after removal.""" @@ -982,7 +972,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( # Fake short press. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Remove MQTT from the device mqtt_config_entry = hass.config_entries.async_entries(DOMAIN)[0] @@ -994,7 +984,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attach_remove( @@ -1016,10 +1006,10 @@ async def test_attach_remove( await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - calls = [] + callback_calls: list[dict[str, Any]] = [] - def callback(trigger): - calls.append(trigger["trigger"]["payload"]) + def trigger_callback(trigger): + callback_calls.append(trigger["trigger"]["payload"]) remove = await async_initialize_triggers( hass, @@ -1033,7 +1023,7 @@ async def test_attach_remove( "subtype": "button_1", }, ], - callback, + trigger_callback, DOMAIN, "mock-name", _LOGGER.log, @@ -1042,8 +1032,8 @@ async def test_attach_remove( # Fake short press. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0] == "short_press" + assert len(callback_calls) == 1 + assert callback_calls[0] == "short_press" # Remove the trigger remove() @@ -1052,7 +1042,7 @@ async def test_attach_remove( # Verify the triggers are no longer active async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(callback_calls) == 1 async def test_attach_remove_late( @@ -1079,10 +1069,10 @@ async def test_attach_remove_late( await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - calls = [] + callback_calls: list[dict[str, Any]] = [] - def callback(trigger): - calls.append(trigger["trigger"]["payload"]) + def trigger_callback(trigger): + callback_calls.append(trigger["trigger"]["payload"]) remove = await async_initialize_triggers( hass, @@ -1096,7 +1086,7 @@ async def test_attach_remove_late( "subtype": "button_1", }, ], - callback, + trigger_callback, DOMAIN, "mock-name", _LOGGER.log, @@ -1108,8 +1098,8 @@ async def test_attach_remove_late( # Fake short press. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0] == "short_press" + assert len(callback_calls) == 1 + assert callback_calls[0] == "short_press" # Remove the trigger remove() @@ -1118,7 +1108,7 @@ async def test_attach_remove_late( # Verify the triggers are no longer active async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(callback_calls) == 1 async def test_attach_remove_late2( @@ -1145,10 +1135,10 @@ async def test_attach_remove_late2( await hass.async_block_till_done() device_entry = device_registry.async_get_device(identifiers={("mqtt", "0AFFD2")}) - calls = [] + callback_calls: list[dict[str, Any]] = [] - def callback(trigger): - calls.append(trigger["trigger"]["payload"]) + def trigger_callback(trigger): + callback_calls.append(trigger["trigger"]["payload"]) remove = await async_initialize_triggers( hass, @@ -1162,7 +1152,7 @@ async def test_attach_remove_late2( "subtype": "button_1", }, ], - callback, + trigger_callback, DOMAIN, "mock-name", _LOGGER.log, @@ -1178,7 +1168,7 @@ async def test_attach_remove_late2( # Verify the triggers are no longer active async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(callback_calls) == 0 # Try to remove the trigger twice with pytest.raises(HomeAssistantError): @@ -1684,7 +1674,7 @@ async def test_trigger_debug_info( async def test_unload_entry( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, ) -> None: @@ -1727,7 +1717,7 @@ async def test_unload_entry( # Fake short press 1 async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 
await help_test_unload_config_entry(hass) @@ -1736,7 +1726,7 @@ async def test_unload_entry( await hass.async_block_till_done() async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Start entry again mqtt_entry = hass.config_entries.async_entries("mqtt")[0] @@ -1747,4 +1737,4 @@ async def test_unload_entry( await hass.async_block_till_done() async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 diff --git a/tests/components/mqtt/test_diagnostics.py b/tests/components/mqtt/test_diagnostics.py index f8b547ae1eb..b8499ba5812 100644 --- a/tests/components/mqtt/test_diagnostics.py +++ b/tests/components/mqtt/test_diagnostics.py @@ -26,7 +26,6 @@ default_config = { async def test_entry_diagnostics( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, hass_client: ClientSessionGenerator, mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 911d205269c..58de3c53c52 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -22,7 +22,9 @@ from homeassistant.components.mqtt.discovery import ( MQTTDiscoveryPayload, async_start, ) +from homeassistant.components.mqtt.models import ReceiveMessage from homeassistant.const import ( + EVENT_HOMEASSISTANT_STARTED, EVENT_STATE_CHANGED, STATE_ON, STATE_UNAVAILABLE, @@ -40,6 +42,7 @@ from homeassistant.helpers.service_info.mqtt import MqttServiceInfo from homeassistant.setup import async_setup_component from homeassistant.util.signal_type import SignalTypeFormat +from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE from .test_common import help_all_subscribe_calls, help_test_unload_config_entry from tests.common import ( @@ -61,8 
+64,7 @@ from tests.typing import ( [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_DISCOVERY: False}], ) async def test_subscribing_config_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting up discovery.""" mqtt_mock = await mqtt_mock_entry() @@ -202,8 +204,7 @@ async def test_only_valid_components( async def test_correct_config_discovery( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test sending in correct JSON.""" await mqtt_mock_entry() @@ -282,9 +283,7 @@ async def test_discovery_with_invalid_integration_info( """Test sending in correct JSON.""" await mqtt_mock_entry() async_fire_mqtt_message( - hass, - "homeassistant/binary_sensor/bla/config", - config_message, + hass, "homeassistant/binary_sensor/bla/config", config_message ) await hass.async_block_till_done() @@ -295,8 +294,7 @@ async def test_discovery_with_invalid_integration_info( async def test_discover_fan( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test discovering an MQTT fan.""" await mqtt_mock_entry() @@ -315,9 +313,7 @@ async def test_discover_fan( async def test_discover_climate( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test discovering an MQTT climate component.""" await mqtt_mock_entry() @@ -338,8 +334,7 @@ async def test_discover_climate( async def test_discover_alarm_control_panel( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test discovering an MQTT alarm control panel component.""" await mqtt_mock_entry() @@ -528,8 +523,7 @@ 
async def test_discovery_with_object_id( async def test_discovery_incl_nodeid( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test sending in correct JSON with optional node_id included.""" await mqtt_mock_entry() @@ -578,8 +572,7 @@ async def test_non_duplicate_discovery( async def test_removal( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of component through empty discovery message.""" await mqtt_mock_entry() @@ -599,8 +592,7 @@ async def test_removal( async def test_rediscover( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test rediscover of removed component.""" await mqtt_mock_entry() @@ -629,8 +621,7 @@ async def test_rediscover( async def test_rapid_rediscover( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test immediate rediscover of removed component.""" await mqtt_mock_entry() @@ -681,8 +672,7 @@ async def test_rapid_rediscover( async def test_rapid_rediscover_unique( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test immediate rediscover of removed component.""" await mqtt_mock_entry() @@ -743,8 +733,7 @@ async def test_rapid_rediscover_unique( async def test_rapid_reconfigure( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test immediate reconfigure of added component.""" await mqtt_mock_entry() @@ -820,7 +809,7 @@ async def test_duplicate_removal( assert "Component has already been discovered: binary_sensor bla" not in caplog.text 
-async def test_cleanup_device( +async def test_cleanup_device_manual( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, @@ -976,10 +965,10 @@ async def test_cleanup_device_multiple_config_entries( connections={("mac", "12:34:56:AB:CD:EF")} ) assert device_entry is not None - assert device_entry.config_entries == [ - config_entry.entry_id, + assert device_entry.config_entries == { mqtt_config_entry.entry_id, - ] + config_entry.entry_id, + } entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") assert entity_entry is not None @@ -1002,7 +991,7 @@ async def test_cleanup_device_multiple_config_entries( ) assert device_entry is not None entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") - assert device_entry.config_entries == [config_entry.entry_id] + assert device_entry.config_entries == {config_entry.entry_id} assert entity_entry is None # Verify state is removed @@ -1023,6 +1012,7 @@ async def test_cleanup_device_multiple_config_entries( async def test_cleanup_device_multiple_config_entries_mqtt( hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mqtt_mock_entry: MqttMockHAClientGenerator, @@ -1070,10 +1060,10 @@ async def test_cleanup_device_multiple_config_entries_mqtt( connections={("mac", "12:34:56:AB:CD:EF")} ) assert device_entry is not None - assert device_entry.config_entries == [ - config_entry.entry_id, + assert device_entry.config_entries == { mqtt_config_entry.entry_id, - ] + config_entry.entry_id, + } entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") assert entity_entry is not None @@ -1094,7 +1084,7 @@ async def test_cleanup_device_multiple_config_entries_mqtt( ) assert device_entry is not None entity_entry = entity_registry.async_get("sensor.none_mqtt_sensor") - assert device_entry.config_entries == [config_entry.entry_id] + assert device_entry.config_entries == 
{config_entry.entry_id} assert entity_entry is None # Verify state is removed @@ -1104,11 +1094,11 @@ async def test_cleanup_device_multiple_config_entries_mqtt( # Verify retained discovery topics have not been cleared again mqtt_mock.async_publish.assert_not_called() + assert "KeyError:" not in caplog.text async def test_discovery_expansion( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test expansion of abbreviated discovery payload.""" await mqtt_mock_entry() @@ -1169,8 +1159,7 @@ async def test_discovery_expansion( async def test_discovery_expansion_2( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test expansion of abbreviated discovery payload.""" await mqtt_mock_entry() @@ -1246,8 +1235,7 @@ async def test_discovery_expansion_3( async def test_discovery_expansion_without_encoding_and_value_template_1( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test expansion of raw availability payload with a template as list.""" await mqtt_mock_entry() @@ -1297,8 +1285,7 @@ async def test_discovery_expansion_without_encoding_and_value_template_1( async def test_discovery_expansion_without_encoding_and_value_template_2( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test expansion of raw availability payload with a template directly.""" await mqtt_mock_entry() @@ -1381,27 +1368,30 @@ async def test_missing_discover_abbreviations( ) -> None: """Check MQTT platforms for missing abbreviations.""" await mqtt_mock_entry() - missing = [] + missing: list[str] = [] regex = re.compile(r"(CONF_[a-zA-Z\d_]*) *= *[\'\"]([a-zA-Z\d_]*)[\'\"]") - for fil in 
Path(mqtt.__file__).parent.rglob("*.py"): - if fil.name in EXCLUDED_MODULES: - continue - with open(fil, encoding="utf-8") as file: - matches = re.findall(regex, file.read()) - missing.extend( - f"{fil}: no abbreviation for {match[1]} ({match[0]})" - for match in matches - if match[1] not in ABBREVIATIONS.values() - and match[1] not in DEVICE_ABBREVIATIONS.values() - and match[0] not in ABBREVIATIONS_WHITE_LIST - ) + + def _add_missing(): + for fil in Path(mqtt.__file__).parent.rglob("*.py"): + if fil.name in EXCLUDED_MODULES: + continue + with open(fil, encoding="utf-8") as file: + matches = re.findall(regex, file.read()) + missing.extend( + f"{fil}: no abbreviation for {match[1]} ({match[0]})" + for match in matches + if match[1] not in ABBREVIATIONS.values() + and match[1] not in DEVICE_ABBREVIATIONS.values() + and match[0] not in ABBREVIATIONS_WHITE_LIST + ) + + await hass.async_add_executor_job(_add_missing) assert not missing async def test_no_implicit_state_topic_switch( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test no implicit state topic for switch.""" await mqtt_mock_entry() @@ -1454,32 +1444,14 @@ async def test_complex_discovery_topic_prefix( ].discovery_already_discovered +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) async def test_mqtt_integration_discovery_subscribe_unsubscribe( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Check MQTT integration discovery subscribe and unsubscribe.""" - mqtt_mock = await mqtt_mock_entry() - mock_platform(hass, "comp.config_flow", 
None) - - entry = hass.config_entries.async_entries("mqtt")[0] - mqtt_mock().connected = True - - with patch( - "homeassistant.components.mqtt.discovery.async_get_mqtt", - return_value={"comp": ["comp/discovery/#"]}, - ): - await async_start(hass, "homeassistant", entry) - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) - assert not mqtt_client_mock.unsubscribe.called class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -1488,65 +1460,108 @@ async def test_mqtt_integration_discovery_subscribe_unsubscribe( """Test mqtt step.""" return self.async_abort(reason="already_configured") - assert not mqtt_client_mock.unsubscribe.called + mock_platform(hass, "comp.config_flow", None) + + birth = asyncio.Event() + + @callback + def wait_birth(msg: ReceiveMessage) -> None: + """Handle birth message.""" + birth.set() wait_unsub = asyncio.Event() + @callback def _mock_unsubscribe(topics: list[str]) -> tuple[int, int]: wait_unsub.set() return (0, 0) + entry = MockConfigEntry(domain=mqtt.DOMAIN, data=ENTRY_DEFAULT_BIRTH_MESSAGE) + entry.add_to_hass(hass) with ( + patch( + "homeassistant.components.mqtt.discovery.async_get_mqtt", + return_value={"comp": ["comp/discovery/#"]}, + ), mock_config_flow("comp", TestFlow), patch.object(mqtt_client_mock, "unsubscribe", side_effect=_mock_unsubscribe), ): + assert await hass.config_entries.async_setup(entry.entry_id) + await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await birth.wait() + + assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) + assert not mqtt_client_mock.unsubscribe.called + mqtt_client_mock.reset_mock() + + await hass.async_block_till_done(wait_background_tasks=True) async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") await wait_unsub.wait() 
mqtt_client_mock.unsubscribe.assert_called_once_with(["comp/discovery/#"]) + await hass.async_block_till_done(wait_background_tasks=True) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) @patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) async def test_mqtt_discovery_unsubscribe_once( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Check MQTT integration discovery unsubscribe once.""" - mqtt_mock = await mqtt_mock_entry() - mock_platform(hass, "comp.config_flow", None) - - entry = hass.config_entries.async_entries("mqtt")[0] - mqtt_mock().connected = True - - with patch( - "homeassistant.components.mqtt.discovery.async_get_mqtt", - return_value={"comp": ["comp/discovery/#"]}, - ): - await async_start(hass, "homeassistant", entry) - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) - assert not mqtt_client_mock.unsubscribe.called class TestFlow(config_entries.ConfigFlow): """Test flow.""" async def async_step_mqtt(self, discovery_info: MqttServiceInfo) -> FlowResult: """Test mqtt step.""" - await asyncio.sleep(0.1) + await asyncio.sleep(0) return self.async_abort(reason="already_configured") - with mock_config_flow("comp", TestFlow): + mock_platform(hass, "comp.config_flow", None) + + birth = asyncio.Event() + + @callback + def wait_birth(msg: ReceiveMessage) -> None: + """Handle birth message.""" + birth.set() + + wait_unsub = asyncio.Event() + + @callback + def _mock_unsubscribe(topics: list[str]) -> tuple[int, int]: + wait_unsub.set() + return (0, 0) + + entry = MockConfigEntry(domain=mqtt.DOMAIN, 
data=ENTRY_DEFAULT_BIRTH_MESSAGE) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.mqtt.discovery.async_get_mqtt", + return_value={"comp": ["comp/discovery/#"]}, + ), + mock_config_flow("comp", TestFlow), + patch.object(mqtt_client_mock, "unsubscribe", side_effect=_mock_unsubscribe), + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await birth.wait() + + assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) + assert not mqtt_client_mock.unsubscribe.called + + await hass.async_block_till_done(wait_background_tasks=True) async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") - await asyncio.sleep(0.1) - await hass.async_block_till_done() - await hass.async_block_till_done() + await wait_unsub.wait() + await asyncio.sleep(0) + await hass.async_block_till_done(wait_background_tasks=True) mqtt_client_mock.unsubscribe.assert_called_once_with(["comp/discovery/#"]) + await hass.async_block_till_done(wait_background_tasks=True) async def test_clear_config_topic_disabled_entity( @@ -1627,7 +1642,6 @@ async def test_clean_up_registry_monitoring( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, device_registry: dr.DeviceRegistry, - tmp_path: Path, ) -> None: """Test registry monitoring hook is removed after a reload.""" await mqtt_mock_entry() diff --git a/tests/components/mqtt/test_event.py b/tests/components/mqtt/test_event.py index 64a2003606c..3d4847a406a 100644 --- a/tests/components/mqtt/test_event.py +++ b/tests/components/mqtt/test_event.py @@ -222,6 +222,8 @@ async def test_setting_event_value_via_mqtt_json_message_and_default_current_sta assert state.attributes.get("val") == "valcontent" assert state.attributes.get("par") == "parcontent" + await 
hass.async_block_till_done(wait_background_tasks=True) + @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_availability_when_connection_lost( @@ -323,10 +325,9 @@ async def test_discovery_update_availability( } ], ) +@pytest.mark.usefixtures("hass") async def test_invalid_device_class( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test device_class option with invalid value.""" assert await mqtt_mock_entry() @@ -371,11 +372,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - event.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, event.DOMAIN, DEFAULT_CONFIG ) @@ -386,26 +383,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - event.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, event.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - event.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, event.DOMAIN, DEFAULT_CONFIG ) @@ -440,19 +427,15 @@ async def test_unique_id( async def test_discovery_removal_event( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered event.""" data = '{ "name": "test", 
"state_topic": "test_topic", "event_types": ["press"] }' - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, event.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, event.DOMAIN, data) async def test_discovery_update_event_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered mqtt event template.""" await mqtt_mock_entry() @@ -489,16 +472,12 @@ async def test_discovery_update_event_template( @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer", "state_topic": "test_topic#", "event_types": ["press"] }' data2 = '{ "name": "Milk", "state_topic": "test_topic", "event_types": ["press"] }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, event.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, event.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -675,8 +654,7 @@ async def test_value_template_with_entity_id( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = event.DOMAIN @@ -699,8 +677,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = event.DOMAIN diff --git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 
0dbfa3037b2..1d0cc809fd6 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -83,10 +83,9 @@ DEFAULT_CONFIG = { @pytest.mark.parametrize("hass_config", [{mqtt.DOMAIN: {fan.DOMAIN: {"name": "test"}}}]) +@pytest.mark.usefixtures("hass") async def test_fail_setup_if_no_command_topic( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test if command fails with command topic.""" assert await mqtt_mock_entry() @@ -611,8 +610,7 @@ async def test_controlling_state_via_topic_and_json_message_shared_topic( ], ) async def test_sending_mqtt_commands_and_optimistic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test optimistic mode without state topic.""" mqtt_mock = await mqtt_mock_entry() @@ -861,9 +859,7 @@ async def test_sending_mqtt_commands_with_alternate_speed_range( ], ) async def test_sending_mqtt_commands_and_optimistic_no_legacy( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test optimistic mode without state topic without legacy speed command topic.""" mqtt_mock = await mqtt_mock_entry() @@ -1005,8 +1001,7 @@ async def test_sending_mqtt_commands_and_optimistic_no_legacy( ], ) async def test_sending_mqtt_command_templates_( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test optimistic mode without state topic without legacy speed command topic.""" mqtt_mock = await mqtt_mock_entry() @@ -1166,8 +1161,7 @@ async def test_sending_mqtt_command_templates_( ], ) async def test_sending_mqtt_commands_and_optimistic_no_percentage_topic( - hass: 
HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test optimistic mode without state topic without percentage command topic.""" mqtt_mock = await mqtt_mock_entry() @@ -1237,8 +1231,7 @@ async def test_sending_mqtt_commands_and_optimistic_no_percentage_topic( ], ) async def test_sending_mqtt_commands_and_explicit_optimistic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test optimistic mode with state topic and turn on attributes.""" mqtt_mock = await mqtt_mock_entry() @@ -1533,9 +1526,7 @@ async def test_encoding_subscribable_topics( ], ) async def test_attributes( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test attributes.""" await mqtt_mock_entry() @@ -1599,7 +1590,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature(0), + fan.FanEntityFeature.TURN_OFF | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1614,7 +1605,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.OSCILLATE, + fan.FanEntityFeature.OSCILLATE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1629,7 +1622,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED, + fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1660,7 +1655,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1676,7 +1673,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1691,7 
+1690,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED, + fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1707,7 +1708,10 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.OSCILLATE | fan.FanEntityFeature.SET_SPEED, + fan.FanEntityFeature.OSCILLATE + | fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1723,7 +1727,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1739,7 +1745,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1756,7 +1764,10 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE | fan.FanEntityFeature.OSCILLATE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.OSCILLATE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1773,7 +1784,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED, + fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1840,7 +1853,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, "some error", ), ( @@ -1855,7 +1870,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.DIRECTION, + fan.FanEntityFeature.DIRECTION + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, "some error", ), ], @@ -1941,11 +1958,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await 
help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, - mqtt_mock_entry, - fan.DOMAIN, - DEFAULT_CONFIG, - MQTT_FAN_ATTRIBUTES_BLOCKED, + hass, mqtt_mock_entry, fan.DOMAIN, DEFAULT_CONFIG, MQTT_FAN_ATTRIBUTES_BLOCKED ) @@ -1965,11 +1978,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - fan.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, fan.DOMAIN, DEFAULT_CONFIG ) @@ -1980,22 +1989,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - fan.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, fan.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, mqtt_mock_entry, caplog, fan.DOMAIN, DEFAULT_CONFIG + hass, mqtt_mock_entry, fan.DOMAIN, DEFAULT_CONFIG ) @@ -2030,32 +2033,26 @@ async def test_unique_id( async def test_discovery_removal_fan( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered fan.""" data = '{ "name": "test", "command_topic": "test_topic" }' - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, fan.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, fan.DOMAIN, data) async def test_discovery_update_fan( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, 
mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered fan.""" config1 = {"name": "Beer", "command_topic": "test_topic"} config2 = {"name": "Milk", "command_topic": "test_topic"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, fan.DOMAIN, config1, config2 + hass, mqtt_mock_entry, fan.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_fan( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered fan.""" data1 = '{ "name": "Beer", "command_topic": "test_topic" }' @@ -2063,28 +2060,19 @@ async def test_discovery_update_unchanged_fan( "homeassistant.components.mqtt.fan.MqttFan.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - fan.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, fan.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "command_topic": "test_topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, fan.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, fan.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -2232,8 +2220,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = fan.DOMAIN @@ -2256,8 +2243,7 @@ async def 
test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = fan.DOMAIN diff --git a/tests/components/mqtt/test_humidifier.py b/tests/components/mqtt/test_humidifier.py index 4e8918d330e..62de371af4b 100644 --- a/tests/components/mqtt/test_humidifier.py +++ b/tests/components/mqtt/test_humidifier.py @@ -83,17 +83,16 @@ DEFAULT_CONFIG = { } -async def async_turn_on( - hass: HomeAssistant, - entity_id=ENTITY_MATCH_ALL, -) -> None: +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified humidifier on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) -async def async_turn_off(hass: HomeAssistant, entity_id=ENTITY_MATCH_ALL) -> None: +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified humidier off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} @@ -101,7 +100,7 @@ async def async_turn_off(hass: HomeAssistant, entity_id=ENTITY_MATCH_ALL) -> Non async def async_set_mode( - hass: HomeAssistant, entity_id=ENTITY_MATCH_ALL, mode: str | None = None + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, mode: str | None = None ) -> None: """Set mode for all or specified humidifier.""" data = { @@ -114,7 +113,7 @@ async def async_set_mode( async def async_set_humidity( - hass: HomeAssistant, entity_id=ENTITY_MATCH_ALL, humidity: int | None = None + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, humidity: int | None = None ) -> None: """Set target humidity for all or specified humidifier.""" data = { @@ -129,10 +128,9 @@ async def async_set_humidity( @pytest.mark.parametrize( "hass_config", [{mqtt.DOMAIN: {humidifier.DOMAIN: {"name": "test"}}}] ) 
+@pytest.mark.usefixtures("hass") async def test_fail_setup_if_no_command_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test if command fails with command topic.""" assert await mqtt_mock_entry() @@ -892,9 +890,7 @@ async def test_encoding_subscribable_topics( ], ) async def test_attributes( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test attributes.""" await mqtt_mock_entry() @@ -1048,9 +1044,7 @@ async def test_attributes( ], ) async def test_validity_configurations( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - valid: bool, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, valid: bool ) -> None: """Test validity of configurations.""" await mqtt_mock_entry() @@ -1252,11 +1246,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - humidifier.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, humidifier.DOMAIN, DEFAULT_CONFIG ) @@ -1267,26 +1257,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - humidifier.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, humidifier.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - 
mqtt_mock_entry, - caplog, - humidifier.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, humidifier.DOMAIN, DEFAULT_CONFIG ) @@ -1323,21 +1303,15 @@ async def test_unique_id( async def test_discovery_removal_humidifier( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered humidifier.""" data = '{ "name": "test", "command_topic": "test_topic", "target_humidity_command_topic": "test-topic2" }' - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, humidifier.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, humidifier.DOMAIN, data) async def test_discovery_update_humidifier( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered humidifier.""" config1 = { @@ -1351,19 +1325,12 @@ async def test_discovery_update_humidifier( "target_humidity_command_topic": "test-topic2", } await help_test_discovery_update( - hass, - mqtt_mock_entry, - caplog, - humidifier.DOMAIN, - config1, - config2, + hass, mqtt_mock_entry, humidifier.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_humidifier( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered humidifier.""" data1 = '{ "name": "Beer", "command_topic": "test_topic", "target_humidity_command_topic": "test-topic2" }' @@ -1371,26 +1338,19 @@ async def test_discovery_update_unchanged_humidifier( "homeassistant.components.mqtt.fan.MqttFan.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - humidifier.DOMAIN, - data1, - discovery_update, + 
hass, mqtt_mock_entry, humidifier.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "command_topic": "test_topic", "target_humidity_command_topic": "test-topic2" }' await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, humidifier.DOMAIN, data1, data2 + hass, mqtt_mock_entry, humidifier.DOMAIN, data1, data2 ) @@ -1525,8 +1485,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = humidifier.DOMAIN @@ -1549,8 +1508,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_config_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = humidifier.DOMAIN diff --git a/tests/components/mqtt/test_image.py b/tests/components/mqtt/test_image.py index 79e6cf1d281..6f0eb8edf49 100644 --- a/tests/components/mqtt/test_image.py +++ b/tests/components/mqtt/test_image.py @@ -356,7 +356,6 @@ async def test_image_from_url_content_type( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, content_type: str, setup_ok: bool, ) -> None: @@ -425,7 +424,6 @@ async def test_image_from_url_fails( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, side_effect: Exception, ) -> None: """Test setup with 
minimum configuration.""" @@ -501,9 +499,8 @@ async def test_image_from_url_fails( ), ], ) +@pytest.mark.usefixtures("hass", "hass_client_no_auth") async def test_image_config_fails( - hass: HomeAssistant, - hass_client_no_auth: ClientSessionGenerator, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, error_msg: str, @@ -576,11 +573,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - image.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, image.DOMAIN, DEFAULT_CONFIG ) @@ -591,26 +584,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - image.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, image.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - image.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, image.DOMAIN, DEFAULT_CONFIG ) @@ -643,33 +626,27 @@ async def test_unique_id( async def test_discovery_removal_image( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered image.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][image.DOMAIN]) - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, image.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, image.DOMAIN, data) async def test_discovery_update_image( - 
hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered image.""" config1 = {"name": "Beer", "image_topic": "test_topic"} config2 = {"name": "Milk", "image_topic": "test_topic"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, image.DOMAIN, config1, config2 + hass, mqtt_mock_entry, image.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_image( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered image.""" data1 = '{ "name": "Beer", "image_topic": "test_topic"}' @@ -677,28 +654,19 @@ async def test_discovery_update_unchanged_image( "homeassistant.components.mqtt.image.MqttImage.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - image.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, image.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "image_topic": "test_topic"}' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, image.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, image.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -742,11 +710,7 @@ async def test_entity_id_update_subscriptions( ) -> None: """Test MQTT subscriptions are managed when entity_id is updated.""" await help_test_entity_id_update_subscriptions( - hass, - 
mqtt_mock_entry, - image.DOMAIN, - DEFAULT_CONFIG, - ["test_topic"], + hass, mqtt_mock_entry, image.DOMAIN, DEFAULT_CONFIG, ["test_topic"] ) @@ -775,8 +739,7 @@ async def test_entity_debug_info_message( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = image.DOMAIN @@ -795,8 +758,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = image.DOMAIN diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index cd710ba610e..333960d8ad4 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -1,31 +1,20 @@ -"""The tests for the MQTT component.""" +"""The tests for the MQTT component setup and helpers.""" import asyncio from copy import deepcopy from datetime import datetime, timedelta from functools import partial import json -import logging -import socket -import ssl import time from typing import Any, TypedDict -from unittest.mock import ANY, MagicMock, Mock, call, mock_open, patch +from unittest.mock import ANY, MagicMock, Mock, mock_open, patch -import certifi from freezegun.api import FrozenDateTimeFactory -import paho.mqtt.client as paho_mqtt import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant.components import mqtt from homeassistant.components.mqtt import debug_info -from homeassistant.components.mqtt.client import ( - _LOGGER as CLIENT_LOGGER, - RECONNECT_INTERVAL_SECONDS, - EnsureJobAfterCooldown, -) from homeassistant.components.mqtt.models import ( MessageCallbackType, MqttCommandTemplateException, @@ -37,16 +26,12 @@ from homeassistant.components.sensor import 
SensorDeviceClass from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState from homeassistant.const import ( ATTR_ASSUMED_STATE, - CONF_PROTOCOL, - EVENT_HOMEASSISTANT_STARTED, - EVENT_HOMEASSISTANT_STOP, SERVICE_RELOAD, STATE_UNAVAILABLE, STATE_UNKNOWN, - UnitOfTemperature, ) import homeassistant.core as ha -from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr, entity_registry as er, template from homeassistant.helpers.entity import Entity @@ -56,8 +41,6 @@ from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.dt import utcnow -from .test_common import help_all_subscribe_calls - from tests.common import ( MockConfigEntry, MockEntity, @@ -68,7 +51,6 @@ from tests.common import ( ) from tests.components.sensor.common import MockSensor from tests.typing import ( - MqttMockHAClient, MqttMockHAClientGenerator, MqttMockPahoClient, WebSocketGenerator, @@ -100,237 +82,6 @@ def mock_storage(hass_storage: dict[str, Any]) -> None: """Autouse hass_storage for the TestCase tests.""" -@pytest.fixture -def recorded_calls() -> list[ReceiveMessage]: - """Fixture to hold recorded calls.""" - return [] - - -@pytest.fixture -def record_calls(recorded_calls: list[ReceiveMessage]) -> MessageCallbackType: - """Fixture to record calls.""" - - @callback - def record_calls(msg: ReceiveMessage) -> None: - """Record calls.""" - recorded_calls.append(msg) - - return record_calls - - -@pytest.fixture -def client_debug_log() -> Generator[None]: - """Set the mqtt client log level to DEBUG.""" - logger = logging.getLogger("mqtt_client_tests_debug") - logger.setLevel(logging.DEBUG) - with patch.object(CLIENT_LOGGER, "parent", logger): - yield - - -def help_assert_message( - msg: 
ReceiveMessage, - topic: str | None = None, - payload: str | None = None, - qos: int | None = None, - retain: bool | None = None, -) -> bool: - """Return True if all of the given attributes match with the message.""" - match: bool = True - if topic is not None: - match &= msg.topic == topic - if payload is not None: - match &= msg.payload == payload - if qos is not None: - match &= msg.qos == qos - if retain is not None: - match &= msg.retain == retain - return match - - -async def test_mqtt_connects_on_home_assistant_mqtt_setup( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test if client is connected after mqtt init on bootstrap.""" - await mqtt_mock_entry() - assert mqtt_client_mock.connect.call_count == 1 - - -async def test_mqtt_does_not_disconnect_on_home_assistant_stop( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, -) -> None: - """Test if client is not disconnected on HA stop.""" - await mqtt_mock_entry() - hass.bus.fire(EVENT_HOMEASSISTANT_STOP) - await hass.async_block_till_done() - await hass.async_block_till_done() - assert mqtt_client_mock.disconnect.call_count == 0 - - -async def test_mqtt_await_ack_at_disconnect( - hass: HomeAssistant, -) -> None: - """Test if ACK is awaited correctly when disconnecting.""" - - class FakeInfo: - """Returns a simulated client publish response.""" - - mid = 100 - rc = 0 - - with patch( - "homeassistant.components.mqtt.async_client.AsyncMQTTClient" - ) as mock_client: - mqtt_client = mock_client.return_value - mqtt_client.connect = MagicMock( - return_value=0, - side_effect=lambda *args, **kwargs: hass.loop.call_soon_threadsafe( - mqtt_client.on_connect, mqtt_client, None, 0, 0, 0 - ), - ) - mqtt_client.publish = MagicMock(return_value=FakeInfo()) - entry = MockConfigEntry( - domain=mqtt.DOMAIN, - data={ - "certificate": "auto", - mqtt.CONF_BROKER: "test-broker", - 
mqtt.CONF_DISCOVERY: False, - }, - ) - entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) - - mqtt_client = mock_client.return_value - - # publish from MQTT client without awaiting - hass.async_create_task( - mqtt.async_publish(hass, "test-topic", "some-payload", 0, False) - ) - await asyncio.sleep(0) - # Simulate late ACK callback from client with mid 100 - mqtt_client.on_publish(0, 0, 100) - # disconnect the MQTT client - await hass.async_stop() - await hass.async_block_till_done() - # assert the payload was sent through the client - assert mqtt_client.publish.called - assert mqtt_client.publish.call_args[0] == ( - "test-topic", - "some-payload", - 0, - False, - ) - await hass.async_block_till_done(wait_background_tasks=True) - - -async def test_publish( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator -) -> None: - """Test the publish function.""" - mqtt_mock = await mqtt_mock_entry() - publish_mock: MagicMock = mqtt_mock._mqttc.publish - await mqtt.async_publish(hass, "test-topic", "test-payload") - await hass.async_block_till_done() - assert publish_mock.called - assert publish_mock.call_args[0] == ( - "test-topic", - "test-payload", - 0, - False, - ) - publish_mock.reset_mock() - - await mqtt.async_publish(hass, "test-topic", "test-payload", 2, True) - await hass.async_block_till_done() - assert publish_mock.called - assert publish_mock.call_args[0] == ( - "test-topic", - "test-payload", - 2, - True, - ) - publish_mock.reset_mock() - - mqtt.publish(hass, "test-topic2", "test-payload2") - await hass.async_block_till_done() - assert publish_mock.called - assert publish_mock.call_args[0] == ( - "test-topic2", - "test-payload2", - 0, - False, - ) - publish_mock.reset_mock() - - mqtt.publish(hass, "test-topic2", "test-payload2", 2, True) - await hass.async_block_till_done() - assert publish_mock.called - assert publish_mock.call_args[0] == ( - "test-topic2", - "test-payload2", - 2, - True, - ) - 
publish_mock.reset_mock() - - # test binary pass-through - mqtt.publish( - hass, - "test-topic3", - b"\xde\xad\xbe\xef", - 0, - False, - ) - await hass.async_block_till_done() - assert publish_mock.called - assert publish_mock.call_args[0] == ( - "test-topic3", - b"\xde\xad\xbe\xef", - 0, - False, - ) - mqtt_mock.reset_mock() - - # test null payload - mqtt.publish( - hass, - "test-topic3", - None, - 0, - False, - ) - await hass.async_block_till_done() - assert publish_mock.called - assert publish_mock.call_args[0] == ( - "test-topic3", - None, - 0, - False, - ) - - publish_mock.reset_mock() - - -async def test_convert_outgoing_payload(hass: HomeAssistant) -> None: - """Test the converting of outgoing MQTT payloads without template.""" - command_template = mqtt.MqttCommandTemplate(None, hass=hass) - assert command_template.async_render(b"\xde\xad\xbe\xef") == b"\xde\xad\xbe\xef" - - assert ( - command_template.async_render("b'\\xde\\xad\\xbe\\xef'") - == "b'\\xde\\xad\\xbe\\xef'" - ) - - assert command_template.async_render(1234) == 1234 - - assert command_template.async_render(1234.56) == 1234.56 - - assert command_template.async_render(None) is None - - async def test_command_template_value(hass: HomeAssistant) -> None: """Test the rendering of MQTT command template.""" @@ -338,12 +89,12 @@ async def test_command_template_value(hass: HomeAssistant) -> None: # test rendering value tpl = template.Template("{{ value + 1 }}", hass=hass) - cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) assert cmd_tpl.async_render(4321) == "4322" # test variables at rendering tpl = template.Template("{{ some_var }}", hass=hass) - cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) assert cmd_tpl.async_render(None, variables=variables) == "beer" @@ -359,9 +110,7 @@ async def test_command_template_value(hass: HomeAssistant) -> None: ], ) async def test_command_template_variables( - hass: HomeAssistant, 
- mqtt_mock_entry: MqttMockHAClientGenerator, - config: ConfigType, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, config: ConfigType ) -> None: """Test the rendering of entity variables.""" topic = "test/select" @@ -412,8 +161,8 @@ async def test_command_template_variables( async def test_command_template_fails(hass: HomeAssistant) -> None: """Test the exception handling of an MQTT command template.""" - tpl = template.Template("{{ value * 2 }}") - cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + tpl = template.Template("{{ value * 2 }}", hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) with pytest.raises(MqttCommandTemplateException) as exc: cmd_tpl.async_render(None) assert "unsupported operand type(s) for *: 'NoneType' and 'int'" in str(exc.value) @@ -425,13 +174,13 @@ async def test_value_template_value(hass: HomeAssistant) -> None: variables = {"id": 1234, "some_var": "beer"} # test rendering value - tpl = template.Template("{{ value_json.id }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass) + tpl = template.Template("{{ value_json.id }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl) assert val_tpl.async_render_with_possible_json_value('{"id": 4321}') == "4321" # test variables at rendering - tpl = template.Template("{{ value_json.id }} {{ some_var }} {{ code }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass, config_attributes={"code": 1234}) + tpl = template.Template("{{ value_json.id }} {{ some_var }} {{ code }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl, config_attributes={"code": 1234}) assert ( val_tpl.async_render_with_possible_json_value( '{"id": 4321}', variables=variables @@ -440,8 +189,8 @@ async def test_value_template_value(hass: HomeAssistant) -> None: ) # test with default value if an error occurs due to an invalid template - tpl = template.Template("{{ value_json.id | as_datetime }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass) + tpl = template.Template("{{ value_json.id | as_datetime 
}}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl) assert ( val_tpl.async_render_with_possible_json_value('{"otherid": 4321}', "my default") == "my default" @@ -451,19 +200,19 @@ async def test_value_template_value(hass: HomeAssistant) -> None: entity = Entity() entity.hass = hass entity.entity_id = "select.test" - tpl = template.Template("{{ value_json.id }}") + tpl = template.Template("{{ value_json.id }}", hass=hass) val_tpl = mqtt.MqttValueTemplate(tpl, entity=entity) assert val_tpl.async_render_with_possible_json_value('{"id": 4321}') == "4321" # test this object in a template - tpl2 = template.Template("{{ this.entity_id }}") + tpl2 = template.Template("{{ this.entity_id }}", hass=hass) val_tpl2 = mqtt.MqttValueTemplate(tpl2, entity=entity) assert val_tpl2.async_render_with_possible_json_value("bla") == "select.test" with patch( "homeassistant.helpers.template.TemplateStateFromEntityId", MagicMock() ) as template_state_calls: - tpl3 = template.Template("{{ this.entity_id }}") + tpl3 = template.Template("{{ this.entity_id }}", hass=hass) val_tpl3 = mqtt.MqttValueTemplate(tpl3, entity=entity) val_tpl3.async_render_with_possible_json_value("call1") val_tpl3.async_render_with_possible_json_value("call2") @@ -474,8 +223,8 @@ async def test_value_template_fails(hass: HomeAssistant) -> None: """Test the rendering of MQTT value template fails.""" entity = MockEntity(entity_id="sensor.test") entity.hass = hass - tpl = template.Template("{{ value_json.some_var * 2 }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass, entity=entity) + tpl = template.Template("{{ value_json.some_var * 2 }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl, entity=entity) with pytest.raises(MqttValueTemplateException) as exc: val_tpl.async_render_with_possible_json_value('{"some_var": null }') assert str(exc.value) == ( @@ -511,10 +260,12 @@ async def test_service_call_without_topic_does_not_publish( assert not mqtt_mock.async_publish.called -async def 
test_service_call_with_topic_and_topic_template_does_not_publish( +# The use of a topic_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_mqtt_publish_action_call_with_topic_and_topic_template_does_not_publish( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with topic/topic template. + """Test the mqtt publish action call with topic/topic template. If both 'topic' and 'topic_template' are provided then fail. """ @@ -535,10 +286,12 @@ async def test_service_call_with_topic_and_topic_template_does_not_publish( assert not mqtt_mock.async_publish.called -async def test_service_call_with_invalid_topic_template_does_not_publish( +# The use of a topic_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_mqtt_action_call_with_invalid_topic_template_does_not_publish( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with a problematic topic template.""" + """Test the mqtt publish action call with a problematic topic template.""" mqtt_mock = await mqtt_mock_entry() with pytest.raises(MqttCommandTemplateException) as exc: await hass.services.async_call( @@ -558,10 +311,12 @@ async def test_service_call_with_invalid_topic_template_does_not_publish( assert not mqtt_mock.async_publish.called -async def test_service_call_with_template_topic_renders_template( +# The use of a topic_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_mqtt_publish_action_call_with_template_topic_renders_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with rendered topic template. + """Test the mqtt publish action call with rendered topic template. 
If 'topic_template' is provided and 'topic' is not, then render it. """ @@ -582,7 +337,7 @@ async def test_service_call_with_template_topic_renders_template( async def test_service_call_with_template_topic_renders_invalid_topic( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with rendered, invalid topic template. + """Test the action call with rendered, invalid topic template. If a wildcard topic is rendered, then fail. """ @@ -605,10 +360,12 @@ async def test_service_call_with_template_topic_renders_invalid_topic( assert not mqtt_mock.async_publish.called -async def test_service_call_with_invalid_rendered_template_topic_doesnt_render_template( +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_action_call_with_invalid_rendered_payload_template_doesnt_render_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with unrendered template. + """Test the action call with unrendered payload template. If both 'payload' and 'payload_template' are provided then fail. """ @@ -629,10 +386,12 @@ async def test_service_call_with_invalid_rendered_template_topic_doesnt_render_t assert not mqtt_mock.async_publish.called -async def test_service_call_with_template_payload_renders_template( +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_mqtt_publish_action_call_with_template_payload_renders_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with rendered template. + """Test the mqtt publish action call with rendered template. If 'payload_template' is provided and 'payload' is not, then render it. 
""" @@ -661,10 +420,80 @@ async def test_service_call_with_template_payload_renders_template( mqtt_mock.reset_mock() -async def test_service_call_with_bad_template( +@pytest.mark.parametrize( + ("attr_payload", "payload", "evaluate_payload", "literal_eval_calls"), + [ + ("b'\\xde\\xad\\xbe\\xef'", b"\xde\xad\xbe\xef", True, 1), + ("b'\\xde\\xad\\xbe\\xef'", "b'\\xde\\xad\\xbe\\xef'", False, 0), + ("DEADBEEF", "DEADBEEF", False, 0), + ( + "b'\\xde", + "b'\\xde", + True, + 1, + ), # Bytes literal is invalid, fall back to string + ], +) +async def test_mqtt_publish_action_call_with_raw_data( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + attr_payload: str, + payload: str | bytes, + evaluate_payload: bool, + literal_eval_calls: int, +) -> None: + """Test the mqtt publish action call raw data. + + When `payload` represents a `bytes` object, it should be published + as raw data if `evaluate_payload` is set. + """ + mqtt_mock = await mqtt_mock_entry() + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + mqtt.ATTR_EVALUATE_PAYLOAD: evaluate_payload, + }, + blocking=True, + ) + assert mqtt_mock.async_publish.called + assert mqtt_mock.async_publish.call_args[0][1] == payload + + with patch( + "homeassistant.components.mqtt.models.literal_eval" + ) as literal_eval_mock: + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + }, + blocking=True, + ) + literal_eval_mock.assert_not_called() + + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + mqtt.ATTR_EVALUATE_PAYLOAD: evaluate_payload, + }, + blocking=True, + ) + assert len(literal_eval_mock.mock_calls) == literal_eval_calls + + +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 
2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_publish_action_call_with_bad_payload_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with a bad template does not publish.""" + """Test the mqtt publish action call with a bad template does not publish.""" mqtt_mock = await mqtt_mock_entry() with pytest.raises(MqttCommandTemplateException) as exc: await hass.services.async_call( @@ -683,10 +512,12 @@ async def test_service_call_with_bad_template( ) -async def test_service_call_with_payload_doesnt_render_template( +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_action_call_with_payload_doesnt_render_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with unrendered template. + """Test the mqtt publish action call with an unrendered template. If both 'payload' and 'payload_template' are provided then fail. 
""" @@ -907,7 +738,7 @@ def test_entity_device_info_schema() -> None: {"identifiers": [], "connections": [], "name": "Beer"} ) - # not an valid URL + # not a valid URL with pytest.raises(vol.Invalid): MQTT_ENTITY_DEVICE_INFO_SCHEMA( { @@ -1022,977 +853,10 @@ async def test_receiving_message_with_non_utf8_topic_gets_logged( ) -async def test_all_subscriptions_run_when_decode_fails( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test all other subscriptions still run when decode fails for one.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "test-topic", record_calls, encoding="ascii") - await mqtt.async_subscribe(hass, "test-topic", record_calls) - - async_fire_mqtt_message(hass, "test-topic", UnitOfTemperature.CELSIUS) - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - - -async def test_subscribe_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of a topic.""" - await mqtt_mock_entry() - unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) - - async_fire_mqtt_message(hass, "test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "test-topic" - assert recorded_calls[0].payload == "test-payload" - - unsub() - - async_fire_mqtt_message(hass, "test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - - # Cannot unsubscribe twice - with pytest.raises(HomeAssistantError): - unsub() - - -async def test_subscribe_topic_not_initialize( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test the subscription of a topic when MQTT was not initialized.""" - with pytest.raises( - HomeAssistantError, 
match=r".*make sure MQTT is set up correctly" - ): - await mqtt.async_subscribe(hass, "test-topic", record_calls) - - -async def test_subscribe_mqtt_config_entry_disabled( - hass: HomeAssistant, mqtt_mock: MqttMockHAClient -) -> None: - """Test the subscription of a topic when MQTT config entry is disabled.""" - mqtt_mock.connected = True - - mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - assert mqtt_config_entry.state is ConfigEntryState.LOADED - - assert await hass.config_entries.async_unload(mqtt_config_entry.entry_id) - assert mqtt_config_entry.state is ConfigEntryState.NOT_LOADED - - await hass.config_entries.async_set_disabled_by( - mqtt_config_entry.entry_id, ConfigEntryDisabler.USER - ) - mqtt_mock.connected = False - - with pytest.raises(HomeAssistantError, match=r".*MQTT is not enabled"): - await mqtt.async_subscribe(hass, "test-topic", record_calls) - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.2) -async def test_subscribe_and_resubscribe( - hass: HomeAssistant, - client_debug_log: None, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test resubscribing within the debounce time.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - - unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) - # This unsub will be un-done with the following subscribe - # unsubscribe should not be called at the broker - unsub() - await asyncio.sleep(0.1) - unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) - await asyncio.sleep(0.1) - await hass.async_block_till_done() - - async_fire_mqtt_message(hass, "test-topic", "test-payload") - await hass.async_block_till_done() - - assert len(recorded_calls) == 1 - assert 
recorded_calls[0].topic == "test-topic" - assert recorded_calls[0].payload == "test-payload" - # assert unsubscribe was not called - mqtt_client_mock.unsubscribe.assert_not_called() - - unsub() - - await asyncio.sleep(0.2) - await hass.async_block_till_done() - mqtt_client_mock.unsubscribe.assert_called_once_with(["test-topic"]) - - -async def test_subscribe_topic_non_async( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of a topic using the non-async function.""" - await mqtt_mock_entry() - unsub = await hass.async_add_executor_job( - mqtt.subscribe, hass, "test-topic", record_calls - ) - await hass.async_block_till_done() - - async_fire_mqtt_message(hass, "test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "test-topic" - assert recorded_calls[0].payload == "test-payload" - - await hass.async_add_executor_job(unsub) - - async_fire_mqtt_message(hass, "test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - - -async def test_subscribe_bad_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of a topic.""" - await mqtt_mock_entry() - with pytest.raises(HomeAssistantError): - await mqtt.async_subscribe(hass, 55, record_calls) # type: ignore[arg-type] - - -async def test_subscribe_topic_not_match( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test if subscribed topic is not a match.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "test-topic", record_calls) - - async_fire_mqtt_message(hass, "another-test-topic", "test-payload") - - await hass.async_block_till_done() - 
assert len(recorded_calls) == 0 - - -async def test_subscribe_topic_level_wildcard( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "test-topic/+/on", record_calls) - - async_fire_mqtt_message(hass, "test-topic/bier/on", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "test-topic/bier/on" - assert recorded_calls[0].payload == "test-payload" - - -async def test_subscribe_topic_level_wildcard_no_subtree_match( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "test-topic/+/on", record_calls) - - async_fire_mqtt_message(hass, "test-topic/bier", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 0 - - -async def test_subscribe_topic_level_wildcard_root_topic_no_subtree_match( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "test-topic-123", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 0 - - -async def test_subscribe_topic_subtree_wildcard_subtree_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await 
mqtt.async_subscribe(hass, "test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "test-topic/bier/on", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "test-topic/bier/on" - assert recorded_calls[0].payload == "test-payload" - - -async def test_subscribe_topic_subtree_wildcard_root_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "test-topic" - assert recorded_calls[0].payload == "test-payload" - - -async def test_subscribe_topic_subtree_wildcard_no_match( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "another-test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 0 - - -async def test_subscribe_topic_level_wildcard_and_wildcard_root_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "hi/test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "hi/test-topic" - assert 
recorded_calls[0].payload == "test-payload" - - -async def test_subscribe_topic_level_wildcard_and_wildcard_subtree_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "hi/test-topic/here-iam", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "hi/test-topic/here-iam" - assert recorded_calls[0].payload == "test-payload" - - -async def test_subscribe_topic_level_wildcard_and_wildcard_level_no_match( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "hi/here-iam/test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 0 - - -async def test_subscribe_topic_level_wildcard_and_wildcard_no_match( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "hi/another-test-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 0 - - -async def test_subscribe_topic_sys_root( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of $ root topics.""" - await 
mqtt_mock_entry() - await mqtt.async_subscribe(hass, "$test-topic/subtree/on", record_calls) - - async_fire_mqtt_message(hass, "$test-topic/subtree/on", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "$test-topic/subtree/on" - assert recorded_calls[0].payload == "test-payload" - - -async def test_subscribe_topic_sys_root_and_wildcard_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of $ root and wildcard topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "$test-topic/#", record_calls) - - async_fire_mqtt_message(hass, "$test-topic/some-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "$test-topic/some-topic" - assert recorded_calls[0].payload == "test-payload" - - -async def test_subscribe_topic_sys_root_and_wildcard_subtree_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription of $ root and wildcard subtree topics.""" - await mqtt_mock_entry() - await mqtt.async_subscribe(hass, "$test-topic/subtree/#", record_calls) - - async_fire_mqtt_message(hass, "$test-topic/subtree/some-topic", "test-payload") - - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == "$test-topic/subtree/some-topic" - assert recorded_calls[0].payload == "test-payload" - - -async def test_subscribe_special_characters( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test the subscription to topics with special characters.""" - await mqtt_mock_entry() - topic = 
"/test-topic/$(.)[^]{-}" - payload = "p4y.l[]a|> ?" - - await mqtt.async_subscribe(hass, topic, record_calls) - - async_fire_mqtt_message(hass, topic, payload) - await hass.async_block_till_done() - assert len(recorded_calls) == 1 - assert recorded_calls[0].topic == topic - assert recorded_calls[0].payload == payload - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_subscribe_same_topic( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test subscribing to same topic twice and simulate retained messages. - - When subscribing to the same topic again, SUBSCRIBE must be sent to the broker again - for it to resend any retained messages. - """ - mqtt_mock = await mqtt_mock_entry() - - # Fake that the client is connected - mqtt_mock().connected = True - - calls_a: list[ReceiveMessage] = [] - calls_b: list[ReceiveMessage] = [] - - def _callback_a(msg: ReceiveMessage) -> None: - calls_a.append(msg) - - def _callback_b(msg: ReceiveMessage) -> None: - calls_b.append(msg) - - await mqtt.async_subscribe(hass, "test/state", _callback_a, qos=0) - # Simulate a non retained message after the first subscription - async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=False) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=1)) - await hass.async_block_till_done() - assert len(calls_a) == 1 - mqtt_client_mock.subscribe.assert_called() - calls_a = [] - mqtt_client_mock.reset_mock() - - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - await mqtt.async_subscribe(hass, "test/state", _callback_b, qos=1) - # Simulate an other non retained message after the second subscription - async_fire_mqtt_message(hass, "test/state", "online", qos=0, 
retain=False) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=1)) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=1)) - await hass.async_block_till_done() - # Both subscriptions should receive updates - assert len(calls_a) == 1 - assert len(calls_b) == 1 - mqtt_client_mock.subscribe.assert_called() - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_replaying_payload_same_topic( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test replaying retained messages. - - When subscribing to the same topic again, SUBSCRIBE must be sent to the broker again - for it to resend any retained messages for new subscriptions. - Retained messages must only be replayed for new subscriptions, except - when the MQTT client is reconnecting. - """ - mqtt_mock = await mqtt_mock_entry() - - # Fake that the client is connected - mqtt_mock().connected = True - - calls_a: list[ReceiveMessage] = [] - calls_b: list[ReceiveMessage] = [] - - def _callback_a(msg: ReceiveMessage) -> None: - calls_a.append(msg) - - def _callback_b(msg: ReceiveMessage) -> None: - calls_b.append(msg) - - await mqtt.async_subscribe(hass, "test/state", _callback_a) - async_fire_mqtt_message( - hass, "test/state", "online", qos=0, retain=True - ) # Simulate a (retained) message played back - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert len(calls_a) == 1 - mqtt_client_mock.subscribe.assert_called() - calls_a = [] - mqtt_client_mock.reset_mock() - - await mqtt.async_subscribe(hass, "test/state", _callback_b) - - # Simulate edge case where non retained message was received - # after subscription at HA but before the debouncer delay was passed. 
- # The message without retain flag directly after a subscription should - # be processed by both subscriptions. - async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=False) - - # Simulate a (retained) message played back on new subscriptions - async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=True) - - # Make sure the debouncer delay was passed - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - - # The current subscription only received the message without retain flag - assert len(calls_a) == 1 - assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=False) - # The retained message playback should only be processed by the new subscription. - # The existing subscription already got the latest update, hence the existing - # subscription should not receive the replayed (retained) message. - # Messages without retain flag are received on both subscriptions. 
- assert len(calls_b) == 2 - assert help_assert_message(calls_b[0], "test/state", "online", qos=0, retain=False) - assert help_assert_message(calls_b[1], "test/state", "online", qos=0, retain=True) - mqtt_client_mock.subscribe.assert_called() - - calls_a = [] - calls_b = [] - mqtt_client_mock.reset_mock() - - # Simulate new message played back on new subscriptions - # After connecting the retain flag will not be set, even if the - # payload published was retained, we cannot see that - async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=False) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - assert len(calls_a) == 1 - assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=False) - assert len(calls_b) == 1 - assert help_assert_message(calls_b[0], "test/state", "online", qos=0, retain=False) - - # Now simulate the broker was disconnected shortly - calls_a = [] - calls_b = [] - mqtt_client_mock.reset_mock() - mqtt_client_mock.on_disconnect(None, None, 0) - mqtt_client_mock.on_connect(None, None, None, 0) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - mqtt_client_mock.subscribe.assert_called() - # Simulate a (retained) message played back after reconnecting - async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=True) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - # Both subscriptions now should replay the retained message - assert len(calls_a) == 1 - assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=True) - assert len(calls_b) == 1 - assert help_assert_message(calls_b[0], "test/state", "online", qos=0, retain=True) - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) 
-@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_replaying_payload_after_resubscribing( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test replaying and filtering retained messages after resubscribing. - - When subscribing to the same topic again, SUBSCRIBE must be sent to the broker again - for it to resend any retained messages for new subscriptions. - Retained messages must only be replayed for new subscriptions, except - when the MQTT client is reconnection. - """ - mqtt_mock = await mqtt_mock_entry() - - # Fake that the client is connected - mqtt_mock().connected = True - - calls_a: list[ReceiveMessage] = [] - - def _callback_a(msg: ReceiveMessage) -> None: - calls_a.append(msg) - - unsub = await mqtt.async_subscribe(hass, "test/state", _callback_a) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - await hass.async_block_till_done() - mqtt_client_mock.subscribe.assert_called() - - # Simulate a (retained) message played back - async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=True) - await hass.async_block_till_done() - assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=True) - calls_a.clear() - - # Test we get updates - async_fire_mqtt_message(hass, "test/state", "offline", qos=0, retain=False) - await hass.async_block_till_done() - assert help_assert_message(calls_a[0], "test/state", "offline", qos=0, retain=False) - calls_a.clear() - - # Test we filter new retained updates - async_fire_mqtt_message(hass, "test/state", "offline", qos=0, retain=True) - await hass.async_block_till_done() - assert len(calls_a) == 0 - - # Unsubscribe an resubscribe again - unsub() - unsub = await mqtt.async_subscribe(hass, "test/state", _callback_a) - await 
hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - mqtt_client_mock.subscribe.assert_called() - - # Simulate we can receive a (retained) played back message again - async_fire_mqtt_message(hass, "test/state", "online", qos=0, retain=True) - await hass.async_block_till_done() - assert help_assert_message(calls_a[0], "test/state", "online", qos=0, retain=True) - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_replaying_payload_wildcard_topic( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test replaying retained messages. - - When we have multiple subscriptions to the same wildcard topic, - SUBSCRIBE must be sent to the broker again - for it to resend any retained messages for new subscriptions. - Retained messages should only be replayed for new subscriptions, except - when the MQTT client is reconnection. 
- """ - mqtt_mock = await mqtt_mock_entry() - - # Fake that the client is connected - mqtt_mock().connected = True - - calls_a: list[ReceiveMessage] = [] - calls_b: list[ReceiveMessage] = [] - - def _callback_a(msg: ReceiveMessage) -> None: - calls_a.append(msg) - - def _callback_b(msg: ReceiveMessage) -> None: - calls_b.append(msg) - - await mqtt.async_subscribe(hass, "test/#", _callback_a) - # Simulate (retained) messages being played back on new subscriptions - async_fire_mqtt_message(hass, "test/state1", "new_value_1", qos=0, retain=True) - async_fire_mqtt_message(hass, "test/state2", "new_value_2", qos=0, retain=True) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - assert len(calls_a) == 2 - mqtt_client_mock.subscribe.assert_called() - calls_a = [] - mqtt_client_mock.reset_mock() - - # resubscribe to the wild card topic again - await mqtt.async_subscribe(hass, "test/#", _callback_b) - # Simulate (retained) messages being played back on new subscriptions - async_fire_mqtt_message(hass, "test/state1", "initial_value_1", qos=0, retain=True) - async_fire_mqtt_message(hass, "test/state2", "initial_value_2", qos=0, retain=True) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - # The retained messages playback should only be processed for the new subscriptions - assert len(calls_a) == 0 - assert len(calls_b) == 2 - mqtt_client_mock.subscribe.assert_called() - - calls_a = [] - calls_b = [] - mqtt_client_mock.reset_mock() - - # Simulate new messages being received - async_fire_mqtt_message(hass, "test/state1", "update_value_1", qos=0, retain=False) - async_fire_mqtt_message(hass, "test/state2", "update_value_2", qos=0, retain=False) - await hass.async_block_till_done() - assert len(calls_a) == 2 - assert len(calls_b) == 2 - - # Now simulate the broker was 
disconnected shortly - calls_a = [] - calls_b = [] - mqtt_client_mock.reset_mock() - mqtt_client_mock.on_disconnect(None, None, 0) - mqtt_client_mock.on_connect(None, None, None, 0) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - mqtt_client_mock.subscribe.assert_called() - # Simulate the (retained) messages are played back after reconnecting - # for all subscriptions - async_fire_mqtt_message(hass, "test/state1", "update_value_1", qos=0, retain=True) - async_fire_mqtt_message(hass, "test/state2", "update_value_2", qos=0, retain=True) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - # Both subscriptions should replay - assert len(calls_a) == 2 - assert len(calls_b) == 2 - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_not_calling_unsubscribe_with_active_subscribers( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, -) -> None: - """Test not calling unsubscribe() when other subscribers are active.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - - unsub = await mqtt.async_subscribe(hass, "test/state", record_calls, 2) - await mqtt.async_subscribe(hass, "test/state", record_calls, 1) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - await hass.async_block_till_done() - assert mqtt_client_mock.subscribe.called - - unsub() - await hass.async_block_till_done() - assert not mqtt_client_mock.unsubscribe.called - - -async 
def test_not_calling_subscribe_when_unsubscribed_within_cooldown( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, -) -> None: - """Test not calling subscribe() when it is unsubscribed. - - Make sure subscriptions are cleared if unsubscribed before - the subscribe cool down period has ended. - """ - mqtt_mock = await mqtt_mock_entry() - mqtt_client_mock.subscribe.reset_mock() - # Fake that the client is connected - mqtt_mock().connected = True - - unsub = await mqtt.async_subscribe(hass, "test/state", record_calls) - unsub() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - assert not mqtt_client_mock.subscribe.called - - -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_unsubscribe_race( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test not calling unsubscribe() when other subscribers are active.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - - calls_a: list[ReceiveMessage] = [] - calls_b: list[ReceiveMessage] = [] - - def _callback_a(msg: ReceiveMessage) -> None: - calls_a.append(msg) - - def _callback_b(msg: ReceiveMessage) -> None: - calls_b.append(msg) - - mqtt_client_mock.reset_mock() - unsub = await mqtt.async_subscribe(hass, "test/state", _callback_a) - unsub() - await mqtt.async_subscribe(hass, "test/state", _callback_b) - await hass.async_block_till_done() - await hass.async_block_till_done() - - 
async_fire_mqtt_message(hass, "test/state", "online") - await hass.async_block_till_done() - assert not calls_a - assert calls_b - - # We allow either calls [subscribe, unsubscribe, subscribe], [subscribe, subscribe] or - # when both subscriptions were combined [subscribe] - expected_calls_1 = [ - call.subscribe([("test/state", 0)]), - call.unsubscribe("test/state"), - call.subscribe([("test/state", 0)]), - ] - expected_calls_2 = [ - call.subscribe([("test/state", 0)]), - call.subscribe([("test/state", 0)]), - ] - expected_calls_3 = [ - call.subscribe([("test/state", 0)]), - ] - assert mqtt_client_mock.mock_calls in ( - expected_calls_1, - expected_calls_2, - expected_calls_3, - ) - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_DISCOVERY: False}], -) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -async def test_restore_subscriptions_on_reconnect( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, -) -> None: - """Test subscriptions are restored on reconnect.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - mqtt_client_mock.subscribe.reset_mock() - - await mqtt.async_subscribe(hass, "test/state", record_calls) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - assert mqtt_client_mock.subscribe.call_count == 1 - - mqtt_client_mock.on_disconnect(None, None, 0) - mqtt_client_mock.on_connect(None, None, None, 0) - await hass.async_block_till_done() - 
async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown - await hass.async_block_till_done() - assert mqtt_client_mock.subscribe.call_count == 2 - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_DISCOVERY: False}], -) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 1.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 1.0) -async def test_restore_all_active_subscriptions_on_reconnect( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, - freezer: FrozenDateTimeFactory, -) -> None: - """Test active subscriptions are restored correctly on reconnect.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - - unsub = await mqtt.async_subscribe(hass, "test/state", record_calls, qos=2) - await mqtt.async_subscribe(hass, "test/state", record_calls, qos=1) - await mqtt.async_subscribe(hass, "test/state", record_calls, qos=0) - await hass.async_block_till_done() - freezer.tick(3) - async_fire_time_changed(hass) # cooldown - await hass.async_block_till_done() - - # the subscription with the highest QoS should survive - expected = [ - call([("test/state", 2)]), - ] - assert mqtt_client_mock.subscribe.mock_calls == expected - - unsub() - await hass.async_block_till_done() - assert mqtt_client_mock.unsubscribe.call_count == 0 - - mqtt_client_mock.on_disconnect(None, None, 0) - await hass.async_block_till_done() - mqtt_client_mock.on_connect(None, None, None, 0) - freezer.tick(3) - async_fire_time_changed(hass) # cooldown - await hass.async_block_till_done() - - expected.append(call([("test/state", 1)])) - assert 
mqtt_client_mock.subscribe.mock_calls == expected - - freezer.tick(3) - async_fire_time_changed(hass) # cooldown - await hass.async_block_till_done() - freezer.tick(3) - async_fire_time_changed(hass) # cooldown - await hass.async_block_till_done() - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_DISCOVERY: False}], -) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 1.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 1.0) -async def test_subscribed_at_highest_qos( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the highest qos as assigned when subscribing to the same topic.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - - await mqtt.async_subscribe(hass, "test/state", record_calls, qos=0) - await hass.async_block_till_done() - freezer.tick(5) - async_fire_time_changed(hass) # cooldown - await hass.async_block_till_done() - assert ("test/state", 0) in help_all_subscribe_calls(mqtt_client_mock) - mqtt_client_mock.reset_mock() - freezer.tick(5) - async_fire_time_changed(hass) # cooldown - await hass.async_block_till_done() - await hass.async_block_till_done() - - await mqtt.async_subscribe(hass, "test/state", record_calls, qos=1) - await mqtt.async_subscribe(hass, "test/state", record_calls, qos=2) - await hass.async_block_till_done() - freezer.tick(5) - async_fire_time_changed(hass) # cooldown - await hass.async_block_till_done() - # the subscription with the highest QoS should survive - assert help_all_subscribe_calls(mqtt_client_mock) == [("test/state", 2)] - - +@pytest.mark.usefixtures("mqtt_client_mock") async def test_reload_entry_with_restored_subscriptions( hass: 
HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + mock_debouncer: asyncio.Event, record_calls: MessageCallbackType, recorded_calls: list[ReceiveMessage], ) -> None: @@ -2001,17 +865,18 @@ async def test_reload_entry_with_restored_subscriptions( entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) entry.add_to_hass(hass) hass.config.components.add(mqtt.DOMAIN) - mqtt_client_mock.connect.return_value = 0 with patch("homeassistant.config.load_yaml_config_file", return_value={}): await hass.config_entries.async_setup(entry.entry_id) + mock_debouncer.clear() await mqtt.async_subscribe(hass, "test-topic", record_calls) await mqtt.async_subscribe(hass, "wild/+/card", record_calls) + # cooldown + await mock_debouncer.wait() async_fire_mqtt_message(hass, "test-topic", "test-payload") async_fire_mqtt_message(hass, "wild/any/card", "wild-card-payload") - await hass.async_block_till_done() assert len(recorded_calls) == 2 assert recorded_calls[0].topic == "test-topic" assert recorded_calls[0].payload == "test-payload" @@ -2022,13 +887,14 @@ async def test_reload_entry_with_restored_subscriptions( # Reload the entry with patch("homeassistant.config.load_yaml_config_file", return_value={}): assert await hass.config_entries.async_reload(entry.entry_id) + mock_debouncer.clear() assert entry.state is ConfigEntryState.LOADED - await hass.async_block_till_done() + # cooldown + await mock_debouncer.wait() async_fire_mqtt_message(hass, "test-topic", "test-payload2") async_fire_mqtt_message(hass, "wild/any/card", "wild-card-payload2") - await hass.async_block_till_done() assert len(recorded_calls) == 2 assert recorded_calls[0].topic == "test-topic" assert recorded_calls[0].payload == "test-payload2" @@ -2039,13 +905,14 @@ async def test_reload_entry_with_restored_subscriptions( # Reload the entry again with patch("homeassistant.config.load_yaml_config_file", return_value={}): assert await hass.config_entries.async_reload(entry.entry_id) + 
mock_debouncer.clear() assert entry.state is ConfigEntryState.LOADED - await hass.async_block_till_done() + # cooldown + await mock_debouncer.wait() async_fire_mqtt_message(hass, "test-topic", "test-payload3") async_fire_mqtt_message(hass, "wild/any/card", "wild-card-payload3") - await hass.async_block_till_done() assert len(recorded_calls) == 2 assert recorded_calls[0].topic == "test-topic" assert recorded_calls[0].payload == "test-payload3" @@ -2053,259 +920,6 @@ async def test_reload_entry_with_restored_subscriptions( assert recorded_calls[1].payload == "wild-card-payload3" -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 2) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 2) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 2) -async def test_canceling_debouncer_on_shutdown( - hass: HomeAssistant, - record_calls: MessageCallbackType, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test canceling the debouncer when HA shuts down.""" - - mqtt_mock = await mqtt_mock_entry() - mqtt_client_mock.subscribe.reset_mock() - - # Fake that the client is connected - mqtt_mock().connected = True - - await mqtt.async_subscribe(hass, "test/state1", record_calls) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=0.2)) - await hass.async_block_till_done() - - await mqtt.async_subscribe(hass, "test/state2", record_calls) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=0.2)) - await hass.async_block_till_done() - - await mqtt.async_subscribe(hass, "test/state3", record_calls) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=0.2)) - await hass.async_block_till_done() - - await mqtt.async_subscribe(hass, "test/state4", record_calls) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=0.2)) - await hass.async_block_till_done() - - await mqtt.async_subscribe(hass, "test/state5", record_calls) - - 
mqtt_client_mock.subscribe.assert_not_called() - - # Stop HA so the scheduled task will be canceled - hass.bus.fire(EVENT_HOMEASSISTANT_STOP) - # mock disconnect status - mqtt_client_mock.on_disconnect(None, None, 0) - await hass.async_block_till_done() - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) - await hass.async_block_till_done() - mqtt_client_mock.subscribe.assert_not_called() - - -async def test_canceling_debouncer_normal( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test canceling the debouncer before completion.""" - - async def _async_myjob() -> None: - await asyncio.sleep(1.0) - - debouncer = EnsureJobAfterCooldown(0.0, _async_myjob) - debouncer.async_schedule() - await asyncio.sleep(0.01) - assert debouncer._task is not None - await debouncer.async_cleanup() - assert debouncer._task is None - - -async def test_canceling_debouncer_throws( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test canceling the debouncer when HA shuts down.""" - - async def _async_myjob() -> None: - await asyncio.sleep(1.0) - - debouncer = EnsureJobAfterCooldown(0.0, _async_myjob) - debouncer.async_schedule() - await asyncio.sleep(0.01) - assert debouncer._task is not None - # let debouncer._task fail by mocking it - with patch.object(debouncer, "_task") as task: - task.cancel = MagicMock(return_value=True) - await debouncer.async_cleanup() - assert "Error cleaning up task" in caplog.text - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) - await hass.async_block_till_done() - - -async def test_initial_setup_logs_error( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_client_mock: MqttMockPahoClient, -) -> None: - """Test for setup failure if initial client connection fails.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) - entry.add_to_hass(hass) - 
mqtt_client_mock.connect.side_effect = MagicMock(return_value=1) - try: - assert await hass.config_entries.async_setup(entry.entry_id) - except HomeAssistantError: - assert True - assert "Failed to connect to MQTT server:" in caplog.text - - -async def test_logs_error_if_no_connect_broker( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, -) -> None: - """Test for setup failure if connection to broker is missing.""" - await mqtt_mock_entry() - # test with rc = 3 -> broker unavailable - mqtt_client_mock.on_connect(mqtt_client_mock, None, None, 3) - await hass.async_block_till_done() - assert ( - "Unable to connect to the MQTT broker: Connection Refused: broker unavailable." - in caplog.text - ) - - -@pytest.mark.parametrize("return_code", [4, 5]) -async def test_triggers_reauth_flow_if_auth_fails( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, - return_code: int, -) -> None: - """Test re-auth is triggered if authentication is failing.""" - await mqtt_mock_entry() - # test with rc = 4 -> CONNACK_REFUSED_NOT_AUTHORIZED and 5 -> CONNACK_REFUSED_BAD_USERNAME_PASSWORD - mqtt_client_mock.on_connect(mqtt_client_mock, None, None, return_code) - await hass.async_block_till_done() - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - assert flows[0]["context"]["source"] == "reauth" - - -@patch("homeassistant.components.mqtt.client.TIMEOUT_ACK", 0.3) -async def test_handle_mqtt_on_callback( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, -) -> None: - """Test receiving an ACK callback before waiting for it.""" - await mqtt_mock_entry() - with patch.object(mqtt_client_mock, "get_mid", return_value=100): - # Simulate an ACK for mid == 100, this will call 
mqtt_mock._async_get_mid_future(mid) - mqtt_client_mock.on_publish(mqtt_client_mock, None, 100) - await hass.async_block_till_done() - # Make sure the ACK has been received - await hass.async_block_till_done() - # Now call publish without call back, this will call _async_async_wait_for_mid(msg_info.mid) - await mqtt.async_publish(hass, "no_callback/test-topic", "test-payload") - # Since the mid event was already set, we should not see any timeout warning in the log - await hass.async_block_till_done() - assert "No ACK from MQTT server" not in caplog.text - - -async def test_handle_mqtt_on_callback_after_timeout( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, -) -> None: - """Test receiving an ACK after a timeout.""" - mqtt_mock = await mqtt_mock_entry() - # Simulate the mid future getting a timeout - mqtt_mock()._async_get_mid_future(100).set_exception(asyncio.TimeoutError) - # Simulate an ACK for mid == 100, being received after the timeout - mqtt_client_mock.on_publish(mqtt_client_mock, None, 100) - await hass.async_block_till_done() - assert "No ACK from MQTT server" not in caplog.text - assert "InvalidStateError" not in caplog.text - - -async def test_publish_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test publish error.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) - entry.add_to_hass(hass) - - # simulate an Out of memory error - with patch( - "homeassistant.components.mqtt.async_client.AsyncMQTTClient" - ) as mock_client: - mock_client().connect = lambda *args: 1 - mock_client().publish().rc = 1 - assert await hass.config_entries.async_setup(entry.entry_id) - with pytest.raises(HomeAssistantError): - await mqtt.async_publish( - hass, "some-topic", b"test-payload", qos=0, retain=False, encoding=None - ) - assert "Failed to connect to MQTT server: Out of memory." 
in caplog.text - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -async def test_subscribe_error( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, - record_calls: MessageCallbackType, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test publish error.""" - await mqtt_mock_entry() - mqtt_client_mock.on_connect(mqtt_client_mock, None, None, 0) - await hass.async_block_till_done() - await hass.async_block_till_done() - mqtt_client_mock.reset_mock() - # simulate client is not connected error before subscribing - mqtt_client_mock.subscribe.side_effect = lambda *args: (4, None) - await mqtt.async_subscribe(hass, "some-topic", record_calls) - while mqtt_client_mock.subscribe.call_count == 0: - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - assert ( - "Error talking to MQTT: The client is not currently connected." in caplog.text - ) - - -async def test_handle_message_callback( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, -) -> None: - """Test for handling an incoming message callback.""" - callbacks = [] - - @callback - def _callback(args) -> None: - callbacks.append(args) - - mock_mqtt = await mqtt_mock_entry() - msg = ReceiveMessage( - "some-topic", b"test-payload", 1, False, "some-topic", datetime.now() - ) - mqtt_client_mock.on_connect(mqtt_client_mock, None, None, 0) - await mqtt.async_subscribe(hass, "some-topic", _callback) - mqtt_client_mock.on_message(mock_mqtt, None, msg) - - await hass.async_block_till_done() - await hass.async_block_till_done() - assert len(callbacks) == 1 - assert callbacks[0].topic == "some-topic" - assert callbacks[0].qos == 1 - assert callbacks[0].payload == "test-payload" - - @pytest.mark.parametrize( "hass_config", [ @@ -2321,9 +935,7 @@ async def test_handle_message_callback( ], ) async def 
test_setup_manual_mqtt_with_platform_key( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test set up a manual MQTT item with a platform key.""" assert await mqtt_mock_entry() @@ -2335,628 +947,16 @@ async def test_setup_manual_mqtt_with_platform_key( @pytest.mark.parametrize("hass_config", [{mqtt.DOMAIN: {"light": {"name": "test"}}}]) async def test_setup_manual_mqtt_with_invalid_config( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test set up a manual MQTT item with an invalid config.""" assert await mqtt_mock_entry() assert "required key not provided" in caplog.text -@pytest.mark.parametrize( - ("mqtt_config_entry_data", "protocol"), - [ - ( - { - mqtt.CONF_BROKER: "mock-broker", - CONF_PROTOCOL: "3.1", - }, - 3, - ), - ( - { - mqtt.CONF_BROKER: "mock-broker", - CONF_PROTOCOL: "3.1.1", - }, - 4, - ), - ( - { - mqtt.CONF_BROKER: "mock-broker", - CONF_PROTOCOL: "5", - }, - 5, - ), - ], -) -async def test_setup_mqtt_client_protocol( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - protocol: int, -) -> None: - """Test MQTT client protocol setup.""" - with patch( - "homeassistant.components.mqtt.async_client.AsyncMQTTClient" - ) as mock_client: - await mqtt_mock_entry() - - # check if protocol setup was correctly - assert mock_client.call_args[1]["protocol"] == protocol - - -@patch("homeassistant.components.mqtt.client.TIMEOUT_ACK", 0.2) -async def test_handle_mqtt_timeout_on_callback( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test publish without receiving an ACK callback.""" - mid = 0 - - class FakeInfo: - """Returns a simulated client publish response.""" - - mid = 100 - rc = 0 - - with patch( - 
"homeassistant.components.mqtt.async_client.AsyncMQTTClient" - ) as mock_client: - - def _mock_ack(topic: str, qos: int = 0) -> tuple[int, int]: - # Handle ACK for subscribe normally - nonlocal mid - mid += 1 - mock_client.on_subscribe(0, 0, mid) - return (0, mid) - - # We want to simulate the publish behaviour MQTT client - mock_client = mock_client.return_value - mock_client.publish.return_value = FakeInfo() - mock_client.subscribe.side_effect = _mock_ack - mock_client.connect = MagicMock( - return_value=0, - side_effect=lambda *args, **kwargs: hass.loop.call_soon_threadsafe( - mock_client.on_connect, mock_client, None, 0, 0, 0 - ), - ) - - entry = MockConfigEntry( - domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"} - ) - entry.add_to_hass(hass) - - # Make sure we are connected correctly - mock_client.on_connect(mock_client, None, None, 0) - # Set up the integration - assert await hass.config_entries.async_setup(entry.entry_id) - - # Now call we publish without simulating and ACK callback - await mqtt.async_publish(hass, "no_callback/test-topic", "test-payload") - await hass.async_block_till_done() - # There is no ACK so we should see a timeout in the log after publishing - assert len(mock_client.publish.mock_calls) == 1 - assert "No ACK from MQTT server" in caplog.text - - -async def test_setup_raises_config_entry_not_ready_if_no_connect_broker( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test for setup failure if connection to broker is missing.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) - entry.add_to_hass(hass) - - with patch( - "homeassistant.components.mqtt.async_client.AsyncMQTTClient" - ) as mock_client: - mock_client().connect = MagicMock(side_effect=OSError("Connection error")) - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert "Failed to connect to MQTT server due to exception:" in caplog.text - - 
-@pytest.mark.parametrize( - ("mqtt_config_entry_data", "insecure_param"), - [ - ({"broker": "test-broker", "certificate": "auto"}, "not set"), - ( - {"broker": "test-broker", "certificate": "auto", "tls_insecure": False}, - False, - ), - ({"broker": "test-broker", "certificate": "auto", "tls_insecure": True}, True), - ], -) -async def test_setup_uses_certificate_on_certificate_set_to_auto_and_insecure( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - insecure_param: bool | str, -) -> None: - """Test setup uses bundled certs when certificate is set to auto and insecure.""" - calls = [] - insecure_check = {"insecure": "not set"} - - def mock_tls_set( - certificate, certfile=None, keyfile=None, tls_version=None - ) -> None: - calls.append((certificate, certfile, keyfile, tls_version)) - - def mock_tls_insecure_set(insecure_param) -> None: - insecure_check["insecure"] = insecure_param - - with patch( - "homeassistant.components.mqtt.async_client.AsyncMQTTClient" - ) as mock_client: - mock_client().tls_set = mock_tls_set - mock_client().tls_insecure_set = mock_tls_insecure_set - await mqtt_mock_entry() - await hass.async_block_till_done() - - assert calls - - expected_certificate = certifi.where() - assert calls[0][0] == expected_certificate - - # test if insecure is set - assert insecure_check["insecure"] == insecure_param - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_CERTIFICATE: "auto", - } - ], -) -async def test_tls_version( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test setup defaults for tls.""" - await mqtt_mock_entry() - await hass.async_block_till_done() - assert ( - mqtt_client_mock.tls_set.mock_calls[0][2]["tls_version"] - == ssl.PROTOCOL_TLS_CLIENT - ) - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_BIRTH_MESSAGE: { - 
mqtt.ATTR_TOPIC: "birth", - mqtt.ATTR_PAYLOAD: "birth", - mqtt.ATTR_QOS: 0, - mqtt.ATTR_RETAIN: False, - }, - } - ], -) -async def test_custom_birth_message( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test sending birth message.""" - await mqtt_mock_entry() - birth = asyncio.Event() - - async def wait_birth(msg: ReceiveMessage) -> None: - """Handle birth message.""" - birth.set() - - with patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.1): - await mqtt.async_subscribe(hass, "birth", wait_birth) - mqtt_client_mock.on_connect(None, None, 0, 0) - await hass.async_block_till_done() - await birth.wait() - mqtt_client_mock.publish.assert_called_with("birth", "birth", 0, False) - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_BIRTH_MESSAGE: { - mqtt.ATTR_TOPIC: "homeassistant/status", - mqtt.ATTR_PAYLOAD: "online", - mqtt.ATTR_QOS: 0, - mqtt.ATTR_RETAIN: False, - }, - } - ], -) -async def test_default_birth_message( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test sending birth message.""" - await mqtt_mock_entry() - birth = asyncio.Event() - - async def wait_birth(msg: ReceiveMessage) -> None: - """Handle birth message.""" - birth.set() - - with patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.1): - await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) - mqtt_client_mock.on_connect(None, None, 0, 0) - await hass.async_block_till_done() - await birth.wait() - mqtt_client_mock.publish.assert_called_with( - "homeassistant/status", "online", 0, False - ) - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_BIRTH_MESSAGE: {}}], -) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) 
-@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_no_birth_message( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test disabling birth message.""" - await mqtt_mock_entry() - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - mqtt_client_mock.reset_mock() - - # Assert no birth message was sent - mqtt_client_mock.on_connect(None, None, 0, 0) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - mqtt_client_mock.publish.assert_not_called() - - async def callback(msg: ReceiveMessage) -> None: - """Handle birth message.""" - - mqtt_client_mock.reset_mock() - await mqtt.async_subscribe(hass, "homeassistant/some-topic", callback) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) - await hass.async_block_till_done() - mqtt_client_mock.subscribe.assert_called() - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_BIRTH_MESSAGE: { - mqtt.ATTR_TOPIC: "homeassistant/status", - mqtt.ATTR_PAYLOAD: "online", - mqtt.ATTR_QOS: 0, - mqtt.ATTR_RETAIN: False, - }, - } - ], -) -async def test_delayed_birth_message( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_config_entry_data, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test sending birth message does not happen until Home Assistant starts.""" - mqtt_mock = await mqtt_mock_entry() - - hass.set_state(CoreState.starting) - birth = asyncio.Event() - - await hass.async_block_till_done() - - entry = MockConfigEntry(domain=mqtt.DOMAIN, data=mqtt_config_entry_data) - entry.add_to_hass(hass) - assert await 
hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - mqtt_component_mock = MagicMock( - return_value=hass.data["mqtt"].client, - wraps=hass.data["mqtt"].client, - ) - mqtt_component_mock._mqttc = mqtt_client_mock - - hass.data["mqtt"].client = mqtt_component_mock - mqtt_mock = hass.data["mqtt"].client - mqtt_mock.reset_mock() - - async def wait_birth(msg: ReceiveMessage) -> None: - """Handle birth message.""" - birth.set() - - with patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.1): - await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) - mqtt_client_mock.on_connect(None, None, 0, 0) - await hass.async_block_till_done() - with pytest.raises(TimeoutError): - await asyncio.wait_for(birth.wait(), 0.2) - assert not mqtt_client_mock.publish.called - assert not birth.is_set() - - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await birth.wait() - mqtt_client_mock.publish.assert_called_with( - "homeassistant/status", "online", 0, False - ) - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_BIRTH_MESSAGE: { - mqtt.ATTR_TOPIC: "homeassistant/status", - mqtt.ATTR_PAYLOAD: "online", - mqtt.ATTR_QOS: 0, - mqtt.ATTR_RETAIN: False, - }, - } - ], -) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_subscription_done_when_birth_message_is_sent( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - record_calls: MessageCallbackType, - mqtt_config_entry_data, -) -> None: - """Test sending birth message until initial subscription has been completed.""" - hass.set_state(CoreState.starting) - birth = asyncio.Event() - - await hass.async_block_till_done() - - entry = MockConfigEntry(domain=mqtt.DOMAIN, data=mqtt_config_entry_data) - 
entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - mqtt_client_mock.on_disconnect(None, None, 0, 0) - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - - @callback - def wait_birth(msg: ReceiveMessage) -> None: - """Handle birth message.""" - birth.set() - - await mqtt.async_subscribe(hass, "topic/test", record_calls) - await mqtt.async_subscribe(hass, "homeassistant/status", wait_birth) - await hass.async_block_till_done() - mqtt_client_mock.reset_mock() - mqtt_client_mock.on_connect(None, None, 0, 0) - # We wait until we receive a birth message - await asyncio.wait_for(birth.wait(), 1) - - # Assert we already have subscribed at the client - # for new config payloads at the time we the birth message is received - subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) - assert ("homeassistant/+/+/config", 0) in subscribe_calls - assert ("homeassistant/+/+/+/config", 0) in subscribe_calls - mqtt_client_mock.publish.assert_called_with( - "homeassistant/status", "online", 0, False - ) - assert ("topic/test", 0) in subscribe_calls - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_WILL_MESSAGE: { - mqtt.ATTR_TOPIC: "death", - mqtt.ATTR_PAYLOAD: "death", - mqtt.ATTR_QOS: 0, - mqtt.ATTR_RETAIN: False, - }, - } - ], -) -async def test_custom_will_message( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test will message.""" - await mqtt_mock_entry() - - mqtt_client_mock.will_set.assert_called_with( - topic="death", payload="death", qos=0, retain=False - ) - - -async def test_default_will_message( - hass: 
HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test will message.""" - await mqtt_mock_entry() - - mqtt_client_mock.will_set.assert_called_with( - topic="homeassistant/status", payload="offline", qos=0, retain=False - ) - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_WILL_MESSAGE: {}}], -) -async def test_no_will_message( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, -) -> None: - """Test will message.""" - await mqtt_mock_entry() - - mqtt_client_mock.will_set.assert_not_called() - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_BIRTH_MESSAGE: {}, - mqtt.CONF_DISCOVERY: False, - } - ], -) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_mqtt_subscribes_topics_on_connect( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, -) -> None: - """Test subscription to topic on connect.""" - await mqtt_mock_entry() - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - mqtt_client_mock.reset_mock() - - await mqtt.async_subscribe(hass, "topic/test", record_calls) - await mqtt.async_subscribe(hass, "home/sensor", record_calls, 2) - await mqtt.async_subscribe(hass, "still/pending", record_calls) - await mqtt.async_subscribe(hass, "still/pending", record_calls, 1) - - mqtt_client_mock.on_connect(None, None, 0, 0) - - await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) - await hass.async_block_till_done() - - assert 
mqtt_client_mock.disconnect.call_count == 0 - - subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) - assert len(subscribe_calls) == 3 - assert ("topic/test", 0) in subscribe_calls - assert ("home/sensor", 2) in subscribe_calls - assert ("still/pending", 1) in subscribe_calls - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_BIRTH_MESSAGE: {}, - mqtt.CONF_DISCOVERY: False, - } - ], -) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -async def test_mqtt_subscribes_in_single_call( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, -) -> None: - """Test bundled client subscription to topic.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - - mqtt_client_mock.subscribe.reset_mock() - await mqtt.async_subscribe(hass, "topic/test", record_calls) - await mqtt.async_subscribe(hass, "home/sensor", record_calls) - await hass.async_block_till_done() - # Make sure the debouncer finishes - await asyncio.sleep(0.2) - - assert mqtt_client_mock.subscribe.call_count == 1 - # Assert we have a single subscription call with both subscriptions - assert mqtt_client_mock.subscribe.mock_calls[0][1][0] in [ - [("topic/test", 0), ("home/sensor", 0)], - [("home/sensor", 0), ("topic/test", 0)], - ] - - -@pytest.mark.parametrize( - "mqtt_config_entry_data", - [ - { - mqtt.CONF_BROKER: "mock-broker", - mqtt.CONF_BIRTH_MESSAGE: {}, - mqtt.CONF_DISCOVERY: False, - } - ], -) -@patch("homeassistant.components.mqtt.client.MAX_SUBSCRIBES_PER_CALL", 2) -@patch("homeassistant.components.mqtt.client.MAX_UNSUBSCRIBES_PER_CALL", 2) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 
0.0) -async def test_mqtt_subscribes_and_unsubscribes_in_chunks( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - record_calls: MessageCallbackType, -) -> None: - """Test chunked client subscriptions.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - - mqtt_client_mock.subscribe.reset_mock() - unsub_tasks: list[CALLBACK_TYPE] = [] - unsub_tasks.append(await mqtt.async_subscribe(hass, "topic/test1", record_calls)) - unsub_tasks.append(await mqtt.async_subscribe(hass, "home/sensor1", record_calls)) - unsub_tasks.append(await mqtt.async_subscribe(hass, "topic/test2", record_calls)) - unsub_tasks.append(await mqtt.async_subscribe(hass, "home/sensor2", record_calls)) - await hass.async_block_till_done() - # Make sure the debouncer finishes - await asyncio.sleep(0.2) - - assert mqtt_client_mock.subscribe.call_count == 2 - # Assert we have a 2 subscription calls with both 2 subscriptions - assert len(mqtt_client_mock.subscribe.mock_calls[0][1][0]) == 2 - assert len(mqtt_client_mock.subscribe.mock_calls[1][1][0]) == 2 - - # Unsubscribe all topics - for task in unsub_tasks: - task() - await hass.async_block_till_done() - # Make sure the debouncer finishes - await asyncio.sleep(0.2) - - assert mqtt_client_mock.unsubscribe.call_count == 2 - # Assert we have a 2 unsubscribe calls with both 2 topic - assert len(mqtt_client_mock.unsubscribe.mock_calls[0][1][0]) == 2 - assert len(mqtt_client_mock.unsubscribe.mock_calls[1][1][0]) == 2 - - +@pytest.mark.usefixtures("mqtt_client_mock") async def test_default_entry_setting_are_applied( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: """Test if the MQTT component loads when config entry data not has all default 
settings.""" data = ( @@ -2966,11 +966,12 @@ async def test_default_entry_setting_are_applied( ) # Config entry data is incomplete but valid according the schema - entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - hass.config_entries.async_update_entry( - entry, data={"broker": "test-broker", "port": 1234} + entry = MockConfigEntry( + domain=mqtt.DOMAIN, data={"broker": "test-broker", "port": 1234} ) - await mqtt_mock_entry() + entry.add_to_hass(hass) + hass.config.components.add(mqtt.DOMAIN) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() # Discover a device to verify the entry was setup correctly @@ -3008,13 +1009,11 @@ async def test_message_callback_exception_gets_logged( @pytest.mark.no_fail_on_log_exception +@pytest.mark.usefixtures("mock_debouncer", "setup_with_birth_msg_client_mock") async def test_message_partial_callback_exception_gets_logged( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_debouncer: asyncio.Event ) -> None: """Test exception raised by message handler.""" - await mqtt_mock_entry() @callback def bad_handler(msg: ReceiveMessage) -> None: @@ -3029,9 +1028,11 @@ async def test_message_partial_callback_exception_gets_logged( """Partial callback handler.""" msg_callback(msg) + mock_debouncer.clear() await mqtt.async_subscribe( hass, "test-topic", partial(parial_handler, bad_handler, {"some_attr"}) ) + await mock_debouncer.wait() async_fire_mqtt_message(hass, "test-topic", "test") await hass.async_block_till_done() @@ -3707,10 +1708,12 @@ async def test_debug_info_qos_retain( } in messages +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 async def test_publish_json_from_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the 
publishing of call to services.""" + """Test the publishing of call to mqtt publish action.""" mqtt_mock = await mqtt_mock_entry() test_str = "{'valid': 'python', 'invalid': 'json'}" @@ -3760,11 +1763,11 @@ async def test_publish_json_from_template( async def test_subscribe_connection_status( hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - mqtt_client_mock: MqttMockPahoClient, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, ) -> None: """Test connextion status subscription.""" - mqtt_mock = await mqtt_mock_entry() + mqtt_client_mock = setup_with_birth_msg_client_mock mqtt_connected_calls_callback: list[bool] = [] mqtt_connected_calls_async: list[bool] = [] @@ -3777,7 +1780,13 @@ async def test_subscribe_connection_status( """Update state on connection/disconnection to MQTT broker.""" mqtt_connected_calls_async.append(status) - mqtt_mock.connected = True + # Check connection status + assert mqtt.is_connected(hass) is True + + # Mock disconnect status + mqtt_client_mock.on_disconnect(None, None, 0) + await hass.async_block_till_done() + assert mqtt.is_connected(hass) is False unsub_callback = mqtt.async_subscribe_connection_status( hass, async_mqtt_connected_callback @@ -3787,21 +1796,26 @@ async def test_subscribe_connection_status( ) await hass.async_block_till_done() - # Mock connection status + # Mock connect status + mock_debouncer.clear() mqtt_client_mock.on_connect(None, None, 0, 0) - await hass.async_block_till_done() + await mock_debouncer.wait() assert mqtt.is_connected(hass) is True # Mock disconnect status mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() + assert mqtt.is_connected(hass) is False # Unsubscribe unsub_callback() unsub_async() + # Mock connect status + mock_debouncer.clear() mqtt_client_mock.on_connect(None, None, 0, 0) - await hass.async_block_till_done() + await mock_debouncer.wait() + assert mqtt.is_connected(hass) is True # Check calls assert 
len(mqtt_connected_calls_callback) == 2 @@ -3815,11 +1829,11 @@ async def test_subscribe_connection_status( async def test_unload_config_entry( hass: HomeAssistant, - mqtt_mock: MqttMockHAClient, - mqtt_client_mock: MqttMockPahoClient, + setup_with_birth_msg_client_mock: MqttMockPahoClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test unloading the MQTT entry.""" + mqtt_client_mock = setup_with_birth_msg_client_mock assert hass.services.has_service(mqtt.DOMAIN, "dump") assert hass.services.has_service(mqtt.DOMAIN, "publish") @@ -3835,7 +1849,7 @@ async def test_unload_config_entry( new_mqtt_config_entry = mqtt_config_entry mqtt_client_mock.publish.assert_any_call("just_in_time", "published", 0, False) assert new_mqtt_config_entry.state is ConfigEntryState.NOT_LOADED - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert not hass.services.has_service(mqtt.DOMAIN, "dump") assert not hass.services.has_service(mqtt.DOMAIN, "publish") assert "No ACK from MQTT server" not in caplog.text @@ -4021,9 +2035,7 @@ async def test_setup_manual_items_with_unique_ids( ], ) async def test_link_config_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test manual and dynamically setup entities are linked to the config entry.""" # set up manual item @@ -4049,6 +2061,7 @@ async def test_link_config_entry( mqtt_config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] mqtt_platforms = async_get_platforms(hass, mqtt.DOMAIN) + @callback def _check_entities() -> int: entities: list[Entity] = [] for mqtt_platform in mqtt_platforms: @@ -4108,9 +2121,7 @@ async def test_link_config_entry( ], ) async def test_reload_config_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: 
MqttMockHAClientGenerator ) -> None: """Test manual entities reloaded and set up correctly.""" await mqtt_mock_entry() @@ -4130,6 +2141,7 @@ async def test_reload_config_entry( entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + @callback def _check_entities() -> int: entities: list[Entity] = [] mqtt_platforms = async_get_platforms(hass, mqtt.DOMAIN) @@ -4177,9 +2189,6 @@ async def test_reload_config_entry( assert await hass.config_entries.async_reload(entry.entry_id) assert entry.state is ConfigEntryState.LOADED await hass.async_block_till_done() - # Assert the MQTT client was connected gracefully - with caplog.at_level(logging.INFO): - assert "Disconnected from MQTT server mock-broker:1883" in caplog.text assert (state := hass.states.get("sensor.test_manual1")) is not None assert state.attributes["friendly_name"] == "test_manual1_updated" @@ -4258,8 +2267,7 @@ async def test_reload_config_entry( ], ) async def test_reload_with_invalid_config( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test reloading yaml config fails.""" await mqtt_mock_entry() @@ -4299,8 +2307,7 @@ async def test_reload_with_invalid_config( ], ) async def test_reload_with_empty_config( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test reloading yaml config fails.""" await mqtt_mock_entry() @@ -4335,8 +2342,7 @@ async def test_reload_with_empty_config( ], ) async def test_reload_with_new_platform_config( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test reloading yaml with new platform config.""" await mqtt_mock_entry() @@ -4443,247 +2449,6 @@ async def test_multi_platform_discovery( ) -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) 
-@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_auto_reconnect( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test reconnection is automatically done.""" - mqtt_mock = await mqtt_mock_entry() - await hass.async_block_till_done() - assert mqtt_mock.connected is True - mqtt_client_mock.reconnect.reset_mock() - - mqtt_client_mock.disconnect() - mqtt_client_mock.on_disconnect(None, None, 0) - await hass.async_block_till_done() - - mqtt_client_mock.reconnect.side_effect = OSError("foo") - async_fire_time_changed( - hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) - ) - await hass.async_block_till_done() - assert len(mqtt_client_mock.reconnect.mock_calls) == 1 - assert "Error re-connecting to MQTT server due to exception: foo" in caplog.text - - mqtt_client_mock.reconnect.side_effect = None - async_fire_time_changed( - hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) - ) - await hass.async_block_till_done() - assert len(mqtt_client_mock.reconnect.mock_calls) == 2 - - hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) - - mqtt_client_mock.disconnect() - mqtt_client_mock.on_disconnect(None, None, 0) - await hass.async_block_till_done() - - async_fire_time_changed( - hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) - ) - await hass.async_block_till_done() - # Should not reconnect after stop - assert len(mqtt_client_mock.reconnect.mock_calls) == 2 - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_server_sock_connect_and_disconnect( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: 
MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test handling the socket connected and disconnected.""" - mqtt_mock = await mqtt_mock_entry() - await hass.async_block_till_done() - assert mqtt_mock.connected is True - - mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS - - client, server = socket.socketpair( - family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 - ) - client.setblocking(False) - server.setblocking(False) - mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) - mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) - await hass.async_block_till_done() - - server.close() # mock the server closing the connection on us - - unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) - - mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_CONN_LOST - mqtt_client_mock.on_socket_unregister_write(mqtt_client_mock, None, client) - mqtt_client_mock.on_socket_close(mqtt_client_mock, None, client) - mqtt_client_mock.on_disconnect(mqtt_client_mock, None, client) - await hass.async_block_till_done() - unsub() - - # Should have failed - assert len(recorded_calls) == 0 - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_server_sock_buffer_size( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test handling the socket buffer size fails.""" - mqtt_mock = await mqtt_mock_entry() - await hass.async_block_till_done() - assert mqtt_mock.connected is True - - mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS - - client, server = socket.socketpair( - family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 - ) - 
client.setblocking(False) - server.setblocking(False) - with patch.object(client, "setsockopt", side_effect=OSError("foo")): - mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) - mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) - await hass.async_block_till_done() - assert "Unable to increase the socket buffer size" in caplog.text - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_server_sock_buffer_size_with_websocket( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test handling the socket buffer size fails.""" - mqtt_mock = await mqtt_mock_entry() - await hass.async_block_till_done() - assert mqtt_mock.connected is True - - mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS - - client, server = socket.socketpair( - family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 - ) - client.setblocking(False) - server.setblocking(False) - - class FakeWebsocket(paho_mqtt.WebsocketWrapper): - def _do_handshake(self, *args, **kwargs): - pass - - wrapped_socket = FakeWebsocket(client, "127.0.01", 1, False, "/", None) - - with patch.object(client, "setsockopt", side_effect=OSError("foo")): - mqtt_client_mock.on_socket_open(mqtt_client_mock, None, wrapped_socket) - mqtt_client_mock.on_socket_register_write( - mqtt_client_mock, None, wrapped_socket - ) - await hass.async_block_till_done() - assert "Unable to increase the socket buffer size" in caplog.text - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def 
test_client_sock_failure_after_connect( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - recorded_calls: list[ReceiveMessage], - record_calls: MessageCallbackType, -) -> None: - """Test handling the socket connected and disconnected.""" - mqtt_mock = await mqtt_mock_entry() - # Fake that the client is connected - mqtt_mock().connected = True - await hass.async_block_till_done() - assert mqtt_mock.connected is True - - mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS - - client, server = socket.socketpair( - family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 - ) - client.setblocking(False) - server.setblocking(False) - mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) - mqtt_client_mock.on_socket_register_writer(mqtt_client_mock, None, client) - await hass.async_block_till_done() - - mqtt_client_mock.loop_write.side_effect = OSError("foo") - client.close() # close the client socket out from under the client - - assert mqtt_mock.connected is True - unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) - await hass.async_block_till_done() - - unsub() - # Should have failed - assert len(recorded_calls) == 0 - - -@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) -@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) -async def test_loop_write_failure( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test handling the socket connected and disconnected.""" - mqtt_mock = await mqtt_mock_entry() - await hass.async_block_till_done() - assert mqtt_mock.connected is True - - mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS - - client, server = 
socket.socketpair( - family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 - ) - client.setblocking(False) - server.setblocking(False) - mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) - mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) - mqtt_client_mock.loop_write.return_value = paho_mqtt.MQTT_ERR_CONN_LOST - mqtt_client_mock.loop_read.return_value = paho_mqtt.MQTT_ERR_CONN_LOST - - # Fill up the outgoing buffer to ensure that loop_write - # and loop_read are called that next time control is - # returned to the event loop - try: - for _ in range(1000): - server.send(b"long" * 100) - except BlockingIOError: - pass - - server.close() - # Once for the reader callback - await hass.async_block_till_done() - # Another for the writer callback - await hass.async_block_till_done() - # Final for the disconnect callback - await hass.async_block_till_done() - - assert "Disconnected from MQTT server mock-broker:1883" in caplog.text - - @pytest.mark.parametrize( "attr", [ @@ -4707,6 +2472,6 @@ async def test_loop_write_failure( "valid_subscribe_topic", ], ) -async def test_mqtt_integration_level_imports(hass: HomeAssistant, attr: str) -> None: +async def test_mqtt_integration_level_imports(attr: str) -> None: """Test mqtt integration level public published imports are available.""" assert hasattr(mqtt, attr) diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index a258339e9cc..4906f6cfda3 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -91,8 +91,7 @@ DEFAULT_CONFIG = { ], ) async def test_run_lawn_mower_setup_and_state_updates( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test that it sets up correctly fetches the given payload.""" await mqtt_mock_entry() @@ -442,11 +441,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: 
"""Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - lawn_mower.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, lawn_mower.DOMAIN, DEFAULT_CONFIG ) @@ -457,26 +452,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - lawn_mower.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, lawn_mower.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - lawn_mower.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, lawn_mower.DOMAIN, DEFAULT_CONFIG ) @@ -509,21 +494,15 @@ async def test_unique_id( async def test_discovery_removal_lawn_mower( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered lawn_mower.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][lawn_mower.DOMAIN]) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, lawn_mower.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, lawn_mower.DOMAIN, data) async def test_discovery_update_lawn_mower( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered lawn_mower.""" config1 = { @@ -540,14 +519,12 @@ async def test_discovery_update_lawn_mower( } await help_test_discovery_update( - hass, 
mqtt_mock_entry, caplog, lawn_mower.DOMAIN, config1, config2 + hass, mqtt_mock_entry, lawn_mower.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_lawn_mower( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered lawn_mower.""" data1 = '{ "name": "Beer", "activity_state_topic": "test-topic", "command_topic": "test-topic", "actions": ["milk", "beer"]}' @@ -555,27 +532,20 @@ async def test_discovery_update_unchanged_lawn_mower( "homeassistant.components.mqtt.lawn_mower.MqttLawnMower.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - lawn_mower.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, lawn_mower.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "invalid" }' data2 = '{ "name": "Milk", "activity_state_topic": "test-topic", "pause_command_topic": "test-topic"}' await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, lawn_mower.DOMAIN, data1, data2 + hass, mqtt_mock_entry, lawn_mower.DOMAIN, data1, data2 ) @@ -791,8 +761,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = lawn_mower.DOMAIN @@ -846,8 +815,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: 
MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = lawn_mower.DOMAIN diff --git a/tests/components/mqtt/test_legacy_vacuum.py b/tests/components/mqtt/test_legacy_vacuum.py index e4f5e3cd481..9b45b65d2cc 100644 --- a/tests/components/mqtt/test_legacy_vacuum.py +++ b/tests/components/mqtt/test_legacy_vacuum.py @@ -23,7 +23,7 @@ DEFAULT_CONFIG = {mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}} [ ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "legacy"}}}, True), ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}}, False), - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "state"}}}, False), + ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "state"}}}, True), ], ) async def test_removed_support_yaml( @@ -39,8 +39,8 @@ async def test_removed_support_yaml( if removed: assert entity is None assert ( - "The support for the `legacy` MQTT " - "vacuum schema has been removed" in caplog.text + "The 'schema' option has been removed, " + "please remove it from your configuration" in caplog.text ) else: assert entity is not None @@ -51,7 +51,7 @@ async def test_removed_support_yaml( [ ({"name": "test", "schema": "legacy"}, True), ({"name": "test"}, False), - ({"name": "test", "schema": "state"}, False), + ({"name": "test", "schema": "state"}, True), ], ) async def test_removed_support_discovery( @@ -69,12 +69,15 @@ async def test_removed_support_discovery( await hass.async_block_till_done() entity = hass.states.get("vacuum.test") + assert entity is not None if removed: - assert entity is None assert ( - "The support for the `legacy` MQTT " - "vacuum schema has been removed" in caplog.text + "The 'schema' option has been removed, " + "please remove it from your configuration" in caplog.text ) else: - assert entity is not None + assert ( + "The 'schema' option has been removed, " + "please remove it from your configuration" not in caplog.text + ) diff --git a/tests/components/mqtt/test_light.py 
b/tests/components/mqtt/test_light.py index 492bc6806da..18815281f63 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -2492,11 +2492,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -2507,26 +2503,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, light.DOMAIN, DEFAULT_CONFIG ) @@ -2561,9 +2547,7 @@ async def test_unique_id( async def test_discovery_removal_light( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered light.""" data = ( @@ -2571,13 +2555,11 @@ async def test_discovery_removal_light( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, light.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, light.DOMAIN, data) async def test_discovery_ignores_extra_keys( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: 
pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test discovery ignores extra keys that are not blocked.""" await mqtt_mock_entry() @@ -2591,9 +2573,7 @@ async def test_discovery_ignores_extra_keys( async def test_discovery_update_light_topic_and_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered light.""" config1 = { @@ -2838,7 +2818,6 @@ async def test_discovery_update_light_topic_and_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, light.DOMAIN, config1, config2, @@ -2848,9 +2827,7 @@ async def test_discovery_update_light_topic_and_template( async def test_discovery_update_light_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered light.""" config1 = { @@ -3053,7 +3030,6 @@ async def test_discovery_update_light_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, light.DOMAIN, config1, config2, @@ -3063,9 +3039,7 @@ async def test_discovery_update_light_template( async def test_discovery_update_unchanged_light( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered light.""" data1 = ( @@ -3077,20 +3051,13 @@ async def test_discovery_update_unchanged_light( "homeassistant.components.mqtt.light.schema_basic.MqttLight.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, light.DOMAIN, data1, discovery_update ) 
@pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' @@ -3099,9 +3066,7 @@ async def test_discovery_broken( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, light.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, light.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -3320,8 +3285,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN @@ -3403,7 +3367,6 @@ async def test_encoding_subscribable_topics( async def test_encoding_subscribable_topics_brightness( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, topic: str, value: str, attribute: str, @@ -3615,8 +3578,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = light.DOMAIN diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 739240a352c..829222e0304 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -185,7 +185,6 @@ class JsonValidator: "hass_config", [{mqtt.DOMAIN: {light.DOMAIN: {"schema": "json", "name": "test"}}}] ) async def test_fail_setup_if_no_command_topic( - hass: HomeAssistant, mqtt_mock_entry: 
MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -204,7 +203,6 @@ async def test_fail_setup_if_no_command_topic( ], ) async def test_fail_setup_if_color_mode_deprecated( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -233,7 +231,6 @@ async def test_fail_setup_if_color_mode_deprecated( ids=["color_temp", "hs", "rgb", "xy", "color_temp, rgb"], ) async def test_warning_if_color_mode_flags_are_used( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, color_modes: tuple[str, ...], @@ -316,7 +313,6 @@ async def test_warning_on_discovery_if_color_mode_flags_are_used( ids=["color_temp"], ) async def test_warning_if_color_mode_option_flag_is_used( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -393,7 +389,6 @@ async def test_warning_on_discovery_if_color_mode_option_flag_is_used( ], ) async def test_fail_setup_if_color_modes_invalid( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, error: str, @@ -421,8 +416,7 @@ async def test_fail_setup_if_color_modes_invalid( ], ) async def test_single_color_mode( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setup with single color_mode.""" await mqtt_mock_entry() @@ -448,8 +442,7 @@ async def test_single_color_mode( @pytest.mark.parametrize("hass_config", [COLOR_MODES_CONFIG]) async def test_turn_on_with_unknown_color_mode_optimistic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setup and turn with unknown color_mode in optimistic mode.""" await mqtt_mock_entry() @@ -486,8 +479,7 @@ async def test_turn_on_with_unknown_color_mode_optimistic( ], ) async def 
test_controlling_state_with_unknown_color_mode( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setup and turn with unknown color_mode in optimistic mode.""" await mqtt_mock_entry() @@ -2374,11 +2366,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -2389,26 +2377,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, light.DOMAIN, DEFAULT_CONFIG ) @@ -2445,25 +2423,15 @@ async def test_unique_id( async def test_discovery_removal( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered mqtt_json lights.""" data = '{ "name": "test", "schema": "json", "command_topic": "test_topic" }' - await help_test_discovery_removal( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - data, - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, light.DOMAIN, data) async def test_discovery_update_light( - hass: 
HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered light.""" config1 = { @@ -2479,19 +2447,12 @@ async def test_discovery_update_light( "command_topic": "test_topic", } await help_test_discovery_update( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - config1, - config2, + hass, mqtt_mock_entry, light.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_light( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered light.""" data1 = ( @@ -2504,20 +2465,13 @@ async def test_discovery_update_unchanged_light( "homeassistant.components.mqtt.light.schema_json.MqttLightJson.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, light.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' @@ -2527,14 +2481,7 @@ async def test_discovery_broken( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_broken( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - data1, - data2, - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, light.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -2703,8 +2650,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + 
hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index da6195fa32e..d570454a6bf 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -978,11 +978,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -993,26 +989,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, light.DOMAIN, DEFAULT_CONFIG ) @@ -1053,9 +1039,7 @@ async def test_unique_id( async def test_discovery_removal( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered mqtt_json lights.""" data = ( @@ -1065,13 +1049,11 @@ async def test_discovery_removal( ' "command_on_template": "on",' ' "command_off_template": "off"}' ) - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, light.DOMAIN, data) + await 
help_test_discovery_removal(hass, mqtt_mock_entry, light.DOMAIN, data) async def test_discovery_update_light( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered light.""" config1 = { @@ -1091,14 +1073,12 @@ async def test_discovery_update_light( "command_off_template": "off", } await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, light.DOMAIN, config1, config2 + hass, mqtt_mock_entry, light.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_light( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered light.""" data1 = ( @@ -1113,20 +1093,13 @@ async def test_discovery_update_unchanged_light( "homeassistant.components.mqtt.light.schema_template.MqttLightTemplate.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, light.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' @@ -1138,9 +1111,7 @@ async def test_discovery_broken( ' "command_on_template": "on",' ' "command_off_template": "off"}' ) - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, light.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, light.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -1309,8 +1280,7 @@ async def 
test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN @@ -1364,8 +1334,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = light.DOMAIN diff --git a/tests/components/mqtt/test_lock.py b/tests/components/mqtt/test_lock.py index c9c2928f991..331f21a0a7c 100644 --- a/tests/components/mqtt/test_lock.py +++ b/tests/components/mqtt/test_lock.py @@ -757,11 +757,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, - mqtt_mock_entry, - lock.DOMAIN, - DEFAULT_CONFIG, - MQTT_LOCK_ATTRIBUTES_BLOCKED, + hass, mqtt_mock_entry, lock.DOMAIN, DEFAULT_CONFIG, MQTT_LOCK_ATTRIBUTES_BLOCKED ) @@ -781,11 +777,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - lock.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, lock.DOMAIN, DEFAULT_CONFIG ) @@ -796,22 +788,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - lock.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, lock.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: 
MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, mqtt_mock_entry, caplog, lock.DOMAIN, DEFAULT_CONFIG + hass, mqtt_mock_entry, lock.DOMAIN, DEFAULT_CONFIG ) @@ -846,19 +832,15 @@ async def test_unique_id( async def test_discovery_removal_lock( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered lock.""" data = '{ "name": "test", "command_topic": "test_topic" }' - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, lock.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, lock.DOMAIN, data) async def test_discovery_update_lock( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered lock.""" config1 = { @@ -874,14 +856,12 @@ async def test_discovery_update_lock( "availability_topic": "availability_topic2", } await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, lock.DOMAIN, config1, config2 + hass, mqtt_mock_entry, lock.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_lock( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered lock.""" data1 = ( @@ -893,27 +873,18 @@ async def test_discovery_update_unchanged_lock( "homeassistant.components.mqtt.lock.MqttLock.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - lock.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, lock.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - 
hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "command_topic": "test_topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, lock.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, lock.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -1025,8 +996,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = lock.DOMAIN @@ -1076,8 +1046,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = lock.DOMAIN diff --git a/tests/components/mqtt/test_mixins.py b/tests/components/mqtt/test_mixins.py index e46f0b56c15..5b7984cad62 100644 --- a/tests/components/mqtt/test_mixins.py +++ b/tests/components/mqtt/test_mixins.py @@ -15,7 +15,7 @@ from homeassistant.core import CoreState, HomeAssistant, callback from homeassistant.helpers import device_registry as dr, issue_registry as ir from tests.common import MockConfigEntry, async_capture_events, async_fire_mqtt_message -from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient +from tests.typing import MqttMockHAClientGenerator @pytest.mark.parametrize( @@ -37,8 +37,7 @@ from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient ], ) async def test_availability_with_shared_state_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, 
mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the state is not changed twice. @@ -295,11 +294,10 @@ async def test_availability_with_shared_state_topic( ], ) @patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@pytest.mark.usefixtures("mqtt_client_mock") async def test_default_entity_and_device_name( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - mqtt_client_mock: MqttMockPahoClient, - mqtt_config_entry_data, caplog: pytest.LogCaptureFixture, entity_id: str, friendly_name: str, @@ -335,14 +333,13 @@ async def test_default_entity_and_device_name( # Assert that no issues ware registered assert len(events) == 0 - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) # Assert that no issues ware registered assert len(events) == 0 async def test_name_attribute_is_set_or_not( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test frendly name with device_class set. 
diff --git a/tests/components/mqtt/test_notify.py b/tests/components/mqtt/test_notify.py index bc833b79eb0..4837ee214c4 100644 --- a/tests/components/mqtt/test_notify.py +++ b/tests/components/mqtt/test_notify.py @@ -199,11 +199,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - notify.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, notify.DOMAIN, DEFAULT_CONFIG ) @@ -214,26 +210,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - notify.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, notify.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - notify.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, notify.DOMAIN, DEFAULT_CONFIG ) @@ -266,21 +252,15 @@ async def test_unique_id( async def test_discovery_removal_notify( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered notify.""" data = '{ "name": "test", "command_topic": "test_topic" }' - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, notify.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, notify.DOMAIN, data) async def test_discovery_update_notify( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: 
HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered notify.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][notify.DOMAIN]) @@ -289,19 +269,12 @@ async def test_discovery_update_notify( config2["name"] = "Milk" await help_test_discovery_update( - hass, - mqtt_mock_entry, - caplog, - notify.DOMAIN, - config1, - config2, + hass, mqtt_mock_entry, notify.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_notify( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered notify.""" data1 = ( @@ -313,27 +286,18 @@ async def test_discovery_update_unchanged_notify( "homeassistant.components.mqtt.notify.MqttNotify.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - notify.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, notify.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "command_topic": "test_topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, notify.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, notify.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -463,8 +427,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = notify.DOMAIN 
diff --git a/tests/components/mqtt/test_number.py b/tests/components/mqtt/test_number.py index b0f9e79cb3e..44652681fc3 100644 --- a/tests/components/mqtt/test_number.py +++ b/tests/components/mqtt/test_number.py @@ -557,11 +557,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - number.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, number.DOMAIN, DEFAULT_CONFIG ) @@ -572,26 +568,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - number.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, number.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - number.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, number.DOMAIN, DEFAULT_CONFIG ) @@ -626,21 +612,15 @@ async def test_unique_id( async def test_discovery_removal_number( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered number.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][number.DOMAIN]) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, number.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, number.DOMAIN, data) async def test_discovery_update_number( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: 
HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered number.""" config1 = { @@ -655,14 +635,12 @@ async def test_discovery_update_number( } await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, number.DOMAIN, config1, config2 + hass, mqtt_mock_entry, number.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_number( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered number.""" data1 = ( @@ -672,20 +650,13 @@ async def test_discovery_update_unchanged_number( "homeassistant.components.mqtt.number.MqttNumber.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - number.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, number.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' @@ -693,9 +664,7 @@ async def test_discovery_broken( '{ "name": "Milk", "state_topic": "test-topic", "command_topic": "test-topic"}' ) - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, number.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, number.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -814,7 +783,6 @@ async def test_min_max_step_attributes( ], ) async def test_invalid_min_max_attributes( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -894,7 +862,7 @@ async def test_default_mode( async def test_mode( hass: HomeAssistant, 
mqtt_mock_entry: MqttMockHAClientGenerator, - mode, + mode: str, ) -> None: """Test mode.""" await mqtt_mock_entry() @@ -1053,8 +1021,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = number.DOMAIN @@ -1105,8 +1072,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = number.DOMAIN diff --git a/tests/components/mqtt/test_scene.py b/tests/components/mqtt/test_scene.py index 3e9eacd3be2..d78dbe5c003 100644 --- a/tests/components/mqtt/test_scene.py +++ b/tests/components/mqtt/test_scene.py @@ -183,19 +183,15 @@ async def test_unique_id( async def test_discovery_removal_scene( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered scene.""" data = '{ "name": "test", "command_topic": "test_topic" }' - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, scene.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, scene.DOMAIN, data) async def test_discovery_update_payload( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered scene.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][scene.DOMAIN]) @@ -206,19 +202,12 @@ async def test_discovery_update_payload( config2["payload_on"] = "ACTIVATE" await help_test_discovery_update( - hass, - mqtt_mock_entry, - caplog, - scene.DOMAIN, - config1, - config2, + 
hass, mqtt_mock_entry, scene.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_scene( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered scene.""" data1 = '{ "name": "Beer", "command_topic": "test_topic" }' @@ -226,27 +215,18 @@ async def test_discovery_update_unchanged_scene( "homeassistant.components.mqtt.scene.MqttScene.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - scene.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, scene.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "command_topic": "test_topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, scene.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, scene.DOMAIN, data1, data2) async def test_setting_attribute_via_mqtt_json_message( @@ -283,11 +263,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - scene.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, scene.DOMAIN, DEFAULT_CONFIG ) @@ -298,26 +274,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - scene.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, scene.DOMAIN, 
DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - scene.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, scene.DOMAIN, DEFAULT_CONFIG ) @@ -416,8 +382,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = scene.DOMAIN @@ -440,8 +405,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = scene.DOMAIN diff --git a/tests/components/mqtt/test_select.py b/tests/components/mqtt/test_select.py index b8c55dd2ffb..60eb4893760 100644 --- a/tests/components/mqtt/test_select.py +++ b/tests/components/mqtt/test_select.py @@ -67,9 +67,7 @@ DEFAULT_CONFIG = { } -def _test_run_select_setup_params( - topic: str, -) -> Generator[tuple[ConfigType, str], None]: +def _test_run_select_setup_params(topic: str) -> Generator[tuple[ConfigType, str]]: yield ( { mqtt.DOMAIN: { @@ -407,11 +405,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - select.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, select.DOMAIN, DEFAULT_CONFIG ) @@ -431,17 +425,11 @@ async def test_update_with_json_attrs_bad_json( async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: 
MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - select.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, select.DOMAIN, DEFAULT_CONFIG ) @@ -478,21 +466,15 @@ async def test_unique_id( async def test_discovery_removal_select( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered select.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][select.DOMAIN]) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, select.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, select.DOMAIN, data) async def test_discovery_update_select( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered select.""" config1 = { @@ -509,14 +491,12 @@ async def test_discovery_update_select( } await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, select.DOMAIN, config1, config2 + hass, mqtt_mock_entry, select.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_select( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered select.""" data1 = '{ "name": "Beer", "state_topic": "test-topic", "command_topic": "test-topic", "options": ["milk", "beer"]}' @@ -524,28 +504,19 @@ async def test_discovery_update_unchanged_select( "homeassistant.components.mqtt.select.MqttSelect.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - 
hass, - mqtt_mock_entry, - caplog, - select.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, select.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "state_topic": "test-topic", "command_topic": "test-topic", "options": ["milk", "beer"]}' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, select.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, select.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -620,7 +591,7 @@ async def test_entity_debug_info_message( def _test_options_attributes_options_config( request: tuple[list[str]], -) -> Generator[tuple[ConfigType, list[str]], None]: +) -> Generator[tuple[ConfigType, list[str]]]: for option in request: yield ( { @@ -642,9 +613,7 @@ def _test_options_attributes_options_config( _test_options_attributes_options_config((["milk", "beer"], ["milk"], [])), ) async def test_options_attributes( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - options: list[str], + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, options: list[str] ) -> None: """Test options attribute.""" await mqtt_mock_entry() @@ -728,8 +697,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = select.DOMAIN @@ -782,8 +750,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, 
mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = select.DOMAIN diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index bde85abf3fb..4b117aaa4d5 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -617,9 +617,7 @@ async def test_setting_sensor_last_reset_via_mqtt_json_message( ], ) async def test_setting_sensor_last_reset_via_mqtt_json_message_2( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the setting of the value via MQTT with JSON payload.""" await hass.async_block_till_done() @@ -810,9 +808,7 @@ async def test_discovery_update_availability( ], ) async def test_invalid_device_class( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test device_class option with invalid value.""" assert await mqtt_mock_entry() @@ -871,9 +867,7 @@ async def test_valid_device_class_and_uom( ], ) async def test_invalid_state_class( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test state_class option with invalid value.""" assert await mqtt_mock_entry() @@ -954,11 +948,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, sensor.DOMAIN, DEFAULT_CONFIG ) @@ -969,24 +959,17 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await 
help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, sensor.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( hass, mqtt_mock_entry, - caplog, sensor.DOMAIN, DEFAULT_CONFIG, ) @@ -1021,21 +1004,15 @@ async def test_unique_id( async def test_discovery_removal_sensor( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered sensor.""" data = '{ "name": "test", "state_topic": "test_topic" }' - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, sensor.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, sensor.DOMAIN, data) async def test_discovery_update_sensor_topic_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered sensor.""" config = {"name": "test", "state_topic": "test_topic"} @@ -1060,7 +1037,6 @@ async def test_discovery_update_sensor_topic_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, sensor.DOMAIN, config1, config2, @@ -1070,9 +1046,7 @@ async def test_discovery_update_sensor_topic_template( async def test_discovery_update_sensor_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered sensor.""" config = {"name": "test", "state_topic": 
"test_topic"} @@ -1095,7 +1069,6 @@ async def test_discovery_update_sensor_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, sensor.DOMAIN, config1, config2, @@ -1105,9 +1078,7 @@ async def test_discovery_update_sensor_template( async def test_discovery_update_unchanged_sensor( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered sensor.""" data1 = '{ "name": "Beer", "state_topic": "test_topic" }' @@ -1115,27 +1086,18 @@ async def test_discovery_update_unchanged_sensor( "homeassistant.components.mqtt.sensor.MqttSensor.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - sensor.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, sensor.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer", "state_topic": "test_topic#" }' data2 = '{ "name": "Milk", "state_topic": "test_topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, sensor.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, sensor.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -1322,8 +1284,7 @@ async def test_value_template_with_entity_id( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = sensor.DOMAIN @@ -1478,8 +1439,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: 
HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = sensor.DOMAIN diff --git a/tests/components/mqtt/test_siren.py b/tests/components/mqtt/test_siren.py index 28b88e2793d..3f720e3ee3c 100644 --- a/tests/components/mqtt/test_siren.py +++ b/tests/components/mqtt/test_siren.py @@ -60,9 +60,7 @@ DEFAULT_CONFIG = { async def async_turn_on( - hass: HomeAssistant, - entity_id: str, - parameters: dict[str, Any], + hass: HomeAssistant, entity_id: str, parameters: dict[str, Any] ) -> None: """Turn all or specified siren on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} @@ -180,9 +178,7 @@ async def test_sending_mqtt_commands_and_optimistic( ], ) async def test_controlling_state_via_topic_and_json_message( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the controlling state via topic and JSON message.""" await mqtt_mock_entry() @@ -618,11 +614,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - siren.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, siren.DOMAIN, DEFAULT_CONFIG ) @@ -633,26 +625,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - siren.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, siren.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of 
discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - siren.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, siren.DOMAIN, DEFAULT_CONFIG ) @@ -687,9 +669,7 @@ async def test_unique_id( async def test_discovery_removal_siren( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered siren.""" data = ( @@ -697,13 +677,11 @@ async def test_discovery_removal_siren( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, siren.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, siren.DOMAIN, data) async def test_discovery_update_siren_topic_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered siren.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][siren.DOMAIN]) @@ -730,7 +708,6 @@ async def test_discovery_update_siren_topic_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, siren.DOMAIN, config1, config2, @@ -740,9 +717,7 @@ async def test_discovery_update_siren_topic_template( async def test_discovery_update_siren_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered siren.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][siren.DOMAIN]) @@ -767,7 +742,6 @@ async def test_discovery_update_siren_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, siren.DOMAIN, config1, config2, @@ -801,8 +775,7 @@ async def test_discovery_update_siren_template( ], ) async def 
test_command_templates( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test siren with command templates optimistic.""" mqtt_mock = await mqtt_mock_entry() @@ -867,9 +840,7 @@ async def test_command_templates( async def test_discovery_update_unchanged_siren( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered siren.""" data1 = ( @@ -884,7 +855,6 @@ async def test_discovery_update_unchanged_siren( await help_test_discovery_update_unchanged( hass, mqtt_mock_entry, - caplog, siren.DOMAIN, data1, discovery_update, @@ -893,9 +863,7 @@ async def test_discovery_update_unchanged_siren( @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' @@ -904,9 +872,7 @@ async def test_discovery_broken( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, siren.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, siren.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -1026,8 +992,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = siren.DOMAIN @@ -1037,9 +1002,7 @@ async def test_reloadable( @pytest.mark.parametrize( ("topic", "value", "attribute", "attribute_value"), - [ - ("state_topic", "ON", None, "on"), - ], + 
[("state_topic", "ON", None, "on")], ) async def test_encoding_subscribable_topics( hass: HomeAssistant, @@ -1077,8 +1040,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = siren.DOMAIN diff --git a/tests/components/mqtt/test_subscription.py b/tests/components/mqtt/test_subscription.py index 7247458a667..86279b2006c 100644 --- a/tests/components/mqtt/test_subscription.py +++ b/tests/components/mqtt/test_subscription.py @@ -2,8 +2,6 @@ from unittest.mock import ANY -import pytest - from homeassistant.components.mqtt.subscription import ( async_prepare_subscribe_topics, async_subscribe_topics, @@ -16,9 +14,7 @@ from tests.typing import MqttMockHAClientGenerator async def test_subscribe_topics( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test subscription to topics.""" await mqtt_mock_entry() @@ -69,9 +65,7 @@ async def test_subscribe_topics( async def test_modify_topics( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test modification of topics.""" await mqtt_mock_entry() @@ -136,9 +130,7 @@ async def test_modify_topics( async def test_qos_encoding_default( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test default qos and encoding.""" mqtt_mock = await mqtt_mock_entry() @@ -158,9 +150,7 @@ async def test_qos_encoding_default( async def test_qos_encoding_custom( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - 
caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test custom qos and encoding.""" mqtt_mock = await mqtt_mock_entry() @@ -187,9 +177,7 @@ async def test_qos_encoding_custom( async def test_no_change( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test subscription to topics without change.""" mqtt_mock = await mqtt_mock_entry() diff --git a/tests/components/mqtt/test_switch.py b/tests/components/mqtt/test_switch.py index b497d4a2f52..fddbfd8fbe2 100644 --- a/tests/components/mqtt/test_switch.py +++ b/tests/components/mqtt/test_switch.py @@ -191,6 +191,50 @@ async def test_sending_inital_state_and_optimistic( assert state.attributes.get(ATTR_ASSUMED_STATE) +@pytest.mark.parametrize( + "hass_config", + [ + { + mqtt.DOMAIN: { + switch.DOMAIN: { + "name": "test", + "command_topic": "command-topic", + "command_template": '{"state": "{{ value }}"}', + "payload_on": "beer on", + "payload_off": "beer off", + "qos": "2", + } + } + } + ], +) +async def test_sending_mqtt_commands_with_command_template( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator +) -> None: + """Test the sending MQTT commands using a command template.""" + fake_state = State("switch.test", "on") + mock_restore_cache(hass, (fake_state,)) + + mqtt_mock = await mqtt_mock_entry() + + state = hass.states.get("switch.test") + assert state.state == STATE_ON + assert state.attributes.get(ATTR_ASSUMED_STATE) + + await common.async_turn_on(hass, "switch.test") + + mqtt_mock.async_publish.assert_called_once_with( + "command-topic", '{"state": "beer on"}', 2, False + ) + mqtt_mock.async_publish.reset_mock() + + await common.async_turn_off(hass, "switch.test") + + mqtt_mock.async_publish.assert_called_once_with( + "command-topic", '{"state": "beer off"}', 2, False + ) + + @pytest.mark.parametrize( 
"hass_config", [ @@ -379,11 +423,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - switch.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, switch.DOMAIN, DEFAULT_CONFIG ) @@ -394,26 +434,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - switch.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, switch.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - switch.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, switch.DOMAIN, DEFAULT_CONFIG ) @@ -448,9 +478,7 @@ async def test_unique_id( async def test_discovery_removal_switch( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered switch.""" data = ( @@ -458,15 +486,11 @@ async def test_discovery_removal_switch( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, switch.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, switch.DOMAIN, data) async def test_discovery_update_switch_topic_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered switch.""" 
config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][switch.DOMAIN]) @@ -493,7 +517,6 @@ async def test_discovery_update_switch_topic_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, switch.DOMAIN, config1, config2, @@ -503,9 +526,7 @@ async def test_discovery_update_switch_topic_template( async def test_discovery_update_switch_template( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered switch.""" config1 = copy.deepcopy(DEFAULT_CONFIG[mqtt.DOMAIN][switch.DOMAIN]) @@ -530,7 +551,6 @@ async def test_discovery_update_switch_template( await help_test_discovery_update( hass, mqtt_mock_entry, - caplog, switch.DOMAIN, config1, config2, @@ -540,9 +560,7 @@ async def test_discovery_update_switch_template( async def test_discovery_update_unchanged_switch( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered switch.""" data1 = ( @@ -557,7 +575,6 @@ async def test_discovery_update_unchanged_switch( await help_test_discovery_update_unchanged( hass, mqtt_mock_entry, - caplog, switch.DOMAIN, data1, discovery_update, @@ -566,9 +583,7 @@ async def test_discovery_update_unchanged_switch( @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' @@ -577,9 +592,7 @@ async def test_discovery_broken( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, switch.DOMAIN, data1, data2 - ) + await 
help_test_discovery_broken(hass, mqtt_mock_entry, switch.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -697,8 +710,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = switch.DOMAIN @@ -748,8 +760,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = switch.DOMAIN diff --git a/tests/components/mqtt/test_tag.py b/tests/components/mqtt/test_tag.py index e70c06c2c4a..adebd157588 100644 --- a/tests/components/mqtt/test_tag.py +++ b/tests/components/mqtt/test_tag.py @@ -1,11 +1,11 @@ """The tests for MQTT tag scanner.""" +from collections.abc import Generator import copy import json from unittest.mock import ANY, AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.mqtt.const import DOMAIN as MQTT_DOMAIN @@ -20,7 +20,7 @@ from tests.common import ( async_fire_mqtt_message, async_get_device_automations, ) -from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, WebSocketGenerator +from tests.typing import MqttMockHAClientGenerator, WebSocketGenerator DEFAULT_CONFIG_DEVICE = { "device": {"identifiers": ["0AFFD2"]}, @@ -102,9 +102,7 @@ async def test_if_fires_on_mqtt_message_with_device( async def test_if_fires_on_mqtt_message_without_device( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock ) -> None: """Test tag scanning, without device.""" await 
mqtt_mock_entry() @@ -140,9 +138,7 @@ async def test_if_fires_on_mqtt_message_with_template( async def test_strip_tag_id( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock ) -> None: """Test strip whitespace from tag_id.""" await mqtt_mock_entry() @@ -208,9 +204,7 @@ async def test_if_fires_on_mqtt_message_after_update_with_device( async def test_if_fires_on_mqtt_message_after_update_without_device( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock ) -> None: """Test tag scanning after update.""" await mqtt_mock_entry() @@ -359,9 +353,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_with_device( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_without_device( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock ) -> None: """Test tag scanning not firing after removal.""" await mqtt_mock_entry() @@ -587,7 +579,7 @@ async def test_cleanup_tag( identifiers={("mqtt", "helloworld")} ) assert device_entry1 is not None - assert device_entry1.config_entries == [config_entry.entry_id, mqtt_entry.entry_id] + assert device_entry1.config_entries == {config_entry.entry_id, mqtt_entry.entry_id} device_entry2 = device_registry.async_get_device(identifiers={("mqtt", "hejhopp")}) assert device_entry2 is not None @@ -599,7 +591,7 @@ async def test_cleanup_tag( identifiers={("mqtt", "helloworld")} ) assert device_entry1 is not None - assert device_entry1.config_entries == [mqtt_entry.entry_id] + assert device_entry1.config_entries == {mqtt_entry.entry_id} device_entry2 = device_registry.async_get_device(identifiers={("mqtt", "hejhopp")}) assert device_entry2 is not None 
mqtt_mock.async_publish.assert_not_called() @@ -904,11 +896,9 @@ async def test_update_with_bad_config_not_breaks_discovery( tag_mock.assert_called_once_with(ANY, "12345", ANY) +@pytest.mark.usefixtures("mqtt_mock") async def test_unload_entry( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - tag_mock: AsyncMock, + hass: HomeAssistant, device_registry: dr.DeviceRegistry, tag_mock: AsyncMock ) -> None: """Test unloading the MQTT entry.""" @@ -934,12 +924,9 @@ async def test_unload_entry( tag_mock.assert_not_called() +@pytest.mark.usefixtures("mqtt_mock", "tag_mock") async def test_value_template_fails( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - tag_mock: AsyncMock, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test the rendering of MQTT value template fails.""" config = copy.deepcopy(DEFAULT_CONFIG_DEVICE) diff --git a/tests/components/mqtt/test_text.py b/tests/components/mqtt/test_text.py index 2c58cae690d..ebcb835844d 100644 --- a/tests/components/mqtt/test_text.py +++ b/tests/components/mqtt/test_text.py @@ -251,9 +251,7 @@ async def test_controlling_validation_state_via_topic( ], ) async def test_attribute_validation_max_greater_then_min( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test the validation of min and max configuration attributes.""" assert await mqtt_mock_entry() @@ -276,9 +274,7 @@ async def test_attribute_validation_max_greater_then_min( ], ) async def test_attribute_validation_max_not_greater_then_max_state_length( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test the max value of of max 
configuration attribute.""" assert await mqtt_mock_entry() @@ -436,13 +432,7 @@ async def test_default_availability_payload( } } await help_test_default_availability_payload( - hass, - mqtt_mock_entry, - text.DOMAIN, - config, - True, - "state-topic", - "some state", + hass, mqtt_mock_entry, text.DOMAIN, config, True, "state-topic", "some state" ) @@ -461,13 +451,7 @@ async def test_custom_availability_payload( } await help_test_custom_availability_payload( - hass, - mqtt_mock_entry, - text.DOMAIN, - config, - True, - "state-topic", - "1", + hass, mqtt_mock_entry, text.DOMAIN, config, True, "state-topic", "1" ) @@ -505,11 +489,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - text.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, text.DOMAIN, DEFAULT_CONFIG ) @@ -520,26 +500,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - text.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, text.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - text.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, text.DOMAIN, DEFAULT_CONFIG ) @@ -574,9 +544,7 @@ async def test_unique_id( async def test_discovery_removal_text( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered text 
entity.""" data = ( @@ -584,13 +552,11 @@ async def test_discovery_removal_text( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, text.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, text.DOMAIN, data) async def test_discovery_text_update( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered text entity.""" config1 = { @@ -605,14 +571,12 @@ async def test_discovery_text_update( } await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, text.DOMAIN, config1, config2 + hass, mqtt_mock_entry, text.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_update( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered update.""" data1 = '{ "name": "Beer", "state_topic": "text-topic", "command_topic": "command-topic"}' @@ -620,32 +584,23 @@ async def test_discovery_update_unchanged_update( "homeassistant.components.mqtt.text.MqttTextEntity.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - text.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, text.DOMAIN, data1, discovery_update ) async def test_discovery_update_text( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered text entity.""" config1 = {"name": "Beer", "command_topic": "cmd-topic1"} config2 = {"name": "Milk", "command_topic": "cmd-topic2"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, text.DOMAIN, config1, 
config2 + hass, mqtt_mock_entry, text.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_climate( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered text entity.""" data1 = '{ "name": "Beer", "command_topic": "cmd-topic" }' @@ -653,20 +608,13 @@ async def test_discovery_update_unchanged_climate( "homeassistant.components.mqtt.text.MqttTextEntity.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - text.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, text.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' @@ -675,9 +623,7 @@ async def test_discovery_broken( ' "state_topic": "test_topic",' ' "command_topic": "test_topic" }' ) - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, text.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, text.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -784,8 +730,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = text.DOMAIN @@ -835,8 +780,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the 
config entry.""" domain = text.DOMAIN diff --git a/tests/components/mqtt/test_trigger.py b/tests/components/mqtt/test_trigger.py index 2e0506a02ab..5bf36849b13 100644 --- a/tests/components/mqtt/test_trigger.py +++ b/tests/components/mqtt/test_trigger.py @@ -9,7 +9,7 @@ from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_O from homeassistant.core import HassJobType, HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_fire_mqtt_message, async_mock_service, mock_component +from tests.common import async_fire_mqtt_message, mock_component from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator @@ -18,12 +18,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) async def setup_comp( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator @@ -34,7 +28,7 @@ async def setup_comp( async def test_if_fires_on_topic_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on topic match.""" assert await async_setup_component( @@ -57,9 +51,10 @@ async def test_if_fires_on_topic_match( async_fire_mqtt_message(hass, "test-topic", '{ "hello": "world" }') await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == 'mqtt - test-topic - { "hello": "world" } - world - 0' + service_calls[0].data["some"] + == 'mqtt - test-topic - { "hello": "world" } - world - 0' ) await hass.services.async_call( @@ -68,13 +63,15 @@ async def test_if_fires_on_topic_match( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 + 
async_fire_mqtt_message(hass, "test-topic", "test_payload") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_topic_and_payload_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on topic and payload match.""" assert await async_setup_component( @@ -94,11 +91,11 @@ async def test_if_fires_on_topic_and_payload_match( async_fire_mqtt_message(hass, "test-topic", "hello") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_topic_and_payload_match2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on topic and payload match. @@ -121,11 +118,11 @@ async def test_if_fires_on_topic_and_payload_match2( async_fire_mqtt_message(hass, "test-topic", "0") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_templated_topic_and_payload_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on templated topic and payload match.""" assert await async_setup_component( @@ -145,19 +142,19 @@ async def test_if_fires_on_templated_topic_and_payload_match( async_fire_mqtt_message(hass, "test-topic-", "foo") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, "test-topic-4", "foo") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, "test-topic-4", "bar") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_payload_template( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, 
service_calls: list[ServiceCall] ) -> None: """Test if message is fired on templated topic and payload match.""" assert await async_setup_component( @@ -178,19 +175,21 @@ async def test_if_fires_on_payload_template( async_fire_mqtt_message(hass, "test-topic", "hello") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, "test-topic", '{"unwanted_key":"hello"}') await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, "test-topic", '{"wanted_key":"hello"}') await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_non_allowed_templates( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + service_calls: list[ServiceCall], + caplog: pytest.LogCaptureFixture, ) -> None: """Test non allowed function in template.""" assert await async_setup_component( @@ -214,7 +213,7 @@ async def test_non_allowed_templates( async def test_if_not_fires_on_topic_but_no_payload_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is not fired on topic but no payload.""" assert await async_setup_component( @@ -234,11 +233,11 @@ async def test_if_not_fires_on_topic_but_no_payload_match( async_fire_mqtt_message(hass, "test-topic", "no-hello") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_encoding_default( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp + hass: HomeAssistant, service_calls: list[ServiceCall], setup_comp ) -> None: """Test default encoding.""" assert await async_setup_component( @@ -258,7 +257,7 @@ async def test_encoding_default( async def test_encoding_custom( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp + hass: HomeAssistant, service_calls: list[ServiceCall], 
setup_comp ) -> None: """Test default encoding.""" assert await async_setup_component( diff --git a/tests/components/mqtt/test_update.py b/tests/components/mqtt/test_update.py index bb80a0c274f..937b8cdebd0 100644 --- a/tests/components/mqtt/test_update.py +++ b/tests/components/mqtt/test_update.py @@ -504,11 +504,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - update.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, update.DOMAIN, DEFAULT_CONFIG ) @@ -519,26 +515,17 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - update.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, update.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - update.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, update.DOMAIN, DEFAULT_CONFIG ) @@ -573,21 +560,15 @@ async def test_unique_id( async def test_discovery_removal_update( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered update.""" data = json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][update.DOMAIN]) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, update.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, update.DOMAIN, data) async def test_discovery_update_update( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: 
pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered update.""" config1 = { @@ -602,14 +583,12 @@ async def test_discovery_update_update( } await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, update.DOMAIN, config1, config2 + hass, mqtt_mock_entry, update.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_update( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered update.""" data1 = '{ "name": "Beer", "state_topic": "installed-topic", "latest_version_topic": "latest-topic"}' @@ -617,28 +596,19 @@ async def test_discovery_update_unchanged_update( "homeassistant.components.mqtt.update.MqttUpdate.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - update.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, update.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk", "state_topic": "installed-topic", "latest_version_topic": "latest-topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, update.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, update.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -701,8 +671,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: 
MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = update.DOMAIN @@ -713,8 +682,7 @@ async def test_unload_entry( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = update.DOMAIN diff --git a/tests/components/mqtt/test_util.py b/tests/components/mqtt/test_util.py index 290f561e1ad..a3802de69da 100644 --- a/tests/components/mqtt/test_util.py +++ b/tests/components/mqtt/test_util.py @@ -1,22 +1,107 @@ """Test MQTT utils.""" +import asyncio from collections.abc import Callable +from datetime import timedelta from pathlib import Path from random import getrandbits import shutil import tempfile -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest from homeassistant.components import mqtt +from homeassistant.components.mqtt.models import MessageCallbackType +from homeassistant.components.mqtt.util import EnsureJobAfterCooldown from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState +from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import CoreState, HomeAssistant +from homeassistant.util.dt import utcnow -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed from tests.typing import MqttMockHAClient, MqttMockPahoClient +async def test_canceling_debouncer_on_shutdown( + hass: HomeAssistant, + record_calls: MessageCallbackType, + mock_debouncer: asyncio.Event, + setup_with_birth_msg_client_mock: MqttMockPahoClient, +) -> None: + """Test canceling the debouncer when HA shuts down.""" + mqtt_client_mock = setup_with_birth_msg_client_mock + # Mock we are past initial setup + await mock_debouncer.wait() + with patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 2): + mock_debouncer.clear() + await mqtt.async_subscribe(hass, 
"test/state1", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=0.1)) + # Stop HA so the scheduled debouncer task will be canceled + mqtt_client_mock.subscribe.reset_mock() + hass.bus.fire(EVENT_HOMEASSISTANT_STOP) + await mqtt.async_subscribe(hass, "test/state2", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=0.1)) + await mqtt.async_subscribe(hass, "test/state3", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=0.1)) + await mqtt.async_subscribe(hass, "test/state4", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=0.1)) + await mqtt.async_subscribe(hass, "test/state5", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) + await hass.async_block_till_done(wait_background_tasks=True) + # Assert the debouncer subscribe job was not executed + assert not mock_debouncer.is_set() + mqtt_client_mock.subscribe.assert_not_called() + + # Note thet the broker connection will not be disconnected gracefully + await hass.async_block_till_done() + async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) + await asyncio.sleep(0) + await hass.async_block_till_done(wait_background_tasks=True) + mqtt_client_mock.subscribe.assert_not_called() + mqtt_client_mock.disconnect.assert_not_called() + + +async def test_canceling_debouncer_normal( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test canceling the debouncer before completion.""" + + async def _async_myjob() -> None: + await asyncio.sleep(1.0) + + debouncer = EnsureJobAfterCooldown(0.0, _async_myjob) + debouncer.async_schedule() + await asyncio.sleep(0.01) + assert debouncer._task is not None + await debouncer.async_cleanup() + assert debouncer._task is None + + +async def test_canceling_debouncer_throws( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test canceling the debouncer when HA shuts down.""" + + async def _async_myjob() -> 
None: + await asyncio.sleep(1.0) + + debouncer = EnsureJobAfterCooldown(0.0, _async_myjob) + debouncer.async_schedule() + await asyncio.sleep(0.01) + assert debouncer._task is not None + # let debouncer._task fail by mocking it + with patch.object(debouncer, "_task") as task: + task.cancel = MagicMock(return_value=True) + await debouncer.async_cleanup() + assert "Error cleaning up task" in caplog.text + await hass.async_block_till_done() + async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) + await hass.async_block_till_done() + + async def help_create_test_certificate_file( hass: HomeAssistant, mock_temp_dir: str, diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index 0a06759c7e6..7fc4ff981fd 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -119,16 +119,13 @@ async def test_warning_schema_option( await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("vacuum.test") + # We do not fail if the schema option is still in the payload, but we log an error assert state is not None with caplog.at_level(logging.WARNING): assert ( - "The `schema` option is deprecated for MQTT vacuum, but it was used in a " - "discovery payload. Please contact the maintainer of the integration or " - "service that supplies the config, and suggest to remove the option." 
- in caplog.text + "The 'schema' option has been removed, " + "please remove it from your configuration" in caplog.text ) - assert "https://example.com/support" in caplog.text - assert "at discovery topic homeassistant/vacuum/bla/config" in caplog.text @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) @@ -507,11 +504,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - vacuum.DOMAIN, - DEFAULT_CONFIG_2, + hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2 ) @@ -522,26 +515,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - vacuum.DOMAIN, - DEFAULT_CONFIG_2, + hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2 ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - vacuum.DOMAIN, - DEFAULT_CONFIG_2, + hass, mqtt_mock_entry, vacuum.DOMAIN, DEFAULT_CONFIG_2 ) @@ -574,34 +557,27 @@ async def test_unique_id( async def test_discovery_removal_vacuum( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered vacuum.""" data = '{"name": "test", "command_topic": "test_topic"}' - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, vacuum.DOMAIN, data) async def test_discovery_update_vacuum( - hass: 
HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered vacuum.""" config1 = {"name": "Beer", "command_topic": "test_topic"} config2 = {"name": "Milk", "command_topic": "test_topic"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, config1, config2 + hass, mqtt_mock_entry, vacuum.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_vacuum( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, ) -> None: """Test update of discovered vacuum.""" data1 = '{"name": "Beer", "command_topic": "test_topic"}' @@ -609,27 +585,18 @@ async def test_discovery_update_unchanged_vacuum( "homeassistant.components.mqtt.vacuum.MqttStateVacuum.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - vacuum.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, vacuum.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{"name": "Beer", "command_topic": "test_topic#"}' data2 = '{"name": "Milk", "command_topic": "test_topic"}' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, vacuum.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -704,20 +671,8 @@ async def test_entity_debug_info_message( @pytest.mark.parametrize( ("service", "topic", "parameters", "payload", "template"), [ - ( - vacuum.SERVICE_START, - "command_topic", - None, - "start", - None, - ), - ( - 
vacuum.SERVICE_CLEAN_SPOT, - "command_topic", - None, - "clean_spot", - None, - ), + (vacuum.SERVICE_START, "command_topic", None, "start", None), + (vacuum.SERVICE_CLEAN_SPOT, "command_topic", None, "clean_spot", None), ( vacuum.SERVICE_SET_FAN_SPEED, "set_fan_speed_topic", @@ -732,13 +687,7 @@ async def test_entity_debug_info_message( "custom command", None, ), - ( - vacuum.SERVICE_STOP, - "command_topic", - None, - "stop", - None, - ), + (vacuum.SERVICE_STOP, "command_topic", None, "stop", None), ], ) async def test_publishing_with_custom_encoding( @@ -782,8 +731,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = vacuum.DOMAIN diff --git a/tests/components/mqtt/test_valve.py b/tests/components/mqtt/test_valve.py index 2efa30d096a..53a7190eaf3 100644 --- a/tests/components/mqtt/test_valve.py +++ b/tests/components/mqtt/test_valve.py @@ -306,8 +306,7 @@ async def test_state_via_state_topic_through_position( ], ) async def test_opening_closing_state_is_reset( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the controlling state via topic through position. 
@@ -734,11 +733,7 @@ async def test_controlling_valve_by_position( ) @pytest.mark.parametrize( ("position", "asserted_message"), - [ - (0, "0"), - (30, "30"), - (100, "100"), - ], + [(0, "0"), (30, "30"), (100, "100")], ) async def test_controlling_valve_by_set_valve_position( hass: HomeAssistant, @@ -842,12 +837,7 @@ async def test_controlling_valve_optimistic_by_set_valve_position( ) @pytest.mark.parametrize( ("position", "asserted_message"), - [ - (0, "-128"), - (30, "-52"), - (80, "76"), - (100, "127"), - ], + [(0, "-128"), (30, "-52"), (80, "76"), (100, "127")], ) async def test_controlling_valve_with_alt_range_by_set_valve_position( hass: HomeAssistant, @@ -1127,9 +1117,7 @@ async def test_valid_device_class( ], ) async def test_invalid_device_class( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test the setting of an invalid device class.""" assert await mqtt_mock_entry() @@ -1174,11 +1162,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - valve.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, valve.DOMAIN, DEFAULT_CONFIG ) @@ -1189,26 +1173,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - valve.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, valve.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - 
hass, - mqtt_mock_entry, - caplog, - valve.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, valve.DOMAIN, DEFAULT_CONFIG ) @@ -1241,32 +1215,26 @@ async def test_unique_id( async def test_discovery_removal_valve( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered valve.""" data = '{ "name": "test", "command_topic": "test_topic" }' - await help_test_discovery_removal(hass, mqtt_mock_entry, caplog, valve.DOMAIN, data) + await help_test_discovery_removal(hass, mqtt_mock_entry, valve.DOMAIN, data) async def test_discovery_update_valve( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered valve.""" config1 = {"name": "Beer", "command_topic": "test_topic"} config2 = {"name": "Milk", "command_topic": "test_topic"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, valve.DOMAIN, config1, config2 + hass, mqtt_mock_entry, valve.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_valve( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered valve.""" data1 = '{ "name": "Beer", "command_topic": "test_topic" }' @@ -1274,27 +1242,18 @@ async def test_discovery_update_unchanged_valve( "homeassistant.components.mqtt.valve.MqttValve.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - valve.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, valve.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: 
MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer", "command_topic": "test_topic#" }' data2 = '{ "name": "Milk", "command_topic": "test_topic" }' - await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, valve.DOMAIN, data1, data2 - ) + await help_test_discovery_broken(hass, mqtt_mock_entry, valve.DOMAIN, data1, data2) async def test_entity_device_info_with_connection( @@ -1406,8 +1365,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = valve.DOMAIN @@ -1459,8 +1417,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = valve.DOMAIN diff --git a/tests/components/mqtt/test_water_heater.py b/tests/components/mqtt/test_water_heater.py index a80ab59657f..7bab4a5e233 100644 --- a/tests/components/mqtt/test_water_heater.py +++ b/tests/components/mqtt/test_water_heater.py @@ -141,7 +141,7 @@ async def test_get_operation_modes( await mqtt_mock_entry() state = hass.states.get(ENTITY_WATER_HEATER) - assert [ + assert state.attributes.get("operation_list") == [ STATE_ECO, STATE_ELECTRIC, STATE_GAS, @@ -149,14 +149,12 @@ async def test_get_operation_modes( STATE_HIGH_DEMAND, STATE_PERFORMANCE, STATE_OFF, - ] == state.attributes.get("operation_list") + ] @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_set_operation_mode_bad_attr_and_state( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: 
HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting operation mode without required attribute.""" await mqtt_mock_entry() @@ -615,8 +613,7 @@ async def test_get_with_templates( ], ) async def test_set_and_templates( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting various attributes with templates.""" mqtt_mock = await mqtt_mock_entry() @@ -834,11 +831,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - water_heater.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, DEFAULT_CONFIG ) @@ -849,26 +842,16 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - water_heater.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( - hass, - mqtt_mock_entry, - caplog, - water_heater.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, water_heater.DOMAIN, DEFAULT_CONFIG ) @@ -933,34 +916,26 @@ async def test_encoding_subscribable_topics( async def test_discovery_removal_water_heater( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test removal of discovered water heater.""" data = 
json.dumps(DEFAULT_CONFIG[mqtt.DOMAIN][water_heater.DOMAIN]) - await help_test_discovery_removal( - hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, data - ) + await help_test_discovery_removal(hass, mqtt_mock_entry, water_heater.DOMAIN, data) async def test_discovery_update_water_heater( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered water heater.""" config1 = {"name": "Beer"} config2 = {"name": "Milk"} await help_test_discovery_update( - hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, config1, config2 + hass, mqtt_mock_entry, water_heater.DOMAIN, config1, config2 ) async def test_discovery_update_unchanged_water_heater( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered water heater.""" data1 = '{ "name": "Beer" }' @@ -968,26 +943,19 @@ async def test_discovery_update_unchanged_water_heater( "homeassistant.components.mqtt.water_heater.MqttWaterHeater.discovery_update" ) as discovery_update: await help_test_discovery_update_unchanged( - hass, - mqtt_mock_entry, - caplog, - water_heater.DOMAIN, - data1, - discovery_update, + hass, mqtt_mock_entry, water_heater.DOMAIN, data1, discovery_update ) @pytest.mark.no_fail_on_log_exception async def test_discovery_broken( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test handling of bad discovery message.""" data1 = '{ "name": "Beer", "mode_command_topic": "test_topic#" }' data2 = '{ "name": "Milk", "mode_command_topic": "test_topic" }' await help_test_discovery_broken( - hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, data1, data2 + hass, mqtt_mock_entry, 
water_heater.DOMAIN, data1, data2 ) @@ -1041,11 +1009,7 @@ async def test_entity_id_update_subscriptions( } } await help_test_entity_id_update_subscriptions( - hass, - mqtt_mock_entry, - water_heater.DOMAIN, - config, - ["test-topic", "avty-topic"], + hass, mqtt_mock_entry, water_heater.DOMAIN, config, ["test-topic", "avty-topic"] ) @@ -1221,8 +1185,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = water_heater.DOMAIN diff --git a/tests/components/mqtt_json/test_device_tracker.py b/tests/components/mqtt_json/test_device_tracker.py index a992c985057..36073c11a5d 100644 --- a/tests/components/mqtt_json/test_device_tracker.py +++ b/tests/components/mqtt_json/test_device_tracker.py @@ -1,12 +1,12 @@ """The tests for the JSON MQTT device tracker platform.""" +from collections.abc import AsyncGenerator import json import logging import os from unittest.mock import patch import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.device_tracker.legacy import ( DOMAIN as DT_DOMAIN, diff --git a/tests/components/mysensors/conftest.py b/tests/components/mysensors/conftest.py index f1b86c9ce5b..b6fce35a4c7 100644 --- a/tests/components/mysensors/conftest.py +++ b/tests/components/mysensors/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import AsyncGenerator, Callable, Generator from copy import deepcopy import json from typing import Any @@ -12,7 +12,6 @@ from mysensors import BaseSyncGateway from mysensors.persistence import MySensorsJSONDecoder from mysensors.sensor import Sensor import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from 
homeassistant.components.mysensors.config_flow import DEFAULT_BAUD_RATE diff --git a/tests/components/mystrom/__init__.py b/tests/components/mystrom/__init__.py index ac6ac1d8c54..8ee62996f92 100644 --- a/tests/components/mystrom/__init__.py +++ b/tests/components/mystrom/__init__.py @@ -173,3 +173,10 @@ class MyStromSwitchMock(MyStromDeviceMock): if not self._requested_state: return None return self._state["temperature"] + + @property + def uri(self) -> str | None: + """Return the URI.""" + if not self._requested_state: + return None + return f"http://{self._state["ip"]}" diff --git a/tests/components/mystrom/conftest.py b/tests/components/mystrom/conftest.py index f5405055805..af8d80ed27e 100644 --- a/tests/components/mystrom/conftest.py +++ b/tests/components/mystrom/conftest.py @@ -1,9 +1,9 @@ """Provide common mystrom fixtures and mocks.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.mystrom.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/myuplink/conftest.py b/tests/components/myuplink/conftest.py index dd05bedcaf4..9ede11146ef 100644 --- a/tests/components/myuplink/conftest.py +++ b/tests/components/myuplink/conftest.py @@ -1,5 +1,6 @@ """Test helpers for myuplink.""" +from collections.abc import AsyncGenerator, Generator import time from typing import Any from unittest.mock import MagicMock, patch @@ -7,7 +8,6 @@ from unittest.mock import MagicMock, patch from myuplink import Device, DevicePoint, System import orjson import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index 49340bd9e26..9ec5db0ea3b 100644 --- 
a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -951,5 +951,43 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "NIBEF F730 CU 3x400V", + "parameterId": "47041", + "parameterName": "comfort mode", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-05-22T15:02:03+00:00", + "value": 0, + "strVal": "economy", + "smartHomeCategories": [], + "minValue": null, + "maxValue": null, + "stepValue": 1, + "enumValues": [ + { + "value": "4", + "text": "smart control", + "icon": "" + }, + { + "value": "0", + "text": "economy", + "icon": "" + }, + { + "value": "1", + "text": "normal", + "icon": "" + }, + { + "value": "2", + "text": "luxury", + "icon": "" + } + ], + "scaleValue": "1", + "zoneId": null } ] diff --git a/tests/components/myuplink/fixtures/device_points_nibe_smo20.json b/tests/components/myuplink/fixtures/device_points_nibe_smo20.json index b64869c236c..9135862d991 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_smo20.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_smo20.json @@ -3996,9 +3996,9 @@ "parameterUnit": "", "writable": true, "timestamp": "2024-02-14T08:36:05+00:00", - "value": 0, + "value": 0.0, "strVal": "economy", - "smartHomeCategories": [], + "smartHomeCategories": ["test"], "minValue": null, "maxValue": null, "stepValue": 1, diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 53664820364..9160fd3b365 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -1012,6 +1012,44 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "NIBEF F730 CU 3x400V", + "parameterId": "47041", + "parameterName": "comfort mode", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-05-22T15:02:03+00:00", + "value": 0, + "strVal": 
"economy", + "smartHomeCategories": [], + "minValue": null, + "maxValue": null, + "stepValue": 1, + "enumValues": [ + { + "value": "4", + "text": "smart control", + "icon": "" + }, + { + "value": "0", + "text": "economy", + "icon": "" + }, + { + "value": "1", + "text": "normal", + "icon": "" + }, + { + "value": "2", + "text": "luxury", + "icon": "" + } + ], + "scaleValue": "1", + "zoneId": null } ] @@ -2017,6 +2055,44 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "NIBEF F730 CU 3x400V", + "parameterId": "47041", + "parameterName": "comfort mode", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-05-22T15:02:03+00:00", + "value": 0, + "strVal": "economy", + "smartHomeCategories": [], + "minValue": null, + "maxValue": null, + "stepValue": 1, + "enumValues": [ + { + "value": "4", + "text": "smart control", + "icon": "" + }, + { + "value": "0", + "text": "economy", + "icon": "" + }, + { + "value": "1", + "text": "normal", + "icon": "" + }, + { + "value": "2", + "text": "luxury", + "icon": "" + } + ], + "scaleValue": "1", + "zoneId": null } ] diff --git a/tests/components/myuplink/test_select.py b/tests/components/myuplink/test_select.py new file mode 100644 index 00000000000..7ad2d17cb5d --- /dev/null +++ b/tests/components/myuplink/test_select.py @@ -0,0 +1,89 @@ +"""Tests for myuplink select module.""" + +from unittest.mock import MagicMock + +from aiohttp import ClientError +import pytest + +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_OPTION, + SERVICE_SELECT_OPTION, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +TEST_PLATFORM = Platform.SELECT +pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) + +ENTITY_ID = "select.gotham_city_comfort_mode" +ENTITY_FRIENDLY_NAME = "Gotham City comfort mode" +ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041" + + +async 
def test_select_entity( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_myuplink_client: MagicMock, + setup_platform: None, +) -> None: + """Test that the entities are registered in the entity registry.""" + + entry = entity_registry.async_get(ENTITY_ID) + assert entry.unique_id == ENTITY_UID + + # Test the select attributes are correct. + + state = hass.states.get(ENTITY_ID) + assert state.state == "Economy" + assert state.attributes == { + "options": ["Smart control", "Economy", "Normal", "Luxury"], + "friendly_name": ENTITY_FRIENDLY_NAME, + } + + +async def test_selecting( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + setup_platform: None, +) -> None: + """Test select option service.""" + + await hass.services.async_call( + TEST_PLATFORM, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_OPTION: "Economy"}, + blocking=True, + ) + await hass.async_block_till_done() + mock_myuplink_client.async_set_device_points.assert_called_once() + + # Test handling of exception from API. 
+ + mock_myuplink_client.async_set_device_points.side_effect = ClientError + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + TEST_PLATFORM, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_OPTION: "Economy"}, + blocking=True, + ) + assert mock_myuplink_client.async_set_device_points.call_count == 2 + + +@pytest.mark.parametrize( + "load_device_points_file", + ["device_points_nibe_smo20.json"], +) +async def test_entity_registry_smo20( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_myuplink_client: MagicMock, + setup_platform: None, +) -> None: + """Test that the entities are registered in the entity registry.""" + + entry = entity_registry.async_get("select.gotham_city_all") + assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47660" diff --git a/tests/components/nam/snapshots/test_diagnostics.ambr b/tests/components/nam/snapshots/test_diagnostics.ambr index c187dec2866..e92e02fa1d8 100644 --- a/tests/components/nam/snapshots/test_diagnostics.ambr +++ b/tests/components/nam/snapshots/test_diagnostics.ambr @@ -2,18 +2,18 @@ # name: test_entry_diagnostics dict({ 'data': dict({ - 'bme280_humidity': 45.7, - 'bme280_pressure': 1011.012, - 'bme280_temperature': 7.6, - 'bmp180_pressure': 1032.012, - 'bmp180_temperature': 7.6, - 'bmp280_pressure': 1022.012, - 'bmp280_temperature': 5.6, - 'dht22_humidity': 46.2, - 'dht22_temperature': 6.3, - 'ds18b20_temperature': 12.6, - 'heca_humidity': 50.0, - 'heca_temperature': 8.0, + 'bme280_humidity': 45.69, + 'bme280_pressure': 1011.0117, + 'bme280_temperature': 7.56, + 'bmp180_pressure': 1032.0118, + 'bmp180_temperature': 7.56, + 'bmp280_pressure': 1022.0117999999999, + 'bmp280_temperature': 5.56, + 'dht22_humidity': 46.23, + 'dht22_temperature': 6.26, + 'ds18b20_temperature': 12.56, + 'heca_humidity': 49.97, + 'heca_temperature': 7.95, 'mhz14a_carbon_dioxide': 865.0, 'pms_caqi': 19, 'pms_caqi_level': 'very_low', @@ -22,17 +22,17 @@ 'pms_p2': 11.0, 
'sds011_caqi': 19, 'sds011_caqi_level': 'very_low', - 'sds011_p1': 18.6, - 'sds011_p2': 11.0, - 'sht3x_humidity': 34.7, - 'sht3x_temperature': 6.3, + 'sds011_p1': 18.65, + 'sds011_p2': 11.03, + 'sht3x_humidity': 34.69, + 'sht3x_temperature': 6.28, 'signal': -72.0, 'sps30_caqi': 54, 'sps30_caqi_level': 'medium', - 'sps30_p0': 31.2, - 'sps30_p1': 21.2, - 'sps30_p2': 34.3, - 'sps30_p4': 24.7, + 'sps30_p0': 31.23, + 'sps30_p1': 21.23, + 'sps30_p2': 34.32, + 'sps30_p4': 24.72, 'uptime': 456987, }), 'info': dict({ diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr index ea47998f3de..16129c5d7ce 100644 --- a/tests/components/nam/snapshots/test_sensor.ambr +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -1,51 +1,4 @@ # serializer version: 1 -# name: test_sensor[button.nettigo_air_monitor_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.nettigo_air_monitor_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'nam', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'aa:bb:cc:dd:ee:ff-restart', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[button.nettigo_air_monitor_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'Nettigo Air Monitor Restart', - }), - 'context': , - 'entity_id': 'button.nettigo_air_monitor_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: 
test_sensor[sensor.nettigo_air_monitor_bme280_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -97,7 +50,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '45.7', + 'state': '45.69', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_pressure-entry] @@ -151,7 +104,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1011.012', + 'state': '1011.0117', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_temperature-entry] @@ -205,7 +158,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '7.6', + 'state': '7.56', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp180_pressure-entry] @@ -259,7 +212,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1032.012', + 'state': '1032.0118', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp180_temperature-entry] @@ -313,7 +266,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '7.6', + 'state': '7.56', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp280_pressure-entry] @@ -367,7 +320,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1022.012', + 'state': '1022.0118', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp280_temperature-entry] @@ -421,7 +374,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '5.6', + 'state': '5.56', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_dht22_humidity-entry] @@ -475,7 +428,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '46.2', + 'state': '46.23', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_dht22_temperature-entry] @@ -529,7 +482,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6.3', + 'state': '6.26', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_ds18b20_temperature-entry] @@ -583,7 +536,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '12.6', + 'state': '12.56', }) # --- # 
name: test_sensor[sensor.nettigo_air_monitor_heca_humidity-entry] @@ -637,7 +590,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '50.0', + 'state': '49.97', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_heca_temperature-entry] @@ -691,7 +644,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '8.0', + 'state': '7.95', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_last_restart-entry] @@ -1224,7 +1177,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '18.6', + 'state': '18.65', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sds011_pm2_5-entry] @@ -1278,7 +1231,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '11.0', + 'state': '11.03', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sht3x_humidity-entry] @@ -1332,7 +1285,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '34.7', + 'state': '34.69', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sht3x_temperature-entry] @@ -1386,7 +1339,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6.3', + 'state': '6.28', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_signal_strength-entry] @@ -1602,7 +1555,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '31.2', + 'state': '31.23', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm10-entry] @@ -1656,7 +1609,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '21.2', + 'state': '21.23', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm2_5-entry] @@ -1710,7 +1663,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '34.3', + 'state': '34.32', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm4-entry] @@ -1763,6 +1716,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '24.7', + 'state': '24.72', }) # --- diff --git a/tests/components/nam/test_init.py 
b/tests/components/nam/test_init.py index 8b8c3a4835a..13bde1432b3 100644 --- a/tests/components/nam/test_init.py +++ b/tests/components/nam/test_init.py @@ -23,7 +23,7 @@ async def test_async_setup_entry(hass: HomeAssistant) -> None: state = hass.states.get("sensor.nettigo_air_monitor_sds011_pm2_5") assert state is not None assert state.state != STATE_UNAVAILABLE - assert state.state == "11.0" + assert state.state == "11.03" async def test_config_not_ready(hass: HomeAssistant) -> None: diff --git a/tests/components/nam/test_sensor.py b/tests/components/nam/test_sensor.py index 53945e1c8a2..6924af48f01 100644 --- a/tests/components/nam/test_sensor.py +++ b/tests/components/nam/test_sensor.py @@ -77,7 +77,7 @@ async def test_incompleta_data_after_device_restart(hass: HomeAssistant) -> None state = hass.states.get("sensor.nettigo_air_monitor_heca_temperature") assert state - assert state.state == "8.0" + assert state.state == "7.95" assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS @@ -110,7 +110,7 @@ async def test_availability( state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") assert state assert state.state != STATE_UNAVAILABLE - assert state.state == "7.6" + assert state.state == "7.56" with ( patch("homeassistant.components.nam.NettigoAirMonitor.initialize"), @@ -142,7 +142,7 @@ async def test_availability( state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") assert state assert state.state != STATE_UNAVAILABLE - assert state.state == "7.6" + assert state.state == "7.56" async def test_manual_update_entity(hass: HomeAssistant) -> None: diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index bbaa92b7b28..9c8de0224f0 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import 
Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator import copy from dataclasses import dataclass, field import time @@ -14,7 +14,6 @@ from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.event import EventMessage from google_nest_sdm.event_media import CachePolicy from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber -from typing_extensions import Generator from homeassistant.components.application_credentials import ClientCredential from homeassistant.components.nest import DOMAIN @@ -93,7 +92,7 @@ class FakeSubscriber(GoogleNestSubscriber): stop_calls = 0 - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called """Initialize Fake Subscriber.""" self._device_manager = DeviceManager() diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index de0fc2079fa..85c64aff379 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -3,6 +3,7 @@ from __future__ import annotations from asyncio import AbstractEventLoop +from collections.abc import Generator import copy import shutil import time @@ -15,7 +16,6 @@ from google_nest_sdm import diagnostics from google_nest_sdm.auth import AbstractAuth from google_nest_sdm.device_manager import DeviceManager import pytest -from typing_extensions import Generator from homeassistant.components.application_credentials import ( async_import_client_credential, @@ -53,7 +53,7 @@ class FakeAuth(AbstractAuth): from the API. """ - def __init__(self): + def __init__(self) -> None: """Initialize FakeAuth.""" super().__init__(None, None) # Tests can set fake responses here. 
@@ -109,7 +109,7 @@ async def auth(aiohttp_client: ClientSessionGenerator) -> FakeAuth: @pytest.fixture(autouse=True) -def cleanup_media_storage(hass): +def cleanup_media_storage(hass: HomeAssistant) -> Generator[None]: """Test cleanup, remove any media storage persisted during the test.""" tmp_path = str(uuid.uuid4()) with patch("homeassistant.components.nest.media_source.MEDIA_PATH", new=tmp_path): diff --git a/tests/components/nest/test_api.py b/tests/components/nest/test_api.py index fd07233fa8c..a13d4d3a337 100644 --- a/tests/components/nest/test_api.py +++ b/tests/components/nest/test_api.py @@ -18,7 +18,7 @@ from homeassistant.components.nest.const import API_URL, OAUTH2_TOKEN, SDM_SCOPE from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from .common import CLIENT_ID, CLIENT_SECRET, PROJECT_ID, PlatformSetup +from .common import CLIENT_ID, CLIENT_SECRET, PROJECT_ID, FakeSubscriber, PlatformSetup from .conftest import FAKE_REFRESH_TOKEN, FAKE_TOKEN from tests.test_util.aiohttp import AiohttpClientMocker @@ -27,7 +27,7 @@ FAKE_UPDATED_TOKEN = "fake-updated-token" @pytest.fixture -def subscriber() -> None: +def subscriber() -> FakeSubscriber | None: """Disable default subscriber since tests use their own patch.""" return None diff --git a/tests/components/nest/test_camera.py b/tests/components/nest/test_camera.py index 1838c18b6d4..fd2b5ef0388 100644 --- a/tests/components/nest/test_camera.py +++ b/tests/components/nest/test_camera.py @@ -4,6 +4,7 @@ These tests fake out the subscriber/devicemanager, and are not using a real pubsub subscriber. 
""" +from collections.abc import Generator import datetime from http import HTTPStatus from unittest.mock import AsyncMock, Mock, patch @@ -12,7 +13,6 @@ import aiohttp from freezegun import freeze_time from google_nest_sdm.event import EventMessage import pytest -from typing_extensions import Generator from homeassistant.components import camera from homeassistant.components.camera import STATE_IDLE, STATE_STREAMING, StreamType diff --git a/tests/components/nest/test_config_flow.py b/tests/components/nest/test_config_flow.py index 5c8f01c8e39..b6e84ce358f 100644 --- a/tests/components/nest/test_config_flow.py +++ b/tests/components/nest/test_config_flow.py @@ -56,7 +56,12 @@ def nest_test_config() -> NestTestConfig: class OAuthFixture: """Simulate the oauth flow used by the config flow.""" - def __init__(self, hass, hass_client_no_auth, aioclient_mock): + def __init__( + self, + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + ) -> None: """Initialize OAuthFixture.""" self.hass = hass self.hass_client = hass_client_no_auth diff --git a/tests/components/nest/test_device_trigger.py b/tests/components/nest/test_device_trigger.py index 1820096d2a6..f818713d382 100644 --- a/tests/components/nest/test_device_trigger.py +++ b/tests/components/nest/test_device_trigger.py @@ -20,7 +20,7 @@ from homeassistant.util.dt import utcnow from .common import DEVICE_ID, CreateDevice, FakeSubscriber, PlatformSetup -from tests.common import async_get_device_automations, async_mock_service +from tests.common import async_get_device_automations DEVICE_NAME = "My Camera" DATA_MESSAGE = {"message": "service-called"} @@ -83,12 +83,6 @@ async def setup_automation(hass, device_id, trigger_type): ) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: 
dr.DeviceRegistry, @@ -248,7 +242,7 @@ async def test_fires_on_camera_motion( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test camera_motion triggers firing.""" create_device.create( @@ -273,8 +267,8 @@ async def test_fires_on_camera_motion( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE async def test_fires_on_camera_person( @@ -282,7 +276,7 @@ async def test_fires_on_camera_person( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test camera_person triggers firing.""" create_device.create( @@ -307,8 +301,8 @@ async def test_fires_on_camera_person( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE async def test_fires_on_camera_sound( @@ -316,7 +310,7 @@ async def test_fires_on_camera_sound( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test camera_sound triggers firing.""" create_device.create( @@ -341,8 +335,8 @@ async def test_fires_on_camera_sound( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE async def test_fires_on_doorbell_chime( @@ -350,7 +344,7 @@ async def test_fires_on_doorbell_chime( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: 
PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test doorbell_chime triggers firing.""" create_device.create( @@ -375,8 +369,8 @@ async def test_fires_on_doorbell_chime( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE async def test_trigger_for_wrong_device_id( @@ -384,7 +378,7 @@ async def test_trigger_for_wrong_device_id( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test messages for the wrong device are ignored.""" create_device.create( @@ -409,7 +403,7 @@ async def test_trigger_for_wrong_device_id( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_trigger_for_wrong_event_type( @@ -417,7 +411,7 @@ async def test_trigger_for_wrong_event_type( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test that messages for the wrong event type are ignored.""" create_device.create( @@ -442,13 +436,13 @@ async def test_trigger_for_wrong_event_type( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_subscriber_automation( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], create_device: CreateDevice, setup_platform: PlatformSetup, subscriber: FakeSubscriber, @@ -488,5 +482,5 @@ async def test_subscriber_automation( await subscriber.async_receive_event(event) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == 
DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE diff --git a/tests/components/nest/test_init.py b/tests/components/nest/test_init.py index f9813ca63ee..f3226c936fb 100644 --- a/tests/components/nest/test_init.py +++ b/tests/components/nest/test_init.py @@ -8,6 +8,7 @@ mode (e.g. yaml, ConfigEntry, etc) however some tests override and just run in relevant modes. """ +from collections.abc import Generator import logging from typing import Any from unittest.mock import patch @@ -19,7 +20,6 @@ from google_nest_sdm.exceptions import ( SubscriberException, ) import pytest -from typing_extensions import Generator from homeassistant.components.nest import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -67,13 +67,15 @@ def warning_caplog( @pytest.fixture -def subscriber_side_effect() -> None: +def subscriber_side_effect() -> Any | None: """Fixture to inject failures into FakeSubscriber start.""" return None @pytest.fixture -def failing_subscriber(subscriber_side_effect: Any) -> YieldFixture[FakeSubscriber]: +def failing_subscriber( + subscriber_side_effect: Any | None, +) -> YieldFixture[FakeSubscriber]: """Fixture overriding default subscriber behavior to allow failure injection.""" subscriber = FakeSubscriber() with patch( diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index f4fb8bdb623..3cfa4ee6687 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -4,6 +4,7 @@ These tests simulate recent camera events received by the subscriber exposed as media in the media source. 
""" +from collections.abc import Generator import datetime from http import HTTPStatus import io @@ -15,7 +16,6 @@ import av from google_nest_sdm.event import EventMessage import numpy as np import pytest -from typing_extensions import Generator from homeassistant.components.media_player.errors import BrowseError from homeassistant.components.media_source import ( diff --git a/tests/components/netatmo/common.py b/tests/components/netatmo/common.py index 08c8679acf3..730cb0cb117 100644 --- a/tests/components/netatmo/common.py +++ b/tests/components/netatmo/common.py @@ -1,9 +1,10 @@ """Common methods used across tests for Netatmo.""" +from collections.abc import Iterator from contextlib import contextmanager import json from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from syrupy import SnapshotAssertion @@ -86,7 +87,7 @@ async def fake_post_request(*args: Any, **kwargs: Any): ) -async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str: +async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str | None: """Return fake data.""" if "endpoint" not in kwargs: return "{}" @@ -95,6 +96,7 @@ async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str: if endpoint in "snapshot_720.jpg": return b"test stream image bytes" + return None async def simulate_webhook(hass: HomeAssistant, webhook_id: str, response) -> None: @@ -109,7 +111,7 @@ async def simulate_webhook(hass: HomeAssistant, webhook_id: str, response) -> No @contextmanager -def selected_platforms(platforms: list[Platform]) -> AsyncMock: +def selected_platforms(platforms: list[Platform]) -> Iterator[None]: """Restrict loaded platforms to list given.""" with ( patch("homeassistant.components.netatmo.data_handler.PLATFORMS", platforms), diff --git a/tests/components/netatmo/conftest.py b/tests/components/netatmo/conftest.py index d2e6c1fdc88..b79e6480711 100644 --- a/tests/components/netatmo/conftest.py +++ 
b/tests/components/netatmo/conftest.py @@ -69,6 +69,15 @@ def mock_config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: "area_name": "Home max", "mode": "max", }, + "Home min": { + "lat_ne": 32.2345678, + "lon_ne": -117.1234567, + "lat_sw": 32.1234567, + "lon_sw": -117.2345678, + "show_on_map": True, + "area_name": "Home min", + "mode": "min", + }, } }, ) diff --git a/tests/components/netatmo/snapshots/test_diagnostics.ambr b/tests/components/netatmo/snapshots/test_diagnostics.ambr index 8ce00279b83..35cd0bfbf47 100644 --- a/tests/components/netatmo/snapshots/test_diagnostics.ambr +++ b/tests/components/netatmo/snapshots/test_diagnostics.ambr @@ -630,6 +630,15 @@ 'mode': 'max', 'show_on_map': True, }), + 'Home min': dict({ + 'area_name': 'Home min', + 'lat_ne': '**REDACTED**', + 'lat_sw': '**REDACTED**', + 'lon_ne': '**REDACTED**', + 'lon_sw': '**REDACTED**', + 'mode': 'min', + 'show_on_map': True, + }), }), }), 'pref_disable_new_entities': False, diff --git a/tests/components/netatmo/snapshots/test_init.ambr b/tests/components/netatmo/snapshots/test_init.ambr index 8f4b357fc5f..60cb22d74f2 100644 --- a/tests/components/netatmo/snapshots/test_init.ambr +++ b/tests/components/netatmo/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'Bubbendorf', 'model': 'Roller Shutter', + 'model_id': None, 'name': 'Entrance Blinds', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -51,8 +53,10 @@ }), 'manufacturer': 'Bubbendorf', 'model': 'Orientable Shutter', + 'model_id': None, 'name': 'Bubendorff blind', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -81,8 +85,10 @@ }), 'manufacturer': 'Legrand', 'model': '2 wire light switch/dimmer', + 'model_id': None, 'name': 'Unknown 00:11:22:33:00:11:45:fe', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, 
@@ -111,8 +117,10 @@ }), 'manufacturer': 'Smarther', 'model': 'Smarther with Netatmo', + 'model_id': None, 'name': 'Corridor', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': 'Corridor', 'sw_version': None, @@ -141,8 +149,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Energy Meter', + 'model_id': None, 'name': 'Consumption meter', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -171,8 +181,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Light switch/dimmer with neutral', + 'model_id': None, 'name': 'Bathroom light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -201,8 +213,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 1', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -231,8 +245,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 2', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -261,8 +277,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 3', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -291,8 +309,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 4', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -321,8 +341,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 5', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -351,8 +373,10 @@ }), 'manufacturer': 
'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Total', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -381,8 +405,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Gas', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -411,8 +437,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Hot water', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -441,8 +469,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Cold water', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -471,8 +501,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Écocompteur', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -501,8 +533,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Camera', + 'model_id': None, 'name': 'Hall', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -531,8 +565,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Anemometer', + 'model_id': None, 'name': 'Villa Garden', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -561,8 +597,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Outdoor Camera', + 'model_id': None, 'name': 'Front', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -591,8 +629,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Video Doorbell', + 'model_id': None, 'name': 
'Netatmo-Doorbell', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -621,8 +661,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -651,8 +693,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Livingroom', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -681,8 +725,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Baby Bedroom', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -711,8 +757,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Bedroom', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -741,8 +789,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Parents Bedroom', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -771,8 +821,10 @@ }), 'manufacturer': 'Legrand', 'model': 'Plug', + 'model_id': None, 'name': 'Prise', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -801,8 +853,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Outdoor Module', + 'model_id': None, 'name': 'Villa Outdoor', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -831,8 +885,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Module', + 'model_id': None, 'name': 
'Villa Bedroom', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -861,8 +917,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Module', + 'model_id': None, 'name': 'Villa Bathroom', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -891,8 +949,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Home Weather station', + 'model_id': None, 'name': 'Villa', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -921,8 +981,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Rain Gauge', + 'model_id': None, 'name': 'Villa Rain', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -951,8 +1013,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'OpenTherm Modulating Thermostat', + 'model_id': None, 'name': 'Bureau Modulate', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': 'Bureau', 'sw_version': None, @@ -981,8 +1045,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Thermostat', + 'model_id': None, 'name': 'Livingroom', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': 'Livingroom', 'sw_version': None, @@ -1011,8 +1077,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Valve', + 'model_id': None, 'name': 'Valve1', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': 'Entrada', 'sw_version': None, @@ -1041,8 +1109,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Valve', + 'model_id': None, 'name': 'Valve2', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': 'Cocina', 'sw_version': None, @@ -1071,8 +1141,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Climate', + 'model_id': None, 'name': 'MYHOME', 'name_by_user': None, + 
'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1101,8 +1173,10 @@ }), 'manufacturer': 'Netatmo', 'model': 'Public Weather station', + 'model_id': None, 'name': 'Home avg', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1131,8 +1205,42 @@ }), 'manufacturer': 'Netatmo', 'model': 'Public Weather station', + 'model_id': None, 'name': 'Home max', 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices[netatmo-Home min] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'https://weathermap.netatmo.com/', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'netatmo', + 'Home min', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Netatmo', + 'model': 'Public Weather station', + 'model_id': None, + 'name': 'Home min', + 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index 6ab1e4b1e1a..bc2a18d918d 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -2663,6 +2663,556 @@ 'state': '15', }) # --- +# name: test_entity[sensor.home_min_atmospheric_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_atmospheric_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Atmospheric pressure', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_atmospheric_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'atmospheric_pressure', + 'friendly_name': 'Home min Atmospheric pressure', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_atmospheric_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1005.4', + }) +# --- +# name: test_entity[sensor.home_min_gust_angle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_gust_angle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gust angle', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gust_angle', + 'unique_id': 'Home-min-gustangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_min_gust_angle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'friendly_name': 'Home 
min Gust angle', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_min_gust_angle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '217', + }) +# --- +# name: test_entity[sensor.home_min_gust_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_gust_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Gust strength', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gust_strength', + 'unique_id': 'Home-min-guststrength', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_gust_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_speed', + 'friendly_name': 'Home min Gust strength', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_gust_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '31', + }) +# --- +# name: test_entity[sensor.home_min_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , 
+ 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entity[sensor.home_min_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'humidity', + 'friendly_name': 'Home min Humidity', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.home_min_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56', + }) +# --- +# name: test_entity[sensor.home_min_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_min_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'friendly_name': 'Home min None', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_min_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'17', + }) +# --- +# name: test_entity[sensor.home_min_precipitation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_precipitation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Precipitation', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-rain', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_precipitation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'precipitation', + 'friendly_name': 'Home min Precipitation', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_precipitation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity[sensor.home_min_precipitation_last_hour-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_precipitation_last_hour', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Precipitation last hour', + 'platform': 
'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sum_rain_1', + 'unique_id': 'Home-min-sum_rain_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_precipitation_last_hour-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'precipitation', + 'friendly_name': 'Home min Precipitation last hour', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_precipitation_last_hour', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity[sensor.home_min_precipitation_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_precipitation_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Precipitation today', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sum_rain_24', + 'unique_id': 'Home-min-sum_rain_24', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_precipitation_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'precipitation', + 'friendly_name': 'Home min Precipitation today', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_precipitation_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '9.999', + }) +# --- +# name: test_entity[sensor.home_min_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'temperature', + 'friendly_name': 'Home min Temperature', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19.8', + }) +# --- +# name: test_entity[sensor.home_min_wind_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_wind_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind speed', + 'platform': 'netatmo', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-windstrength', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_wind_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_speed', + 'friendly_name': 'Home min Wind speed', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_wind_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- # name: test_entity[sensor.hot_water_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/netatmo/test_device_trigger.py b/tests/components/netatmo/test_device_trigger.py index ad1e9bd8cb9..99709572024 100644 --- a/tests/components/netatmo/test_device_trigger.py +++ b/tests/components/netatmo/test_device_trigger.py @@ -22,16 +22,9 @@ from tests.common import ( MockConfigEntry, async_capture_events, async_get_device_automations, - async_mock_service, ) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("platform", "device_type", "event_types"), [ @@ -113,7 +106,7 @@ async def test_get_triggers( ) async def test_if_fires_on_event( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, platform, @@ -175,8 +168,8 @@ async def test_if_fires_on_event( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(calls) == 1 - assert calls[0].data["some"] == f"{event_type} - device - {device.id}" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == f"{event_type} - device - {device.id}" @pytest.mark.parametrize( @@ -196,7 +189,7 @@ async def 
test_if_fires_on_event( ) async def test_if_fires_on_event_legacy( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, platform, @@ -258,8 +251,8 @@ async def test_if_fires_on_event_legacy( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(calls) == 1 - assert calls[0].data["some"] == f"{event_type} - device - {device.id}" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == f"{event_type} - device - {device.id}" @pytest.mark.parametrize( @@ -275,7 +268,7 @@ async def test_if_fires_on_event_legacy( ) async def test_if_fires_on_event_with_subtype( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, platform, @@ -343,8 +336,11 @@ async def test_if_fires_on_event_with_subtype( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(calls) == 1 - assert calls[0].data["some"] == f"{event_type} - {sub_type} - device - {device.id}" + assert len(service_calls) == 1 + assert ( + service_calls[0].data["some"] + == f"{event_type} - {sub_type} - device - {device.id}" + ) @pytest.mark.parametrize( diff --git a/tests/components/netatmo/test_diagnostics.py b/tests/components/netatmo/test_diagnostics.py index 48f021295e1..7a0bf11c652 100644 --- a/tests/components/netatmo/test_diagnostics.py +++ b/tests/components/netatmo/test_diagnostics.py @@ -42,4 +42,11 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot(exclude=paths("info.data.token.expires_at", "info.entry_id")) + ) == snapshot( + exclude=paths( + "info.data.token.expires_at", + "info.entry_id", + "info.created_at", + "info.modified_at", + ) + ) diff --git a/tests/components/netatmo/test_sensor.py b/tests/components/netatmo/test_sensor.py index 3c16e6e60f9..2c47cdefa60 100644 
--- a/tests/components/netatmo/test_sensor.py +++ b/tests/components/netatmo/test_sensor.py @@ -81,6 +81,12 @@ async def test_public_weather_sensor( assert hass.states.get(f"{prefix}humidity").state == "76" assert hass.states.get(f"{prefix}atmospheric_pressure").state == "1014.4" + prefix = "sensor.home_min_" + + assert hass.states.get(f"{prefix}temperature").state == "19.8" + assert hass.states.get(f"{prefix}humidity").state == "56" + assert hass.states.get(f"{prefix}atmospheric_pressure").state == "1005.4" + prefix = "sensor.home_avg_" assert hass.states.get(f"{prefix}temperature").state == "22.7" diff --git a/tests/components/netgear_lte/snapshots/test_init.ambr b/tests/components/netgear_lte/snapshots/test_init.ambr index 8af22f98e02..ca65c17cc8e 100644 --- a/tests/components/netgear_lte/snapshots/test_init.ambr +++ b/tests/components/netgear_lte/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'Netgear', 'model': 'LM1200', + 'model_id': None, 'name': 'Netgear LM1200', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 'FFFFFFFFFFFFF', 'suggested_area': None, 'sw_version': 'EC25AFFDR07A09M4G', diff --git a/tests/components/network/conftest.py b/tests/components/network/conftest.py index 36d9c449d27..d5fbb95a814 100644 --- a/tests/components/network/conftest.py +++ b/tests/components/network/conftest.py @@ -1,9 +1,9 @@ """Tests for the Network Configuration integration.""" +from collections.abc import Generator from unittest.mock import _patch import pytest -from typing_extensions import Generator @pytest.fixture(autouse=True) diff --git a/tests/components/nextbus/conftest.py b/tests/components/nextbus/conftest.py index 84445905c2e..231faccf907 100644 --- a/tests/components/nextbus/conftest.py +++ b/tests/components/nextbus/conftest.py @@ -8,15 +8,32 @@ import pytest @pytest.fixture( params=[ - {"name": "Outbound", "stop": [{"tag": "5650"}]}, [ { "name": "Outbound", - "stop": [{"tag": "5650"}], + "shortName": "Outbound", + 
"useForUi": True, + "stops": ["5184"], + }, + { + "name": "Outbound - Hidden", + "shortName": "Outbound - Hidden", + "useForUi": False, + "stops": ["5651"], + }, + ], + [ + { + "name": "Outbound", + "shortName": "Outbound", + "useForUi": True, + "stops": ["5184"], }, { "name": "Inbound", - "stop": [{"tag": "5651"}], + "shortName": "Inbound", + "useForUi": True, + "stops": ["5651"], }, ], ] @@ -35,22 +52,65 @@ def mock_nextbus_lists( ) -> MagicMock: """Mock all list functions in nextbus to test validate logic.""" instance = mock_nextbus.return_value - instance.get_agency_list.return_value = { - "agency": [{"tag": "sf-muni", "title": "San Francisco Muni"}] - } - instance.get_route_list.return_value = { - "route": [{"tag": "F", "title": "F - Market & Wharves"}] - } - instance.get_route_config.return_value = { - "route": { - "stop": [ - {"tag": "5650", "title": "Market St & 7th St"}, - {"tag": "5651", "title": "Market St & 7th St"}, - # Error case test. Duplicate title with no unique direction - {"tag": "5652", "title": "Market St & 7th St"}, - ], - "direction": route_config_direction, + instance.agencies.return_value = [ + { + "id": "sfmta-cis", + "name": "San Francisco Muni CIS", + "shortName": "SF Muni CIS", + "region": "", + "website": "", + "logo": "", + "nxbs2RedirectUrl": "", } + ] + + instance.routes.return_value = [ + { + "id": "F", + "rev": 1057, + "title": "F Market & Wharves", + "description": "7am-10pm daily", + "color": "", + "textColor": "", + "hidden": False, + "timestamp": "2024-06-23T03:06:58Z", + }, + ] + + instance.route_details.return_value = { + "id": "F", + "rev": 1057, + "title": "F Market & Wharves", + "description": "7am-10pm daily", + "color": "", + "textColor": "", + "hidden": False, + "boundingBox": {}, + "stops": [ + { + "id": "5184", + "lat": 37.8071299, + "lon": -122.41732, + "name": "Jones St & Beach St", + "code": "15184", + "hidden": False, + "showDestinationSelector": True, + "directions": ["F_0_var1", "F_0_var0"], + }, + { + "id": 
"5651", + "lat": 37.8071299, + "lon": -122.41732, + "name": "Jones St & Beach St", + "code": "15651", + "hidden": False, + "showDestinationSelector": True, + "directions": ["F_0_var1", "F_0_var0"], + }, + ], + "directions": route_config_direction, + "paths": [], + "timestamp": "2024-06-23T03:06:58Z", } return instance diff --git a/tests/components/nextbus/test_config_flow.py b/tests/components/nextbus/test_config_flow.py index 0a64bc97d9a..4e5b933a189 100644 --- a/tests/components/nextbus/test_config_flow.py +++ b/tests/components/nextbus/test_config_flow.py @@ -1,9 +1,9 @@ """Test the NextBus config flow.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries, setup from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN @@ -44,7 +44,7 @@ async def test_user_config( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_AGENCY: "sf-muni", + CONF_AGENCY: "sfmta-cis", }, ) await hass.async_block_till_done() @@ -68,16 +68,16 @@ async def test_user_config( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_STOP: "5650", + CONF_STOP: "5184", }, ) await hass.async_block_till_done() assert result.get("type") is FlowResultType.CREATE_ENTRY assert result.get("data") == { - "agency": "sf-muni", + "agency": "sfmta-cis", "route": "F", - "stop": "5650", + "stop": "5184", } assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/nextbus/test_sensor.py b/tests/components/nextbus/test_sensor.py index 3630ff88855..dd0346c3e7a 100644 --- a/tests/components/nextbus/test_sensor.py +++ b/tests/components/nextbus/test_sensor.py @@ -1,12 +1,12 @@ """The tests for the nexbus sensor component.""" +from collections.abc import Generator from copy import deepcopy from unittest.mock import MagicMock, patch from urllib.error import HTTPError from 
py_nextbus.client import NextBusFormatError, NextBusHTTPError import pytest -from typing_extensions import Generator from homeassistant.components import sensor from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN @@ -18,9 +18,9 @@ from homeassistant.helpers.update_coordinator import UpdateFailed from tests.common import MockConfigEntry -VALID_AGENCY = "sf-muni" +VALID_AGENCY = "sfmta-cis" VALID_ROUTE = "F" -VALID_STOP = "5650" +VALID_STOP = "5184" VALID_AGENCY_TITLE = "San Francisco Muni" VALID_ROUTE_TITLE = "F-Market & Wharves" VALID_STOP_TITLE = "Market St & 7th St" @@ -44,25 +44,38 @@ CONFIG_BASIC = { } } -BASIC_RESULTS = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "direction": { - "title": "Outbound", - "prediction": [ - {"minutes": "1", "epochTime": "1553807371000"}, - {"minutes": "2", "epochTime": "1553807372000"}, - {"minutes": "3", "epochTime": "1553807373000"}, - {"minutes": "10", "epochTime": "1553807380000"}, - ], +BASIC_RESULTS = [ + { + "route": { + "title": VALID_ROUTE_TITLE, + "id": VALID_ROUTE, }, + "stop": { + "name": VALID_STOP_TITLE, + "id": VALID_STOP, + }, + "values": [ + {"minutes": 1, "timestamp": 1553807371000}, + {"minutes": 2, "timestamp": 1553807372000}, + {"minutes": 3, "timestamp": 1553807373000}, + {"minutes": 10, "timestamp": 1553807380000}, + ], } -} +] + +NO_UPCOMING = [ + { + "route": { + "title": VALID_ROUTE_TITLE, + "id": VALID_ROUTE, + }, + "stop": { + "name": VALID_STOP_TITLE, + "id": VALID_STOP, + }, + "values": [], + } +] @pytest.fixture @@ -78,9 +91,9 @@ def mock_nextbus_predictions( ) -> Generator[MagicMock]: """Create a mock of NextBusClient predictions.""" instance = mock_nextbus.return_value - instance.get_predictions_for_multi_stops.return_value = BASIC_RESULTS + instance.predictions_for_stop.return_value = BASIC_RESULTS - 
return instance.get_predictions_for_multi_stops + return instance.predictions_for_stop async def assert_setup_sensor( @@ -105,117 +118,23 @@ async def assert_setup_sensor( return config_entry -async def test_message_dict( - hass: HomeAssistant, - mock_nextbus: MagicMock, - mock_nextbus_lists: MagicMock, - mock_nextbus_predictions: MagicMock, -) -> None: - """Verify that a single dict message is rendered correctly.""" - mock_nextbus_predictions.return_value = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "message": {"text": "Message"}, - "direction": { - "title": "Outbound", - "prediction": [ - {"minutes": "1", "epochTime": "1553807371000"}, - {"minutes": "2", "epochTime": "1553807372000"}, - {"minutes": "3", "epochTime": "1553807373000"}, - ], - }, - } - } - - await assert_setup_sensor(hass, CONFIG_BASIC) - - state = hass.states.get(SENSOR_ID) - assert state is not None - assert state.attributes["message"] == "Message" - - -async def test_message_list( +async def test_predictions( hass: HomeAssistant, mock_nextbus: MagicMock, mock_nextbus_lists: MagicMock, mock_nextbus_predictions: MagicMock, ) -> None: """Verify that a list of messages are rendered correctly.""" - mock_nextbus_predictions.return_value = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "message": [{"text": "Message 1"}, {"text": "Message 2"}], - "direction": { - "title": "Outbound", - "prediction": [ - {"minutes": "1", "epochTime": "1553807371000"}, - {"minutes": "2", "epochTime": "1553807372000"}, - {"minutes": "3", "epochTime": "1553807373000"}, - ], - }, - } - } - - await assert_setup_sensor(hass, CONFIG_BASIC) - - state = hass.states.get(SENSOR_ID) - assert state is not None - 
assert state.attributes["message"] == "Message 1 -- Message 2" - - -async def test_direction_list( - hass: HomeAssistant, - mock_nextbus: MagicMock, - mock_nextbus_lists: MagicMock, - mock_nextbus_predictions: MagicMock, -) -> None: - """Verify that a list of messages are rendered correctly.""" - mock_nextbus_predictions.return_value = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "message": [{"text": "Message 1"}, {"text": "Message 2"}], - "direction": [ - { - "title": "Outbound", - "prediction": [ - {"minutes": "1", "epochTime": "1553807371000"}, - {"minutes": "2", "epochTime": "1553807372000"}, - {"minutes": "3", "epochTime": "1553807373000"}, - ], - }, - { - "title": "Outbound 2", - "prediction": {"minutes": "0", "epochTime": "1553807374000"}, - }, - ], - } - } await assert_setup_sensor(hass, CONFIG_BASIC) state = hass.states.get(SENSOR_ID) assert state is not None assert state.state == "2019-03-28T21:09:31+00:00" - assert state.attributes["agency"] == VALID_AGENCY_TITLE + assert state.attributes["agency"] == VALID_AGENCY assert state.attributes["route"] == VALID_ROUTE_TITLE assert state.attributes["stop"] == VALID_STOP_TITLE - assert state.attributes["direction"] == "Outbound, Outbound 2" - assert state.attributes["upcoming"] == "0, 1, 2, 3" + assert state.attributes["upcoming"] == "1, 2, 3, 10" @pytest.mark.parametrize( @@ -256,27 +175,19 @@ async def test_custom_name( assert state.name == "Custom Name" -@pytest.mark.parametrize( - "prediction_results", - [ - {}, - {"Error": "Failed"}, - ], -) -async def test_no_predictions( +async def test_verify_no_predictions( hass: HomeAssistant, mock_nextbus: MagicMock, - mock_nextbus_predictions: MagicMock, mock_nextbus_lists: MagicMock, - prediction_results: dict[str, str], + mock_nextbus_predictions: MagicMock, ) -> None: - """Verify there are no exceptions 
when no predictions are returned.""" - mock_nextbus_predictions.return_value = prediction_results - + """Verify attributes are set despite no upcoming times.""" + mock_nextbus_predictions.return_value = [] await assert_setup_sensor(hass, CONFIG_BASIC) state = hass.states.get(SENSOR_ID) assert state is not None + assert "upcoming" not in state.attributes assert state.state == "unknown" @@ -287,21 +198,10 @@ async def test_verify_no_upcoming( mock_nextbus_predictions: MagicMock, ) -> None: """Verify attributes are set despite no upcoming times.""" - mock_nextbus_predictions.return_value = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "direction": {"title": "Outbound", "prediction": []}, - } - } - + mock_nextbus_predictions.return_value = NO_UPCOMING await assert_setup_sensor(hass, CONFIG_BASIC) state = hass.states.get(SENSOR_ID) assert state is not None - assert state.state == "unknown" assert state.attributes["upcoming"] == "No upcoming predictions" + assert state.state == "unknown" diff --git a/tests/components/nextcloud/conftest.py b/tests/components/nextcloud/conftest.py index d6cd39e7fc8..cf3eda55fe1 100644 --- a/tests/components/nextcloud/conftest.py +++ b/tests/components/nextcloud/conftest.py @@ -1,9 +1,9 @@ """Fixtrues for the Nextcloud integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/nextcloud/snapshots/test_config_flow.ambr b/tests/components/nextcloud/snapshots/test_config_flow.ambr index 3334478ba24..06c4ce216db 100644 --- a/tests/components/nextcloud/snapshots/test_config_flow.ambr +++ b/tests/components/nextcloud/snapshots/test_config_flow.ambr @@ -1,12 +1,4 @@ # serializer version: 1 -# name: test_import - dict({ - 'password': 
'nc_pass', - 'url': 'nc_url', - 'username': 'nc_user', - 'verify_ssl': True, - }) -# --- # name: test_reauth dict({ 'password': 'other_password', diff --git a/tests/components/nextcloud/test_config_flow.py b/tests/components/nextcloud/test_config_flow.py index 9a881197cf9..c02516fdc99 100644 --- a/tests/components/nextcloud/test_config_flow.py +++ b/tests/components/nextcloud/test_config_flow.py @@ -10,7 +10,7 @@ from nextcloudmonitor import ( import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.nextcloud import DOMAIN +from homeassistant.components.nextcloud.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant diff --git a/tests/components/nextdns/snapshots/test_binary_sensor.ambr b/tests/components/nextdns/snapshots/test_binary_sensor.ambr index bd4ecbba084..814b4c1ac16 100644 --- a/tests/components/nextdns/snapshots/test_binary_sensor.ambr +++ b/tests/components/nextdns/snapshots/test_binary_sensor.ambr @@ -1,1095 +1,4 @@ # serializer version: 1 -# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AI-Driven threat detection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ai_threat_detection', - 'unique_id': 'xyz12_ai_threat_detection', - 'unit_of_measurement': None, - 
}) -# --- -# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile AI-Driven threat detection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow affiliate & tracking links', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'allow_affiliate', - 'unique_id': 'xyz12_allow_affiliate', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Allow affiliate & tracking links', - }), - 'context': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block bypass methods', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - 
}) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, 
- 'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 
'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_binary_Sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 
None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_binary_Sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, 
- 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 'xyz12_idn_homograph_attacks_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), 
- 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'typosquatting_protection', - 'unique_id': 'xyz12_typosquatting_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-state] 
- StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Typosquatting protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_web3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_web3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Web3', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'web3', - 'unique_id': 'xyz12_web3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_web3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Web3', - }), - 'context': , - 'entity_id': 'switch.fake_profile_web3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensor[binary_sensor.fake_profile_device_connection_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1184,1094 +93,3 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': 
None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AI-Driven threat detection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ai_threat_detection', - 'unique_id': 'xyz12_ai_threat_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile AI-Driven threat detection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow affiliate & tracking links', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'allow_affiliate', - 'unique_id': 'xyz12_allow_affiliate', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Allow affiliate & tracking links', - }), - 'context': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block bypass methods', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) 
-# --- -# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 
'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 
'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 
'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, 
- 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': 
set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 
'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: 
test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 'xyz12_idn_homograph_attacks_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# 
--- -# name: test_binary_sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'typosquatting_protection', - 'unique_id': 'xyz12_typosquatting_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Typosquatting protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_web3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_web3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Web3', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'web3', - 'unique_id': 'xyz12_web3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_web3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Web3', - }), - 'context': , - 'entity_id': 'switch.fake_profile_web3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/nextdns/snapshots/test_sensor.ambr b/tests/components/nextdns/snapshots/test_sensor.ambr index 34b40433e3b..14bebea53f8 100644 --- a/tests/components/nextdns/snapshots/test_sensor.ambr +++ 
b/tests/components/nextdns/snapshots/test_sensor.ambr @@ -1,144 +1,4 @@ # serializer version: 1 -# name: test_sensor[binary_sensor.fake_profile_device_connection_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.fake_profile_device_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_connection_status', - 'unique_id': 'xyz12_this_device_nextdns_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[binary_sensor.fake_profile_device_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device profile 
connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_profile_connection_status', - 'unique_id': 'xyz12_this_device_profile_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device profile connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[button.fake_profile_clear_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.fake_profile_clear_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clear logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'clear_logs', - 'unique_id': 'xyz12_clear_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[button.fake_profile_clear_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Clear logs', - }), - 'context': , - 'entity_id': 'button.fake_profile_clear_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensor[sensor.fake_profile_dns_over_http_3_queries-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1389,3361 +1249,3 @@ 'state': '40', }) # --- -# name: 
test_sensor[switch.fake_profile_ai_driven_threat_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AI-Driven threat detection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ai_threat_detection', - 'unique_id': 'xyz12_ai_threat_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile AI-Driven threat detection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow affiliate & tracking links', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'allow_affiliate', - 'unique_id': 'xyz12_allow_affiliate', - 
'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Allow affiliate & tracking links', - }), - 'context': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_9gag-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block 9GAG', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_amazon-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_amazon', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Amazon', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_amazon', - 'unique_id': 'xyz12_block_amazon', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_amazon-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Amazon', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_amazon', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_bereal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bereal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block BeReal', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bereal', - 'unique_id': 'xyz12_block_bereal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_bereal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block BeReal', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bereal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_blizzard-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_blizzard', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Blizzard', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_blizzard', - 'unique_id': 'xyz12_block_blizzard', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_blizzard-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Blizzard', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_blizzard', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block bypass methods', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_chatgpt-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_chatgpt', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block ChatGPT', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_chatgpt', - 'unique_id': 'xyz12_block_chatgpt', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensor[switch.fake_profile_block_chatgpt-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block ChatGPT', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_chatgpt', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dailymotion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dailymotion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, 
- 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Dailymotion', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_dailymotion', - 'unique_id': 'xyz12_block_dailymotion', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dailymotion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Dailymotion', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dailymotion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dating', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_dating', - 'unique_id': 'xyz12_block_dating', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dating', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_discord-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_discord', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Discord', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_discord', - 'unique_id': 'xyz12_block_discord', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_discord-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Discord', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_discord', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 
'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_disney_plus-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disney_plus', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Disney Plus', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disneyplus', - 'unique_id': 'xyz12_block_disneyplus', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_disney_plus-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Disney Plus', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disney_plus', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_ebay-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_ebay', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block eBay', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ebay', - 'unique_id': 'xyz12_block_ebay', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_ebay-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block eBay', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_ebay', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_facebook-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_facebook', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - 
}), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Facebook', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_facebook', - 'unique_id': 'xyz12_block_facebook', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_facebook-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Facebook', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_facebook', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_fortnite-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_fortnite', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Fortnite', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_fortnite', - 'unique_id': 'xyz12_block_fortnite', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_fortnite-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Fortnite', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_fortnite', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_gambling-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_gambling', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block gambling', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_gambling', - 'unique_id': 'xyz12_block_gambling', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_gambling-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block gambling', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_gambling', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_google_chat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_google_chat', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Google Chat', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_google_chat', - 'unique_id': 'xyz12_block_google_chat', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_google_chat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Google Chat', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_google_chat', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_hbo_max-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_hbo_max', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block HBO Max', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_hbomax', - 'unique_id': 'xyz12_block_hbomax', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_hbo_max-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block HBO Max', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_hbo_max', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_hulu-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_hulu', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Hulu', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xyz12_block_hulu', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_hulu-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Hulu', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_hulu', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_imgur-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_imgur', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Imgur', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_imgur', - 'unique_id': 'xyz12_block_imgur', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_imgur-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Imgur', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_imgur', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_instagram-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_instagram', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Instagram', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'block_instagram', - 'unique_id': 'xyz12_block_instagram', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_instagram-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Instagram', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_instagram', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_league_of_legends-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_league_of_legends', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block League of Legends', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_leagueoflegends', - 'unique_id': 'xyz12_block_leagueoflegends', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_league_of_legends-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block League of Legends', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_league_of_legends', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_mastodon-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_mastodon', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Mastodon', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_mastodon', - 'unique_id': 'xyz12_block_mastodon', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_mastodon-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Mastodon', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_mastodon', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_messenger-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_messenger', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Messenger', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_messenger', - 'unique_id': 'xyz12_block_messenger', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_messenger-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Messenger', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_messenger', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_minecraft-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_minecraft', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Minecraft', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_minecraft', - 'unique_id': 'xyz12_block_minecraft', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_minecraft-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Minecraft', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_minecraft', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_netflix-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_netflix', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Netflix', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_netflix', - 'unique_id': 'xyz12_block_netflix', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_netflix-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Netflix', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_netflix', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_newly_registered_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_online_gaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_online_gaming', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block online gaming', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'block_online_gaming', - 'unique_id': 'xyz12_block_online_gaming', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_online_gaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block online gaming', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_online_gaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': 
None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_pinterest-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_pinterest', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Pinterest', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_pinterest', - 'unique_id': 'xyz12_block_pinterest', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_pinterest-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Pinterest', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_pinterest', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_piracy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_piracy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block piracy', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_piracy', - 'unique_id': 'xyz12_block_piracy', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_piracy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block piracy', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_piracy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_playstation_network-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_playstation_network', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block PlayStation Network', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_playstation_network', - 'unique_id': 'xyz12_block_playstation_network', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_playstation_network-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block PlayStation Network', - }), - 'context': , - 'entity_id': 
'switch.fake_profile_block_playstation_network', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_porn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_porn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block porn', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_porn', - 'unique_id': 'xyz12_block_porn', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_porn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block porn', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_porn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_prime_video-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_prime_video', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Prime Video', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_primevideo', - 'unique_id': 'xyz12_block_primevideo', - 'unit_of_measurement': 
None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_prime_video-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Prime Video', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_prime_video', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_reddit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_reddit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Reddit', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_reddit', - 'unique_id': 'xyz12_block_reddit', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_reddit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Reddit', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_reddit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_roblox-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_roblox', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 
'original_name': 'Block Roblox', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_roblox', - 'unique_id': 'xyz12_block_roblox', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_roblox-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Roblox', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_roblox', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_signal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_signal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Signal', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_signal', - 'unique_id': 'xyz12_block_signal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_signal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Signal', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_signal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_skype-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_skype', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Skype', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_skype', - 'unique_id': 'xyz12_block_skype', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_skype-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Skype', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_skype', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_snapchat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_snapchat', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Snapchat', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_snapchat', - 'unique_id': 'xyz12_block_snapchat', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_snapchat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Snapchat', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_snapchat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_social_networks-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': 
None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_social_networks', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block social networks', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_social_networks', - 'unique_id': 'xyz12_block_social_networks', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_social_networks-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block social networks', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_social_networks', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_spotify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_spotify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Spotify', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_spotify', - 'unique_id': 'xyz12_block_spotify', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_spotify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Spotify', - }), - 'context': , - 
'entity_id': 'switch.fake_profile_block_spotify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_steam-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_steam', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Steam', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_steam', - 'unique_id': 'xyz12_block_steam', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_steam-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Steam', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_steam', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_telegram-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_telegram', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Telegram', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_telegram', - 'unique_id': 'xyz12_block_telegram', - 'unit_of_measurement': 
None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_telegram-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Telegram', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_telegram', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tiktok-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tiktok', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block TikTok', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tiktok', - 'unique_id': 'xyz12_block_tiktok', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tiktok-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block TikTok', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tiktok', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tinder-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tinder', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 
'original_name': 'Block Tinder', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tinder', - 'unique_id': 'xyz12_block_tinder', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tinder-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Tinder', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tinder', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tumblr-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tumblr', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Tumblr', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tumblr', - 'unique_id': 'xyz12_block_tumblr', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tumblr-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Tumblr', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tumblr', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_twitch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_twitch', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Twitch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_twitch', - 'unique_id': 'xyz12_block_twitch', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_twitch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Twitch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_twitch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_video_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_video_streaming', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block video streaming', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_video_streaming', - 'unique_id': 'xyz12_block_video_streaming', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_video_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block video streaming', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_video_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_vimeo-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_vimeo', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Vimeo', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_vimeo', - 'unique_id': 'xyz12_block_vimeo', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_vimeo-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Vimeo', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_vimeo', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_vk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_vk', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block VK', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_vk', - 'unique_id': 'xyz12_block_vk', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_vk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block VK', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_vk', 
- 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_whatsapp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_whatsapp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block WhatsApp', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_whatsapp', - 'unique_id': 'xyz12_block_whatsapp', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_whatsapp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block WhatsApp', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_whatsapp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block X (formerly Twitter)', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_twitter', - 'unique_id': 'xyz12_block_twitter', - 'unit_of_measurement': 
None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block X (formerly Twitter)', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_xbox_live-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_xbox_live', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Xbox Live', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_xboxlive', - 'unique_id': 'xyz12_block_xboxlive', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_xbox_live-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Xbox Live', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_xbox_live', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_youtube-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_youtube', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block YouTube', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_youtube', - 'unique_id': 'xyz12_block_youtube', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_youtube-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block YouTube', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_youtube', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_zoom-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_zoom', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Zoom', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_zoom', - 'unique_id': 'xyz12_block_zoom', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_zoom-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Zoom', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_zoom', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 
'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cryptojacking_protection-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: 
test_sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 'xyz12_idn_homograph_attacks_protection', - 
'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 
'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'typosquatting_protection', - 'unique_id': 'xyz12_typosquatting_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_typosquatting_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Typosquatting protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_web3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, 
- 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_web3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Web3', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'web3', - 'unique_id': 'xyz12_web3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_web3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Web3', - }), - 'context': , - 'entity_id': 'switch.fake_profile_web3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/nextdns/snapshots/test_switch.ambr b/tests/components/nextdns/snapshots/test_switch.ambr index 8472f02e8c5..3328e341a2e 100644 --- a/tests/components/nextdns/snapshots/test_switch.ambr +++ b/tests/components/nextdns/snapshots/test_switch.ambr @@ -1,1394 +1,4 @@ # serializer version: 1 -# name: test_switch[binary_sensor.fake_profile_device_connection_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.fake_profile_device_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_connection_status', - 'unique_id': 
'xyz12_this_device_nextdns_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[binary_sensor.fake_profile_device_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device profile connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_profile_connection_status', - 'unique_id': 'xyz12_this_device_profile_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device profile connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch[button.fake_profile_clear_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.fake_profile_clear_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clear logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'clear_logs', - 'unique_id': 'xyz12_clear_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[button.fake_profile_clear_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Clear logs', - }), - 'context': , - 'entity_id': 'button.fake_profile_clear_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTP/3 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh3_queries', - 'unique_id': 'xyz12_doh3_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries', - 'state_class': , - 'unit_of_measurement': 'queries', 
- }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTP/3 queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh3_queries_ratio', - 'unique_id': 'xyz12_doh3_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '13.0', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTPS queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh_queries', - 'unique_id': 'xyz12_doh_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTPS queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTPS queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh_queries_ratio', - 'unique_id': 'xyz12_doh_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTPS queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17.4', - }) -# --- -# name: 
test_switch[sensor.fake_profile_dns_over_quic_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-QUIC queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doq_queries', - 'unique_id': 'xyz12_doq_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-QUIC queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-QUIC queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doq_queries_ratio', - 
'unique_id': 'xyz12_doq_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-QUIC queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.7', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-TLS queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dot_queries', - 'unique_id': 'xyz12_dot_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-TLS queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-TLS queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dot_queries_ratio', - 'unique_id': 'xyz12_dot_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-TLS queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26.1', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'all_queries', - 'unique_id': 'xyz12_all_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries', - 'state_class': , - 
'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries blocked', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'blocked_queries', - 'unique_id': 'xyz12_blocked_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries blocked', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 
'original_icon': None, - 'original_name': 'DNS queries blocked ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'blocked_queries_ratio', - 'unique_id': 'xyz12_blocked_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries blocked ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.0', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_relayed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_relayed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries relayed', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relayed_queries', - 'unique_id': 'xyz12_relayed_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_relayed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries relayed', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_relayed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: 
test_switch[sensor.fake_profile_dnssec_not_validated_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC not validated queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'not_validated_queries', - 'unique_id': 'xyz12_not_validated_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC not validated queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '25', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC validated queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'validated_queries', - 'unique_id': 'xyz12_validated_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC validated queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '75', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC validated queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'validated_queries_ratio', - 'unique_id': 'xyz12_validated_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC validated queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '75.0', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_encrypted_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Encrypted queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'encrypted_queries', - 'unique_id': 'xyz12_encrypted_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Encrypted queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_encrypted_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Encrypted queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'encrypted_queries_ratio', - 'unique_id': 'xyz12_encrypted_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: 
test_switch[sensor.fake_profile_encrypted_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Encrypted queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60.0', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv4_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv4_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv4 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv4_queries', - 'unique_id': 'xyz12_ipv4_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv4_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv4 queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv4_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '90', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv6_queries', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv6 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv6_queries', - 'unique_id': 'xyz12_ipv6_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv6 queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv6_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv6 queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv6_queries_ratio', - 'unique_id': 'xyz12_ipv6_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv6 queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.0', - }) -# --- -# name: 
test_switch[sensor.fake_profile_tcp_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_tcp_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'TCP queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tcp_queries', - 'unique_id': 'xyz12_tcp_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile TCP queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_tcp_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'TCP queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tcp_queries_ratio', - 'unique_id': 'xyz12_tcp_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: 
test_switch[sensor.fake_profile_tcp_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile TCP queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_udp_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'UDP queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'udp_queries', - 'unique_id': 'xyz12_udp_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile UDP queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_udp_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_udp_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 
'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'UDP queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'udp_queries_ratio', - 'unique_id': 'xyz12_udp_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile UDP queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_udp_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '34.8', - }) -# --- -# name: test_switch[sensor.fake_profile_unencrypted_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_unencrypted_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Unencrypted queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'unencrypted_queries', - 'unique_id': 'xyz12_unencrypted_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_unencrypted_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Unencrypted queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_unencrypted_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# 
--- # name: test_switch[switch.fake_profile_ai_driven_threat_detection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/nextdns/test_config_flow.py b/tests/components/nextdns/test_config_flow.py index 9247288eebf..7571eef347e 100644 --- a/tests/components/nextdns/test_config_flow.py +++ b/tests/components/nextdns/test_config_flow.py @@ -4,6 +4,7 @@ from unittest.mock import patch from nextdns import ApiError, InvalidApiKeyError import pytest +from tenacity import RetryError from homeassistant.components.nextdns.const import CONF_PROFILE_ID, DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -57,6 +58,7 @@ async def test_form_create_entry(hass: HomeAssistant) -> None: [ (ApiError("API Error"), "cannot_connect"), (InvalidApiKeyError, "invalid_api_key"), + (RetryError("Retry Error"), "cannot_connect"), (TimeoutError, "cannot_connect"), (ValueError, "unknown"), ], diff --git a/tests/components/nextdns/test_diagnostics.py b/tests/components/nextdns/test_diagnostics.py index 7652bc4f03e..3bb1fc3ee67 100644 --- a/tests/components/nextdns/test_diagnostics.py +++ b/tests/components/nextdns/test_diagnostics.py @@ -1,6 +1,7 @@ """Test NextDNS diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -18,4 +19,6 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" entry = await init_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/nextdns/test_init.py b/tests/components/nextdns/test_init.py index f7b85bb8a54..61a487d917c 100644 --- a/tests/components/nextdns/test_init.py +++ b/tests/components/nextdns/test_init.py @@ -3,6 +3,8 @@ from unittest.mock import patch from nextdns import ApiError +import pytest +from 
tenacity import RetryError from homeassistant.components.nextdns.const import CONF_PROFILE_ID, DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -24,7 +26,10 @@ async def test_async_setup_entry(hass: HomeAssistant) -> None: assert state.state == "20.0" -async def test_config_not_ready(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + "exc", [ApiError("API Error"), RetryError("Retry Error"), TimeoutError] +) +async def test_config_not_ready(hass: HomeAssistant, exc: Exception) -> None: """Test for setup failure if the connection to the service fails.""" entry = MockConfigEntry( domain=DOMAIN, @@ -35,7 +40,7 @@ async def test_config_not_ready(hass: HomeAssistant) -> None: with patch( "homeassistant.components.nextdns.NextDns.get_profiles", - side_effect=ApiError("API Error"), + side_effect=exc, ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/nextdns/test_switch.py b/tests/components/nextdns/test_switch.py index 059585e9ffe..6e344e34336 100644 --- a/tests/components/nextdns/test_switch.py +++ b/tests/components/nextdns/test_switch.py @@ -8,6 +8,7 @@ from aiohttp.client_exceptions import ClientConnectorError from nextdns import ApiError import pytest from syrupy import SnapshotAssertion +from tenacity import RetryError from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( @@ -94,7 +95,15 @@ async def test_switch_off(hass: HomeAssistant) -> None: mock_switch_on.assert_called_once() -async def test_availability(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + "exc", + [ + ApiError("API Error"), + RetryError("Retry Error"), + TimeoutError, + ], +) +async def test_availability(hass: HomeAssistant, exc: Exception) -> None: """Ensure that we mark the entities unavailable correctly when service causes an error.""" await init_integration(hass) @@ -106,7 +115,7 @@ async def test_availability(hass: HomeAssistant) -> None: future = 
utcnow() + timedelta(minutes=10) with patch( "homeassistant.components.nextdns.NextDns.get_settings", - side_effect=ApiError("API Error"), + side_effect=exc, ): async_fire_time_changed(hass, future) await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/nibe_heatpump/conftest.py b/tests/components/nibe_heatpump/conftest.py index c44875414e2..47b65772a24 100644 --- a/tests/components/nibe_heatpump/conftest.py +++ b/tests/components/nibe_heatpump/conftest.py @@ -1,12 +1,12 @@ """Test configuration for Nibe Heat Pump.""" +from collections.abc import Generator from contextlib import ExitStack from unittest.mock import AsyncMock, Mock, patch from freezegun.api import FrozenDateTimeFactory from nibe.exceptions import CoilNotFoundException import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/nibe_heatpump/snapshots/test_climate.ambr b/tests/components/nibe_heatpump/snapshots/test_climate.ambr index fb3e2d1003b..2db9a813bff 100644 --- a/tests/components/nibe_heatpump/snapshots/test_climate.ambr +++ b/tests/components/nibe_heatpump/snapshots/test_climate.ambr @@ -97,12 +97,6 @@ 'state': 'unavailable', }) # --- -# name: test_active_accessory[Model.S320-s2-climate.climate_system_21][initial] - None -# --- -# name: test_active_accessory[Model.S320-s2-climate.climate_system_s1][initial] - None -# --- # name: test_basic[Model.F1155-s2-climate.climate_system_s2][cooling] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/nibe_heatpump/test_config_flow.py b/tests/components/nibe_heatpump/test_config_flow.py index 471f7f4c593..de5f577fa7d 100644 --- a/tests/components/nibe_heatpump/test_config_flow.py +++ b/tests/components/nibe_heatpump/test_config_flow.py @@ -38,7 +38,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def _get_connection_form( hass: HomeAssistant, connection_type: str -) -> FlowResultType: +) -> 
config_entries.ConfigFlowResult: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/nice_go/__init__.py b/tests/components/nice_go/__init__.py new file mode 100644 index 00000000000..0208795a12c --- /dev/null +++ b/tests/components/nice_go/__init__.py @@ -0,0 +1,22 @@ +"""Tests for the Nice G.O. integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.nice_go.PLATFORMS", + platforms, + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/nice_go/conftest.py b/tests/components/nice_go/conftest.py new file mode 100644 index 00000000000..31b21083c05 --- /dev/null +++ b/tests/components/nice_go/conftest.py @@ -0,0 +1,78 @@ +"""Common fixtures for the Nice G.O. tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from nice_go import Barrier, BarrierState, ConnectionState +import pytest + +from homeassistant.components.nice_go.const import ( + CONF_REFRESH_TOKEN, + CONF_REFRESH_TOKEN_CREATION_TIME, + DOMAIN, +) +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD + +from tests.common import MockConfigEntry, load_json_array_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.nice_go.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_nice_go() -> Generator[AsyncMock]: + """Mock a Nice G.O. 
client.""" + with ( + patch( + "homeassistant.components.nice_go.coordinator.NiceGOApi", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.nice_go.config_flow.NiceGOApi", + new=mock_client, + ), + ): + client = mock_client.return_value + client.authenticate.return_value = "test-refresh-token" + client.authenticate_refresh.return_value = None + client.id_token = None + client.get_all_barriers.return_value = [ + Barrier( + id=barrier["id"], + type=barrier["type"], + controlLevel=barrier["controlLevel"], + attr=barrier["attr"], + state=BarrierState( + **barrier["state"], + connectionState=ConnectionState(**barrier["connectionState"]), + ), + api=client, + ) + for barrier in load_json_array_fixture("get_all_barriers.json", DOMAIN) + ] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + entry_id="acefdd4b3a4a0911067d1cf51414201e", + title="test-email", + data={ + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + CONF_REFRESH_TOKEN: "test-refresh-token", + CONF_REFRESH_TOKEN_CREATION_TIME: 1722184160.738171, + }, + version=1, + unique_id="test-email", + ) diff --git a/tests/components/nice_go/fixtures/device_state_update.json b/tests/components/nice_go/fixtures/device_state_update.json new file mode 100644 index 00000000000..53d89c5411b --- /dev/null +++ b/tests/components/nice_go/fixtures/device_state_update.json @@ -0,0 +1,21 @@ +{ + "data": { + "devicesStatesUpdateFeed": { + "receiver": "ORG/0:2372", + "item": { + "deviceId": "1", + "desired": "{\"key\":\"value\"}", + "reported": "{\"displayName\":\"Test Garage 1\",\"autoDisabled\":false,\"migrationStatus\":\"DONE\",\"deviceId\":\"1\",\"lightStatus\":\"0,100\",\"vcnMode\":false,\"deviceFwVersion\":\"1.2.3.4.5.6\",\"barrierStatus\":\"0,0,1,0,-1,0,3,0\"}", + "timestamp": 123, + "version": 123, + "connectionState": { + "connected": true, + "updatedTimestamp": "123", + "__typename": 
"DeviceConnectionState" + }, + "__typename": "DeviceState" + }, + "__typename": "DeviceStateUpdateNotice" + } + } +} diff --git a/tests/components/nice_go/fixtures/device_state_update_1.json b/tests/components/nice_go/fixtures/device_state_update_1.json new file mode 100644 index 00000000000..cc718e8b093 --- /dev/null +++ b/tests/components/nice_go/fixtures/device_state_update_1.json @@ -0,0 +1,21 @@ +{ + "data": { + "devicesStatesUpdateFeed": { + "receiver": "ORG/0:2372", + "item": { + "deviceId": "2", + "desired": "{\"key\":\"value\"}", + "reported": "{\"displayName\":\"Test Garage 2\",\"autoDisabled\":false,\"migrationStatus\":\"DONE\",\"deviceId\":\"2\",\"lightStatus\":\"1,100\",\"vcnMode\":false,\"deviceFwVersion\":\"1.2.3.4.5.6\",\"barrierStatus\":\"1,100,2,0,-1,0,3,0\"}", + "timestamp": 123, + "version": 123, + "connectionState": { + "connected": true, + "updatedTimestamp": "123", + "__typename": "DeviceConnectionState" + }, + "__typename": "DeviceState" + }, + "__typename": "DeviceStateUpdateNotice" + } + } +} diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json new file mode 100644 index 00000000000..481c73d91a8 --- /dev/null +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -0,0 +1,64 @@ +[ + { + "id": "1", + "type": "WallStation", + "controlLevel": "Owner", + "attr": [ + { + "key": "organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "1", + "desired": { "key": "value" }, + "reported": { + "displayName": "Test Garage 1", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "1", + "lightStatus": "1,100", + "vcnMode": false, + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "0,0,0,0,-1,0,3,0" + }, + "timestamp": null, + "version": null + }, + "connectionState": { + "connected": true, + "updatedTimestamp": "123" + } + }, + { + "id": "2", + "type": "WallStation", + "controlLevel": "Owner", + "attr": [ + { + "key": 
"organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "2", + "desired": { "key": "value" }, + "reported": { + "displayName": "Test Garage 2", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "2", + "lightStatus": "0,100", + "vcnMode": false, + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "1,100,0,0,-1,0,3,0" + }, + "timestamp": null, + "version": null + }, + "connectionState": { + "connected": true, + "updatedTimestamp": "123" + } + } +] diff --git a/tests/components/nice_go/snapshots/test_cover.ambr b/tests/components/nice_go/snapshots/test_cover.ambr new file mode 100644 index 00000000000..8f85fea2726 --- /dev/null +++ b/tests/components/nice_go/snapshots/test_cover.ambr @@ -0,0 +1,193 @@ +# serializer version: 1 +# name: test_covers[cover.test_garage_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1-cover', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_covers[cover.test_garage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '2-cover', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 2', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_covers[cover.test_garage_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'test3-GDO', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 3', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'opening', + }) +# --- +# name: 
test_covers[cover.test_garage_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'test4-GDO', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 4', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closing', + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..b7d564b619b --- /dev/null +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -0,0 +1,43 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'coordinator_data': dict({ + '1': dict({ + 'barrier_status': 'closed', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '1', + 'light_status': True, + 'name': 'Test Garage 1', + }), + '2': dict({ + 'barrier_status': 'open', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '2', + 'light_status': False, + 'name': 'Test Garage 2', + }), + }), + 'entry': dict({ + 'data': dict({ + 'email': '**REDACTED**', + 'password': '**REDACTED**', + 'refresh_token': '**REDACTED**', + 'refresh_token_creation_time': 1722184160.738171, + }), + 'disabled_by': None, + 'domain': 'nice_go', 
+ 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': '**REDACTED**', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_init.ambr b/tests/components/nice_go/snapshots/test_init.ambr new file mode 100644 index 00000000000..ff389568d1b --- /dev/null +++ b/tests/components/nice_go/snapshots/test_init.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_on_data_none_parsed + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_light.ambr b/tests/components/nice_go/snapshots/test_light.ambr new file mode 100644 index 00000000000..294488e3d46 --- /dev/null +++ b/tests/components/nice_go/snapshots/test_light.ambr @@ -0,0 +1,223 @@ +# serializer version: 1 +# name: test_data[light.test_garage_1_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_1_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': '1-light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_1_light-state] 
+ StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'Test Garage 1 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_1_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_data[light.test_garage_2_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_2_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': '2-light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_2_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'Test Garage 2 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_2_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_data[light.test_garage_3_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_3_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'test3-Light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_3_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'friendly_name': 'Test Garage 3 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_3_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_data[light.test_garage_4_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_4_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'test4-Light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_4_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Test Garage 4 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_4_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git 
"""Tests for the Nice G.O. config flow."""

from unittest.mock import AsyncMock

from freezegun.api import FrozenDateTimeFactory
from nice_go import AuthFailedError
import pytest

from homeassistant.components.nice_go.const import (
    CONF_REFRESH_TOKEN,
    CONF_REFRESH_TOKEN_CREATION_TIME,
    DOMAIN,
)
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from tests.common import MockConfigEntry

# Credentials submitted through the user step in every test below.
USER_INPUT = {CONF_EMAIL: "test-email", CONF_PASSWORD: "test-password"}


async def test_form(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_setup_entry: AsyncMock,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test that a successful user flow creates a config entry."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert not result["errors"]

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], USER_INPUT
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "test-email"
    assert result["data"][CONF_EMAIL] == "test-email"
    assert result["data"][CONF_PASSWORD] == "test-password"
    assert result["data"][CONF_REFRESH_TOKEN] == "test-refresh-token"
    assert CONF_REFRESH_TOKEN_CREATION_TIME in result["data"]
    assert result["result"].unique_id == "test-email"
    assert len(mock_setup_entry.mock_calls) == 1


@pytest.mark.parametrize(
    ("side_effect", "expected_error"),
    [(AuthFailedError, "invalid_auth"), (Exception, "unknown")],
)
async def test_form_exceptions(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_setup_entry: AsyncMock,
    side_effect: Exception,
    expected_error: str,
) -> None:
    """Test that authentication errors are surfaced and the flow can recover."""
    mock_nice_go.authenticate.side_effect = side_effect
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], USER_INPUT
    )

    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {"base": expected_error}

    # The failure is transient: clearing it lets the same flow finish.
    mock_nice_go.authenticate.side_effect = None
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], USER_INPUT
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY


async def test_duplicate_device(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_nice_go: AsyncMock,
) -> None:
    """Test that configuring an already configured account aborts."""
    mock_config_entry.add_to_hass(hass)
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], USER_INPUT
    )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"
"""Tests for the Nice G.O. cover platform."""

from unittest.mock import AsyncMock

from freezegun.api import FrozenDateTimeFactory
from syrupy import SnapshotAssertion

from homeassistant.components.cover import (
    DOMAIN as COVER_DOMAIN,
    SERVICE_CLOSE_COVER,
    SERVICE_OPEN_COVER,
)
from homeassistant.components.nice_go.const import DOMAIN
from homeassistant.const import (
    ATTR_ENTITY_ID,
    STATE_CLOSED,
    STATE_CLOSING,
    STATE_OPEN,
    STATE_OPENING,
    Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from . import setup_integration

from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform


async def test_covers(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test that data gets parsed and returned appropriately."""
    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)


async def test_open_cover(
    hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry
) -> None:
    """Test that opening the cover works as intended."""
    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    # Garage 2 is already open, so no barrier command should be issued.
    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_OPEN_COVER,
        {ATTR_ENTITY_ID: "cover.test_garage_2"},
        blocking=True,
    )
    assert mock_nice_go.open_barrier.call_count == 0

    # Garage 1 is closed and must actually be opened.
    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_OPEN_COVER,
        {ATTR_ENTITY_ID: "cover.test_garage_1"},
        blocking=True,
    )
    assert mock_nice_go.open_barrier.call_count == 1


async def test_close_cover(
    hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry
) -> None:
    """Test that closing the cover works as intended."""
    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    # Garage 1 is already closed, so no barrier command should be issued.
    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_CLOSE_COVER,
        {ATTR_ENTITY_ID: "cover.test_garage_1"},
        blocking=True,
    )
    assert mock_nice_go.close_barrier.call_count == 0

    # Garage 2 is open and must actually be closed.
    await hass.services.async_call(
        COVER_DOMAIN,
        SERVICE_CLOSE_COVER,
        {ATTR_ENTITY_ID: "cover.test_garage_2"},
        blocking=True,
    )
    assert mock_nice_go.close_barrier.call_count == 1


async def test_update_cover_state(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test that pushed device state updates are reflected by the covers."""
    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    assert hass.states.get("cover.test_garage_1").state == STATE_CLOSED
    assert hass.states.get("cover.test_garage_2").state == STATE_OPEN

    # Feed two recorded state-update events through the coordinator.
    update_garage_1 = load_json_object_fixture("device_state_update.json", DOMAIN)
    await mock_config_entry.runtime_data.on_data(update_garage_1)
    update_garage_2 = load_json_object_fixture("device_state_update_1.json", DOMAIN)
    await mock_config_entry.runtime_data.on_data(update_garage_2)

    assert hass.states.get("cover.test_garage_1").state == STATE_OPENING
    assert hass.states.get("cover.test_garage_2").state == STATE_CLOSING
"""Tests for Nice G.O. integration setup and runtime behavior."""

from datetime import timedelta
from unittest.mock import AsyncMock, MagicMock

from freezegun.api import FrozenDateTimeFactory
from nice_go import ApiError, AuthFailedError, Barrier, BarrierState
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.nice_go.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

from . import setup_integration

from tests.common import MockConfigEntry, async_fire_time_changed


async def test_unload_entry(
    hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry
) -> None:
    """Test that a loaded entry unloads cleanly."""
    await setup_integration(hass, mock_config_entry, [])
    assert mock_config_entry.state is ConfigEntryState.LOADED

    await hass.config_entries.async_unload(mock_config_entry.entry_id)
    await hass.async_block_till_done()
    assert mock_config_entry.state is ConfigEntryState.NOT_LOADED


@pytest.mark.parametrize(
    ("side_effect", "entry_state"),
    [
        (
            AuthFailedError(),
            ConfigEntryState.SETUP_ERROR,
        ),
        (ApiError(), ConfigEntryState.SETUP_RETRY),
    ],
)
async def test_setup_failure(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
    side_effect: Exception,
    entry_state: ConfigEntryState,
) -> None:
    """Test that setup failures leave the entry in the expected state."""
    mock_nice_go.authenticate_refresh.side_effect = side_effect

    await setup_integration(hass, mock_config_entry, [])
    assert mock_config_entry.state is entry_state


async def test_firmware_update_required(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
    issue_registry: ir.IssueRegistry,
) -> None:
    """Test that an un-migrated barrier raises a repair issue."""
    mock_nice_go.get_all_barriers.return_value = [
        Barrier(
            id="test-device-id",
            type="test-type",
            controlLevel="test-control-level",
            attr=[{"key": "test-attr", "value": "test-value"}],
            state=BarrierState(
                deviceId="test-device-id",
                reported={
                    "displayName": "test-display-name",
                    "migrationStatus": "NOT_STARTED",
                },
                desired=None,
                connectionState=None,
                version=None,
                timestamp=None,
            ),
            api=mock_nice_go,
        )
    ]

    await setup_integration(hass, mock_config_entry, [])

    issue = issue_registry.async_get_issue(
        DOMAIN,
        "firmware_update_required_test-device-id",
    )
    assert issue


async def test_update_refresh_token(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test that an expired refresh token is renewed and stored."""
    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    assert mock_nice_go.authenticate_refresh.call_count == 1
    assert mock_nice_go.get_all_barriers.call_count == 1
    assert mock_nice_go.authenticate.call_count == 0

    # After 30 days the stored token is stale; a reload must re-authenticate.
    mock_nice_go.authenticate.return_value = "new-refresh-token"
    freezer.tick(timedelta(days=30))
    async_fire_time_changed(hass)
    assert await hass.config_entries.async_reload(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    assert mock_nice_go.authenticate_refresh.call_count == 1
    assert mock_nice_go.authenticate.call_count == 1
    assert mock_nice_go.get_all_barriers.call_count == 2
    assert mock_config_entry.data["refresh_token"] == "new-refresh-token"


async def test_update_refresh_token_api_error(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
    freezer: FrozenDateTimeFactory,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that an API error during token renewal fails the reload."""
    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    assert mock_nice_go.authenticate_refresh.call_count == 1
    assert mock_nice_go.get_all_barriers.call_count == 1
    assert mock_nice_go.authenticate.call_count == 0

    mock_nice_go.authenticate.side_effect = ApiError
    freezer.tick(timedelta(days=30))
    async_fire_time_changed(hass)
    assert not await hass.config_entries.async_reload(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    # The stale token must be kept unchanged when renewal fails.
    assert mock_nice_go.authenticate_refresh.call_count == 1
    assert mock_nice_go.authenticate.call_count == 1
    assert mock_nice_go.get_all_barriers.call_count == 1
    assert mock_config_entry.data["refresh_token"] == "test-refresh-token"
    assert "API error" in caplog.text


async def test_update_refresh_token_auth_failed(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
    freezer: FrozenDateTimeFactory,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that an auth failure during token renewal fails the reload."""
    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    assert mock_nice_go.authenticate_refresh.call_count == 1
    assert mock_nice_go.get_all_barriers.call_count == 1
    assert mock_nice_go.authenticate.call_count == 0

    mock_nice_go.authenticate.side_effect = AuthFailedError
    freezer.tick(timedelta(days=30))
    async_fire_time_changed(hass)
    assert not await hass.config_entries.async_reload(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    # The stale token must be kept unchanged when renewal fails.
    assert mock_nice_go.authenticate_refresh.call_count == 1
    assert mock_nice_go.authenticate.call_count == 1
    assert mock_nice_go.get_all_barriers.call_count == 1
    assert mock_config_entry.data["refresh_token"] == "test-refresh-token"
    assert "Authentication failed" in caplog.text


async def test_client_listen_api_error(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
    caplog: pytest.LogCaptureFixture,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test that a failed websocket connect is logged and retried."""
    mock_nice_go.connect.side_effect = ApiError

    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    assert "API error" in caplog.text

    mock_nice_go.connect.side_effect = None

    # The integration schedules a reconnect shortly after the failure.
    freezer.tick(timedelta(seconds=5))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

    assert mock_nice_go.connect.call_count == 2


async def test_on_data_none_parsed(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test handling of a device update whose state cannot be parsed."""
    mock_nice_go.event = MagicMock()

    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    # Invoke the registered data callback directly with a raw event payload.
    await mock_nice_go.event.call_args[0][0](
        {
            "data": {
                "devicesStatesUpdateFeed": {
                    "item": {
                        "deviceId": "1",
                        "desired": '{"key": "value"}',
                        "reported": '{"displayName":"test-display-name", "migrationStatus":"NOT_STARTED"}',
                        "connectionState": {
                            "connected": None,
                            "updatedTimestamp": None,
                        },
                        "version": None,
                        "timestamp": None,
                    }
                }
            }
        }
    )

    assert hass.states.get("cover.test_garage_1") == snapshot


async def test_on_connected(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test that the on-connected callback subscribes to updates."""
    mock_nice_go.event = MagicMock()

    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    assert mock_nice_go.event.call_count == 2

    mock_nice_go.subscribe = AsyncMock()
    # The first registered callback is the connection handler.
    await mock_nice_go.event.call_args_list[0][0][0]()

    assert mock_nice_go.subscribe.call_count == 1


async def test_no_connection_state(
    hass: HomeAssistant,
    mock_nice_go: AsyncMock,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test that a barrier without connection state becomes unavailable."""
    mock_nice_go.event = MagicMock()

    await setup_integration(hass, mock_config_entry, [Platform.COVER])

    assert mock_nice_go.event.call_count == 2

    await mock_nice_go.event.call_args[0][0](
        {
            "data": {
                "devicesStatesUpdateFeed": {
                    "item": {
                        "deviceId": "1",
                        "desired": '{"key": "value"}',
                        "reported": '{"displayName":"Test Garage 1", "migrationStatus":"DONE", "barrierStatus": "1,100,0", "deviceFwVersion": "1.0.0", "lightStatus": "1,100"}',
                        "connectionState": None,
                        "version": None,
                        "timestamp": None,
                    }
                }
            }
        }
    )

    assert hass.states.get("cover.test_garage_1").state == "unavailable"
unittest.mock import AsyncMock, Mock, patch @@ -8,7 +9,6 @@ from aionotion.listener.models import Listener from aionotion.sensor.models import Sensor from aionotion.user.models import UserPreferences import pytest -from typing_extensions import Generator from homeassistant.components.notion import CONF_REFRESH_TOKEN, CONF_USER_UUID, DOMAIN from homeassistant.const import CONF_USERNAME diff --git a/tests/components/notion/test_diagnostics.py b/tests/components/notion/test_diagnostics.py index 023b9369f03..4d87b6292e4 100644 --- a/tests/components/notion/test_diagnostics.py +++ b/tests/components/notion/test_diagnostics.py @@ -4,6 +4,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.notion import DOMAIN from homeassistant.core import HomeAssistant +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -33,6 +34,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": { "bridges": [ diff --git a/tests/components/nsw_fuel_station/test_sensor.py b/tests/components/nsw_fuel_station/test_sensor.py index 898d5757870..dbf52d937f0 100644 --- a/tests/components/nsw_fuel_station/test_sensor.py +++ b/tests/components/nsw_fuel_station/test_sensor.py @@ -23,7 +23,9 @@ VALID_CONFIG_EXPECTED_ENTITY_IDS = ["my_fake_station_p95", "my_fake_station_e10" class MockPrice: """Mock Price implementation.""" - def __init__(self, price, fuel_type, last_updated, price_unit, station_code): + def __init__( + self, price, fuel_type, last_updated, price_unit, station_code + ) -> None: """Initialize a mock price instance.""" self.price = price self.fuel_type = fuel_type @@ -35,7 +37,7 @@ class MockPrice: class MockStation: """Mock Station implementation.""" - def __init__(self, name, code): + def __init__(self, name, code) -> None: """Initialize a mock Station 
instance.""" self.name = name self.code = code @@ -44,7 +46,7 @@ class MockStation: class MockGetFuelPricesResponse: """Mock GetFuelPricesResponse implementation.""" - def __init__(self, prices, stations): + def __init__(self, prices, stations) -> None: """Initialize a mock GetFuelPricesResponse instance.""" self.prices = prices self.stations = stations diff --git a/tests/components/numato/conftest.py b/tests/components/numato/conftest.py index c6fd13a099e..f3ae4d5f32b 100644 --- a/tests/components/numato/conftest.py +++ b/tests/components/numato/conftest.py @@ -1,17 +1,18 @@ """Fixtures for numato tests.""" from copy import deepcopy +from typing import Any import pytest from homeassistant.components import numato -from . import numato_mock from .common import NUMATO_CFG +from .numato_mock import NumatoModuleMock @pytest.fixture -def config(): +def config() -> dict[str, Any]: """Provide a copy of the numato domain's test configuration. This helps to quickly change certain aspects of the configuration scoped @@ -21,8 +22,8 @@ def config(): @pytest.fixture -def numato_fixture(monkeypatch): +def numato_fixture(monkeypatch: pytest.MonkeyPatch) -> NumatoModuleMock: """Inject the numato mockup into numato homeassistant module.""" - module_mock = numato_mock.NumatoModuleMock() + module_mock = NumatoModuleMock() monkeypatch.setattr(numato, "gpio", module_mock) return module_mock diff --git a/tests/components/numato/numato_mock.py b/tests/components/numato/numato_mock.py index 097a785beb1..208beffe83f 100644 --- a/tests/components/numato/numato_mock.py +++ b/tests/components/numato/numato_mock.py @@ -8,14 +8,14 @@ class NumatoModuleMock: NumatoGpioError = NumatoGpioError - def __init__(self): + def __init__(self) -> None: """Initialize the numato_gpio module mockup class.""" self.devices = {} class NumatoDeviceMock: """Mockup for the numato_gpio.NumatoUsbGpio class.""" - def __init__(self, device): + def __init__(self, device) -> None: """Initialize numato device mockup.""" 
self.device = device self.callbacks = {} diff --git a/tests/components/numato/test_binary_sensor.py b/tests/components/numato/test_binary_sensor.py index 524589af198..08506349247 100644 --- a/tests/components/numato/test_binary_sensor.py +++ b/tests/components/numato/test_binary_sensor.py @@ -21,7 +21,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) diff --git a/tests/components/numato/test_init.py b/tests/components/numato/test_init.py index 35dd102ec9e..4695265f37f 100644 --- a/tests/components/numato/test_init.py +++ b/tests/components/numato/test_init.py @@ -11,7 +11,7 @@ from .common import NUMATO_CFG, mockup_raise, mockup_return async def test_setup_no_devices( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test handling of an 'empty' discovery. @@ -24,7 +24,7 @@ async def test_setup_no_devices( async def test_fail_setup_raising_discovery( - hass: HomeAssistant, numato_fixture, caplog: pytest.LogCaptureFixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test handling of an exception during discovery. 
@@ -57,7 +57,7 @@ async def test_hass_numato_api_wrong_port_directions( async def test_hass_numato_api_errors( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test whether Home Assistant numato API (re-)raises errors.""" numato_fixture.discover() diff --git a/tests/components/numato/test_sensor.py b/tests/components/numato/test_sensor.py index 30a9f174941..c652df9b086 100644 --- a/tests/components/numato/test_sensor.py +++ b/tests/components/numato/test_sensor.py @@ -1,5 +1,7 @@ """Tests for the numato sensor platform.""" +import pytest + from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import discovery @@ -13,7 +15,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) @@ -24,7 +26,7 @@ async def test_failing_setups_no_entities( async def test_failing_sensor_update( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test condition when a sensor update fails.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "adc_read", mockup_raise) diff --git a/tests/components/numato/test_switch.py b/tests/components/numato/test_switch.py index e69b3481b1d..42102ea4869 100644 --- a/tests/components/numato/test_switch.py +++ b/tests/components/numato/test_switch.py @@ -1,5 +1,7 @@ """Tests for the numato switch platform.""" +import pytest + from homeassistant.components import switch from homeassistant.const import ( ATTR_ENTITY_ID, @@ -20,7 +22,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, 
numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) @@ -69,7 +71,7 @@ async def test_regular_hass_operations(hass: HomeAssistant, numato_fixture) -> N async def test_failing_hass_operations( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test failing operations called from within Home Assistant. diff --git a/tests/components/number/test_init.py b/tests/components/number/test_init.py index 6f74a3126c0..721b531e8cd 100644 --- a/tests/components/number/test_init.py +++ b/tests/components/number/test_init.py @@ -1,10 +1,10 @@ """The tests for the Number component.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.components.number import ( ATTR_MAX, @@ -121,7 +121,7 @@ class MockNumberEntityDescr(NumberEntity): Step is calculated based on the smaller max_value and min_value. """ - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -145,7 +145,7 @@ class MockNumberEntityAttrWithDescription(NumberEntity): members take precedence over the entity description. """ - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -223,7 +223,7 @@ class MockNumberEntityDescrDeprecated(NumberEntity): Step is calculated based on the smaller max_value and min_value. 
""" - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -646,7 +646,7 @@ async def test_restore_number_restore_state( assert entity0.native_min_value == native_min_value assert entity0.native_step == native_step assert entity0.native_value == native_value - assert type(entity0.native_value) == native_value_type + assert type(entity0.native_value) is native_value_type assert entity0.native_unit_of_measurement == uom diff --git a/tests/components/nws/const.py b/tests/components/nws/const.py index e5fc9df909f..39e954af15a 100644 --- a/tests/components/nws/const.py +++ b/tests/components/nws/const.py @@ -66,6 +66,7 @@ CLEAR_NIGHT_OBSERVATION = DEFAULT_OBSERVATION.copy() CLEAR_NIGHT_OBSERVATION["iconTime"] = "night" SENSOR_EXPECTED_OBSERVATION_METRIC = { + "timestamp": "2019-08-12T23:53:00+00:00", "dewpoint": "5", "temperature": "10", "windChill": "5", @@ -80,6 +81,7 @@ SENSOR_EXPECTED_OBSERVATION_METRIC = { } SENSOR_EXPECTED_OBSERVATION_IMPERIAL = { + "timestamp": "2019-08-12T23:53:00+00:00", "dewpoint": str( round( TemperatureConverter.convert( @@ -185,6 +187,7 @@ DEFAULT_FORECAST = [ "temperature": 10, "windSpeedAvg": 10, "windBearing": 180, + "shortForecast": "A short forecast.", "detailedForecast": "A detailed forecast.", "timestamp": "2019-08-12T23:53:00+00:00", "iconTime": "night", diff --git a/tests/components/nws/snapshots/test_diagnostics.ambr b/tests/components/nws/snapshots/test_diagnostics.ambr index 2db73f90054..f8bd82a35c4 100644 --- a/tests/components/nws/snapshots/test_diagnostics.ambr +++ b/tests/components/nws/snapshots/test_diagnostics.ambr @@ -21,6 +21,7 @@ 'number': 1, 'probabilityOfPrecipitation': 89, 'relativeHumidity': 75, + 'shortForecast': 'A short forecast.', 'startTime': '2019-08-12T20:00:00-04:00', 'temperature': 10, 'timestamp': '2019-08-12T23:53:00+00:00', @@ -48,6 +49,7 @@ 'number': 1, 'probabilityOfPrecipitation': 89, 'relativeHumidity': 
75, + 'shortForecast': 'A short forecast.', 'startTime': '2019-08-12T20:00:00-04:00', 'temperature': 10, 'timestamp': '2019-08-12T23:53:00+00:00', diff --git a/tests/components/nws/snapshots/test_weather.ambr b/tests/components/nws/snapshots/test_weather.ambr index f4669f47615..1df1c2fa644 100644 --- a/tests/components/nws/snapshots/test_weather.ambr +++ b/tests/components/nws/snapshots/test_weather.ambr @@ -1,95 +1,44 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] +# name: test_detailed_forecast_service[hourly] dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'is_daytime': False, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), + 'weather.abc': dict({ + 'forecast': list([ + dict({ + 'datetime': '2019-08-12T20:00:00-04:00', + 'short_description': 'A short forecast.', + }), + ]), + }), }) # --- -# name: test_forecast_service[get_forecast].1 +# name: test_detailed_forecast_service[twice_daily] dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), + 'weather.abc': dict({ + 'forecast': list([ + dict({ + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'is_daytime': False, + 'short_description': 'A short forecast.', + }), + ]), + }), }) # --- -# name: test_forecast_service[get_forecast].2 +# name: test_detailed_forecast_service_no_data[hourly] dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': 
-15.6, - 'humidity': 75, - 'is_daytime': False, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), + 'weather.abc': dict({ + 'forecast': list([ + ]), + }), }) # --- -# name: test_forecast_service[get_forecast].3 +# name: test_detailed_forecast_service_no_data[twice_daily] dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].4 - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].5 - dict({ - 'forecast': list([ - ]), + 'weather.abc': dict({ + 'forecast': list([ + ]), + }), }) # --- # name: test_forecast_service[get_forecasts] @@ -99,7 +48,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'is_daytime': False, @@ -119,7 +67,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -138,7 +85,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'is_daytime': False, @@ -158,7 +104,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 
@pytest.mark.parametrize(
    "forecast_type",
    [
        "hourly",
        "twice_daily",
    ],
)
async def test_detailed_forecast_service(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    snapshot: SnapshotAssertion,
    mock_simple_nws,
    no_sensor,
    forecast_type: str,
) -> None:
    """Test the get_forecasts_extra service response."""
    entry = MockConfigEntry(
        domain=nws.DOMAIN,
        data=NWS_CONFIG,
    )
    entry.add_to_hass(hass)

    await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    response = await hass.services.async_call(
        nws.DOMAIN,
        "get_forecasts_extra",
        {
            "entity_id": "weather.abc",
            "type": forecast_type,
        },
        blocking=True,
        return_response=True,
    )
    assert response == snapshot


@pytest.mark.parametrize(
    "forecast_type",
    [
        "hourly",
        "twice_daily",
    ],
)
async def test_detailed_forecast_service_no_data(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    snapshot: SnapshotAssertion,
    mock_simple_nws,
    no_sensor,
    forecast_type: str,
) -> None:
    """Test the get_forecasts_extra service response without forecast data."""
    # Simulate the NWS API returning no forecasts at all.
    instance = mock_simple_nws.return_value
    instance.forecast = None
    instance.forecast_hourly = None
    entry = MockConfigEntry(
        domain=nws.DOMAIN,
        data=NWS_CONFIG,
    )
    entry.add_to_hass(hass)

    await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    response = await hass.services.async_call(
        nws.DOMAIN,
        "get_forecasts_extra",
        {
            "entity_id": "weather.abc",
            "type": forecast_type,
        },
        blocking=True,
        return_response=True,
    )
    assert response == snapshot
import MOCK_HISTORY, MOCK_STATUS, MOCK_VERSION @pytest.fixture -def nzbget_api(hass): +def nzbget_api() -> Generator[MagicMock]: """Mock NZBGetApi for easier testing.""" with patch("homeassistant.components.nzbget.coordinator.NZBGetAPI") as mock_api: instance = mock_api.return_value diff --git a/tests/components/nzbget/test_init.py b/tests/components/nzbget/test_init.py index a119bb953ce..baf0a37546d 100644 --- a/tests/components/nzbget/test_init.py +++ b/tests/components/nzbget/test_init.py @@ -3,6 +3,7 @@ from unittest.mock import patch from pynzbgetapi import NZBGetAPIException +import pytest from homeassistant.components.nzbget.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -13,7 +14,8 @@ from . import ENTRY_CONFIG, _patch_version, init_integration from tests.common import MockConfigEntry -async def test_unload_entry(hass: HomeAssistant, nzbget_api) -> None: +@pytest.mark.usefixtures("nzbget_api") +async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" entry = await init_integration(hass) diff --git a/tests/components/nzbget/test_sensor.py b/tests/components/nzbget/test_sensor.py index 30a7f262b0b..38f7d8a68c3 100644 --- a/tests/components/nzbget/test_sensor.py +++ b/tests/components/nzbget/test_sensor.py @@ -3,6 +3,8 @@ from datetime import timedelta from unittest.mock import patch +import pytest + from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, @@ -16,9 +18,8 @@ from homeassistant.util import dt as dt_util from . 
import init_integration -async def test_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api -) -> None: +@pytest.mark.usefixtures("nzbget_api") +async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test the creation and values of the sensors.""" now = dt_util.utcnow().replace(microsecond=0) with patch("homeassistant.components.nzbget.sensor.utcnow", return_value=now): diff --git a/tests/components/nzbget/test_switch.py b/tests/components/nzbget/test_switch.py index 1c518486b9f..afb88a7be82 100644 --- a/tests/components/nzbget/test_switch.py +++ b/tests/components/nzbget/test_switch.py @@ -1,5 +1,7 @@ """Test the NZBGet switches.""" +from unittest.mock import MagicMock + from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -16,7 +18,7 @@ from . import init_integration async def test_download_switch( - hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api + hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api: MagicMock ) -> None: """Test the creation and values of the download switch.""" instance = nzbget_api.return_value @@ -44,7 +46,9 @@ async def test_download_switch( assert state.state == STATE_OFF -async def test_download_switch_services(hass: HomeAssistant, nzbget_api) -> None: +async def test_download_switch_services( + hass: HomeAssistant, nzbget_api: MagicMock +) -> None: """Test download switch services.""" instance = nzbget_api.return_value diff --git a/tests/components/obihai/__init__.py b/tests/components/obihai/__init__.py index d43aa6a9bb8..b88f0a5c874 100644 --- a/tests/components/obihai/__init__.py +++ b/tests/components/obihai/__init__.py @@ -32,3 +32,4 @@ def get_schema_suggestion(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] + return None diff --git a/tests/components/obihai/conftest.py 
b/tests/components/obihai/conftest.py index c4edfdedf65..ef54c12ba26 100644 --- a/tests/components/obihai/conftest.py +++ b/tests/components/obihai/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for Obihai.""" +from collections.abc import Generator from socket import gaierror from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/ollama/__init__.py b/tests/components/ollama/__init__.py index 22a576e94a4..6ad77bb2217 100644 --- a/tests/components/ollama/__init__.py +++ b/tests/components/ollama/__init__.py @@ -1,7 +1,7 @@ """Tests for the Ollama integration.""" from homeassistant.components import ollama -from homeassistant.components.ollama.const import DEFAULT_PROMPT +from homeassistant.helpers import llm TEST_USER_DATA = { ollama.CONF_URL: "http://localhost:11434", @@ -9,6 +9,6 @@ TEST_USER_DATA = { } TEST_OPTIONS = { - ollama.CONF_PROMPT: DEFAULT_PROMPT, + ollama.CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT, ollama.CONF_MAX_HISTORY: 2, } diff --git a/tests/components/ollama/conftest.py b/tests/components/ollama/conftest.py index db1689bd416..b28b8850cd5 100644 --- a/tests/components/ollama/conftest.py +++ b/tests/components/ollama/conftest.py @@ -5,7 +5,9 @@ from unittest.mock import patch import pytest from homeassistant.components import ollama +from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm from homeassistant.setup import async_setup_component from . 
import TEST_OPTIONS, TEST_USER_DATA @@ -25,6 +27,17 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: return entry +@pytest.fixture +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Mock a config entry with assist.""" + hass.config_entries.async_update_entry( + mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} + ) + return mock_config_entry + + @pytest.fixture async def mock_init_component(hass: HomeAssistant, mock_config_entry: MockConfigEntry): """Initialize integration.""" @@ -35,6 +48,7 @@ async def mock_init_component(hass: HomeAssistant, mock_config_entry: MockConfig ): assert await async_setup_component(hass, ollama.DOMAIN, {}) await hass.async_block_till_done() + yield @pytest.fixture(autouse=True) diff --git a/tests/components/ollama/snapshots/test_conversation.ambr b/tests/components/ollama/snapshots/test_conversation.ambr new file mode 100644 index 00000000000..e4dd7cd00bb --- /dev/null +++ b/tests/components/ollama/snapshots/test_conversation.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_unknown_hass_api + dict({ + 'conversation_id': None, + 'response': IntentResponse( + card=dict({ + }), + error_code=, + failed_results=list([ + ]), + intent=None, + intent_targets=list([ + ]), + language='en', + matched_states=list([ + ]), + reprompt=dict({ + }), + response_type=, + speech=dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Error preparing LLM API: API non-existing not found', + }), + }), + speech_slots=dict({ + }), + success_results=list([ + ]), + unmatched_states=list([ + ]), + ), + }) +# --- diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index b6f0be3c414..cb56b398342 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -1,21 +1,19 @@ """Tests for the Ollama integration.""" -from unittest.mock import 
AsyncMock, patch +from typing import Any +from unittest.mock import AsyncMock, Mock, patch from ollama import Message, ResponseError import pytest +from syrupy.assertion import SnapshotAssertion +import voluptuous as vol from homeassistant.components import conversation, ollama from homeassistant.components.conversation import trace -from homeassistant.components.homeassistant.exposed_entities import async_expose_entity -from homeassistant.const import ATTR_FRIENDLY_NAME, MATCH_ALL +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import Context, HomeAssistant -from homeassistant.helpers import ( - area_registry as ar, - device_registry as dr, - entity_registry as er, - intent, -) +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import intent, llm from tests.common import MockConfigEntry @@ -25,9 +23,6 @@ async def test_chat( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, agent_id: str, ) -> None: """Test that the chat function is called with the appropriate arguments.""" @@ -35,48 +30,8 @@ async def test_chat( if agent_id is None: agent_id = mock_config_entry.entry_id - # Create some areas, devices, and entities - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") - area_bedroom = area_registry.async_get_or_create("bedroom_id") - area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") - area_office = area_registry.async_get_or_create("office_id") - area_office = area_registry.async_update(area_office.id, name="office") - entry = MockConfigEntry() entry.add_to_hass(hass) - kitchen_device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections=set(), - identifiers={("demo", "id-1234")}, - ) - 
device_registry.async_update_device(kitchen_device.id, area_id=area_kitchen.id) - - kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light = entity_registry.async_update_entity( - kitchen_light.entity_id, device_id=kitchen_device.id - ) - hass.states.async_set( - kitchen_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} - ) - - bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") - bedroom_light = entity_registry.async_update_entity( - bedroom_light.entity_id, area_id=area_bedroom.id - ) - hass.states.async_set( - bedroom_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "bedroom light"} - ) - - # Hide the office light - office_light = entity_registry.async_get_or_create("light", "demo", "ABCD") - office_light = entity_registry.async_update_entity( - office_light.entity_id, area_id=area_office.id - ) - hass.states.async_set( - office_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "office light"} - ) - async_expose_entity(hass, conversation.DOMAIN, office_light.entity_id, False) with patch( "ollama.AsyncClient.chat", @@ -100,12 +55,6 @@ async def test_chat( Message({"role": "user", "content": "test message"}), ] - # Verify only exposed devices/areas are in prompt - assert "kitchen light" in prompt - assert "bedroom light" in prompt - assert "office light" not in prompt - assert "office" not in prompt - assert ( result.response.response_type == intent.IntentResponseType.ACTION_DONE ), result @@ -122,7 +71,254 @@ async def test_chat( ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] - assert "The current time is" in detail_event["data"]["messages"][0]["content"] + assert "Current time is" in detail_event["data"]["messages"][0]["content"] + + +async def test_template_variables( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that template variables work.""" + context = Context(user_id="12345") + 
mock_user = Mock() + mock_user.id = "12345" + mock_user.name = "Test User" + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + "prompt": ( + "The user name is {{ user_name }}. " + "The user id is {{ llm_context.context.user_id }}." + ), + }, + ) + with ( + patch("ollama.AsyncClient.list"), + patch( + "ollama.AsyncClient.chat", + return_value={"message": {"role": "assistant", "content": "test response"}}, + ) as mock_chat, + patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user), + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + result = await conversation.async_converse( + hass, "hello", None, context, agent_id=mock_config_entry.entry_id + ) + + assert ( + result.response.response_type == intent.IntentResponseType.ACTION_DONE + ), result + + args = mock_chat.call_args.kwargs + prompt = args["messages"][0]["content"] + + assert "The user name is Test User." in prompt + assert "The user id is 12345." 
in prompt + + +@pytest.mark.parametrize( + ("tool_args", "expected_tool_args"), + [ + ({"param1": "test_value"}, {"param1": "test_value"}), + ({"param1": 2}, {"param1": 2}), + ( + {"param1": "test_value", "floor": ""}, + {"param1": "test_value"}, # Omit empty arguments + ), + ( + {"domain": '["light"]'}, + {"domain": ["light"]}, # Repair invalid json arguments + ), + ( + {"domain": "['light']"}, + {"domain": "['light']"}, # Preserve invalid json that can't be parsed + ), + ], +) +@patch("homeassistant.components.ollama.conversation.llm.AssistAPI._async_get_tools") +async def test_function_call( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + tool_args: dict[str, Any], + expected_tool_args: dict[str, Any], +) -> None: + """Test function call from the assistant.""" + agent_id = mock_config_entry_with_assist.entry_id + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.return_value = "Test response" + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + if message["role"] == "tool": + return { + "message": { + "role": "assistant", + "content": "I have successfully called the function", + } + } + + return { + "message": { + "role": "assistant", + "tool_calls": [ + { + "function": { + "name": "test_tool", + "arguments": tool_args, + } + } + ], + } + } + + with patch( + "ollama.AsyncClient.chat", + side_effect=completion_result, + ) as mock_chat: + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert mock_chat.call_count == 2 + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert ( + 
result.response.speech["plain"]["speech"] + == "I have successfully called the function" + ) + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args=expected_tool_args, + ), + llm.LLMContext( + platform="ollama", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + +@patch("homeassistant.components.ollama.conversation.llm.AssistAPI._async_get_tools") +async def test_function_exception( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test function call with exception.""" + agent_id = mock_config_entry_with_assist.entry_id + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.side_effect = HomeAssistantError("Test tool exception") + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + if message["role"] == "tool": + return { + "message": { + "role": "assistant", + "content": "There was an error calling the function", + } + } + + return { + "message": { + "role": "assistant", + "tool_calls": [ + { + "function": { + "name": "test_tool", + "arguments": {"param1": "test_value"}, + } + } + ], + } + } + + with patch( + "ollama.AsyncClient.chat", + side_effect=completion_result, + ) as mock_chat: + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert mock_chat.call_count == 2 + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert ( + result.response.speech["plain"]["speech"] + == "There was an error calling the function" + ) + 
mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={"param1": "test_value"}, + ), + llm.LLMContext( + platform="ollama", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + +async def test_unknown_hass_api( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + mock_init_component, +) -> None: + """Test when we reference an API that no longer exists.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "non-existing", + }, + ) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id + ) + + assert result == snapshot async def test_message_history_trimming( @@ -359,3 +555,26 @@ async def test_conversation_agent( mock_config_entry.entry_id ) assert agent.supported_languages == MATCH_ALL + + state = hass.states.get("conversation.mock_title") + assert state + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 + + +async def test_conversation_agent_with_assist( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test OllamaConversationEntity.""" + agent = conversation.get_agent_manager(hass).async_get_agent( + mock_config_entry_with_assist.entry_id + ) + assert agent.supported_languages == MATCH_ALL + + state = hass.states.get("conversation.mock_title") + assert state + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == conversation.ConversationEntityFeature.CONTROL + ) diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index e9ba720adb3..dd53d6cbce6 100644 --- a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -1,13 +1,13 @@ """Test the onboarding 
views.""" import asyncio +from collections.abc import AsyncGenerator from http import HTTPStatus import os from typing import Any from unittest.mock import Mock, patch import pytest -from typing_extensions import AsyncGenerator from homeassistant.components import onboarding from homeassistant.components.onboarding import const, views @@ -28,7 +28,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def auth_active(hass): +def auth_active(hass: HomeAssistant) -> None: """Ensure auth is always active.""" hass.loop.run_until_complete( register_auth_provider(hass, {"type": "homeassistant"}) diff --git a/tests/components/ondilo_ico/conftest.py b/tests/components/ondilo_ico/conftest.py index 6a03d6961c2..a847c1df069 100644 --- a/tests/components/ondilo_ico/conftest.py +++ b/tests/components/ondilo_ico/conftest.py @@ -1,10 +1,10 @@ """Provide basic Ondilo fixture.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.ondilo_ico.const import DOMAIN diff --git a/tests/components/ondilo_ico/snapshots/test_init.ambr b/tests/components/ondilo_ico/snapshots/test_init.ambr index c488b1e3c15..44008ac907e 100644 --- a/tests/components/ondilo_ico/snapshots/test_init.ambr +++ b/tests/components/ondilo_ico/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'Ondilo', 'model': 'ICO', + 'model_id': None, 'name': 'Pool 1', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '1.7.1-stable', @@ -51,8 +53,10 @@ }), 'manufacturer': 'Ondilo', 'model': 'ICO', + 'model_id': None, 'name': 'Pool 2', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '1.7.1-stable', diff --git a/tests/components/onewire/conftest.py b/tests/components/onewire/conftest.py index 47b50ab10e0..65a86b58f2f 100644 --- 
a/tests/components/onewire/conftest.py +++ b/tests/components/onewire/conftest.py @@ -1,10 +1,10 @@ """Provide common 1-Wire fixtures.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pyownet.protocol import ConnError import pytest -from typing_extensions import Generator from homeassistant.components.onewire.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry diff --git a/tests/components/onewire/snapshots/test_binary_sensor.ambr b/tests/components/onewire/snapshots/test_binary_sensor.ambr index 999794ec20d..450cc4c7486 100644 --- a/tests/components/onewire/snapshots/test_binary_sensor.ambr +++ b/tests/components/onewire/snapshots/test_binary_sensor.ambr @@ -34,8 +34,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', + 'model_id': None, 'name': '05.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -74,8 +76,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', + 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -114,8 +118,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', + 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -254,8 +260,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -294,8 +302,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2409', + 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -322,8 +332,10 @@ }), 
'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -362,8 +374,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', + 'model_id': None, 'name': '22.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -402,8 +416,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': '26.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -442,8 +458,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -482,8 +500,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -522,8 +542,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -562,8 +584,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', + 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -954,8 +978,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', + 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -994,8 +1020,10 @@ }), 'manufacturer': 'Maxim Integrated', 
'model': 'DS2413', + 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1122,8 +1150,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', + 'model_id': None, 'name': '3B.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1162,8 +1192,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', + 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1202,8 +1234,10 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', + 'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1242,8 +1276,10 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', + 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1282,8 +1318,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1322,8 +1360,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', + 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1362,8 +1402,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', + 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1402,8 +1444,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 
'HB_HUB', + 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/onewire/snapshots/test_sensor.ambr b/tests/components/onewire/snapshots/test_sensor.ambr index 59ed167197d..5ad4cf2ef4b 100644 --- a/tests/components/onewire/snapshots/test_sensor.ambr +++ b/tests/components/onewire/snapshots/test_sensor.ambr @@ -34,8 +34,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', + 'model_id': None, 'name': '05.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -74,8 +76,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', + 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -163,8 +167,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', + 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -313,8 +319,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -449,8 +457,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2409', + 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -477,8 +487,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -613,8 +625,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', + 
'model_id': None, 'name': '22.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -702,8 +716,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': '26.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1281,8 +1297,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1370,8 +1388,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1459,8 +1479,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1548,8 +1570,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', + 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1588,8 +1612,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', + 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1824,8 +1850,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2413', + 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1864,8 +1892,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', + 'model_id': None, 'name': 
'3B.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1953,8 +1983,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', + 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -2042,8 +2074,10 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', + 'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -2278,8 +2312,10 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', + 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -2416,8 +2452,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -2995,8 +3033,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', + 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -3182,8 +3222,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', + 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -3418,8 +3460,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_HUB', + 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/onewire/snapshots/test_switch.ambr b/tests/components/onewire/snapshots/test_switch.ambr 
index 8fd1e2aeef6..3bc7a2d3def 100644 --- a/tests/components/onewire/snapshots/test_switch.ambr +++ b/tests/components/onewire/snapshots/test_switch.ambr @@ -34,8 +34,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', + 'model_id': None, 'name': '05.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -118,8 +120,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', + 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -158,8 +162,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', + 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -386,8 +392,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -426,8 +434,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2409', + 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -454,8 +464,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -494,8 +506,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', + 'model_id': None, 'name': '22.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -534,8 +548,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': '26.111111111111', 
'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -618,8 +634,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -658,8 +676,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -698,8 +718,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -738,8 +760,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', + 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1482,8 +1506,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', + 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1522,8 +1548,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2413', + 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1650,8 +1678,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', + 'model_id': None, 'name': '3B.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1690,8 +1720,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', + 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, + 
'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1730,8 +1762,10 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', + 'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1770,8 +1804,10 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', + 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1810,8 +1846,10 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1894,8 +1932,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', + 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1934,8 +1974,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', + 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -2326,8 +2368,10 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_HUB', + 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/onewire/test_binary_sensor.py b/tests/components/onewire/test_binary_sensor.py index 8b1129529d5..31895f705ff 100644 --- a/tests/components/onewire/test_binary_sensor.py +++ b/tests/components/onewire/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for 1-Wire binary sensors.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from 
syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_diagnostics.py b/tests/components/onewire/test_diagnostics.py index 62b045c4516..ecdae859597 100644 --- a/tests/components/onewire/test_diagnostics.py +++ b/tests/components/onewire/test_diagnostics.py @@ -1,10 +1,10 @@ """Test 1-Wire diagnostics.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_sensor.py b/tests/components/onewire/test_sensor.py index df0a81920c9..ba0e21701f8 100644 --- a/tests/components/onewire/test_sensor.py +++ b/tests/components/onewire/test_sensor.py @@ -1,5 +1,6 @@ """Tests for 1-Wire sensors.""" +from collections.abc import Generator from copy import deepcopy import logging from unittest.mock import MagicMock, _patch_dict, patch @@ -7,7 +8,6 @@ from unittest.mock import MagicMock, _patch_dict, patch from pyownet.protocol import OwnetError import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_switch.py b/tests/components/onewire/test_switch.py index b1b8e5ddbd0..936e83f66ec 100644 --- a/tests/components/onewire/test_switch.py +++ b/tests/components/onewire/test_switch.py @@ -1,10 +1,10 @@ """Tests for 1-Wire switches.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from 
homeassistant.config_entries import ConfigEntry diff --git a/tests/components/onvif/test_diagnostics.py b/tests/components/onvif/test_diagnostics.py index d58c8008ea6..ce8febe2341 100644 --- a/tests/components/onvif/test_diagnostics.py +++ b/tests/components/onvif/test_diagnostics.py @@ -1,6 +1,7 @@ """Test ONVIF diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -19,4 +20,6 @@ async def test_diagnostics( entry, _, _ = await setup_onvif_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/open_meteo/conftest.py b/tests/components/open_meteo/conftest.py index 0d3e1274693..22138846915 100644 --- a/tests/components/open_meteo/conftest.py +++ b/tests/components/open_meteo/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import MagicMock, patch from open_meteo import Forecast import pytest -from typing_extensions import Generator from homeassistant.components.open_meteo.const import DOMAIN from homeassistant.const import CONF_ZONE diff --git a/tests/components/openai_conversation/conftest.py b/tests/components/openai_conversation/conftest.py index 6d770b51ce9..4639d0dc8e0 100644 --- a/tests/components/openai_conversation/conftest.py +++ b/tests/components/openai_conversation/conftest.py @@ -13,7 +13,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( title="OpenAI", @@ -27,7 +27,9 @@ def mock_config_entry(hass): @pytest.fixture -def mock_config_entry_with_assist(hass, mock_config_entry): +def mock_config_entry_with_assist( + hass: HomeAssistant, 
mock_config_entry: MockConfigEntry +) -> MockConfigEntry: """Mock a config entry with assist.""" hass.config_entries.async_update_entry( mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} @@ -36,7 +38,9 @@ def mock_config_entry_with_assist(hass, mock_config_entry): @pytest.fixture -async def mock_init_component(hass, mock_config_entry): +async def mock_init_component( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: """Initialize integration.""" with patch( "openai.resources.models.AsyncModels.list", diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index 1008482847c..e0665bc449f 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -27,6 +27,33 @@ from homeassistant.util import ulid from tests.common import MockConfigEntry +async def test_entity( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test entity properties.""" + state = hass.states.get("conversation.openai") + assert state + assert state.attributes["supported_features"] == 0 + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "assist", + }, + ) + await hass.config_entries.async_reload(mock_config_entry.entry_id) + + state = hass.states.get("conversation.openai") + assert state + assert ( + state.attributes["supported_features"] + == conversation.ConversationEntityFeature.CONTROL + ) + + async def test_error_handling( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component ) -> None: @@ -267,7 +294,7 @@ async def test_function_call( assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.LLM_TOOL_CALL, + 
trace.ConversationTraceEventType.TOOL_CALL, ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] @@ -276,6 +303,7 @@ async def test_function_call( "Today's date is 2024-06-03." in trace_events[1]["data"]["messages"][0]["content"] ) + assert [t.name for t in detail_event["data"]["tools"]] == ["test_tool"] # Call it again, make sure we have updated prompt with ( @@ -493,6 +521,8 @@ async def test_unknown_hass_api( }, ) + await hass.async_block_till_done() + result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id ) diff --git a/tests/components/openai_conversation/test_init.py b/tests/components/openai_conversation/test_init.py index c9431aa1083..d78ce398c92 100644 --- a/tests/components/openai_conversation/test_init.py +++ b/tests/components/openai_conversation/test_init.py @@ -60,33 +60,6 @@ from tests.common import MockConfigEntry "style": "natural", }, ), - ( - {"prompt": "Picture of a dog", "size": "256"}, - { - "prompt": "Picture of a dog", - "size": "1024x1024", - "quality": "standard", - "style": "vivid", - }, - ), - ( - {"prompt": "Picture of a dog", "size": "512"}, - { - "prompt": "Picture of a dog", - "size": "1024x1024", - "quality": "standard", - "style": "vivid", - }, - ), - ( - {"prompt": "Picture of a dog", "size": "1024"}, - { - "prompt": "Picture of a dog", - "size": "1024x1024", - "quality": "standard", - "style": "vivid", - }, - ), ], ) async def test_generate_image_service( diff --git a/tests/components/openalpr_cloud/test_image_processing.py b/tests/components/openalpr_cloud/test_image_processing.py index 7115c3e7bf0..143513f9852 100644 --- a/tests/components/openalpr_cloud/test_image_processing.py +++ b/tests/components/openalpr_cloud/test_image_processing.py @@ -6,7 +6,7 @@ import pytest from homeassistant.components import camera, image_processing as ip from homeassistant.components.openalpr_cloud.image_processing import OPENALPR_API_URL -from 
homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from homeassistant.setup import async_setup_component from tests.common import assert_setup_component, async_capture_events, load_fixture @@ -15,13 +15,13 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(autouse=True) -async def setup_homeassistant(hass: HomeAssistant): +async def setup_homeassistant(hass: HomeAssistant) -> None: """Set up the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) @pytest.fixture -async def setup_openalpr_cloud(hass): +async def setup_openalpr_cloud(hass: HomeAssistant) -> None: """Set up openalpr cloud.""" config = { ip.DOMAIN: { @@ -43,7 +43,7 @@ async def setup_openalpr_cloud(hass): @pytest.fixture -async def alpr_events(hass): +async def alpr_events(hass: HomeAssistant) -> list[Event]: """Listen for events.""" return async_capture_events(hass, "image_processing.found_plate") diff --git a/tests/components/openexchangerates/conftest.py b/tests/components/openexchangerates/conftest.py index 6bd7da2c7af..770432ebac3 100644 --- a/tests/components/openexchangerates/conftest.py +++ b/tests/components/openexchangerates/conftest.py @@ -1,9 +1,9 @@ """Provide common fixtures for tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.openexchangerates.const import DOMAIN diff --git a/tests/components/openexchangerates/test_config_flow.py b/tests/components/openexchangerates/test_config_flow.py index 30ea619d646..ec06c662201 100644 --- a/tests/components/openexchangerates/test_config_flow.py +++ b/tests/components/openexchangerates/test_config_flow.py @@ -1,6 +1,7 @@ """Test the Open Exchange Rates config flow.""" import asyncio +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch @@ -9,7 +10,6 @@ from aioopenexchangerates 
import ( OpenExchangeRatesClientError, ) import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.openexchangerates.const import DOMAIN diff --git a/tests/components/opengarage/conftest.py b/tests/components/opengarage/conftest.py index c960e723289..2367692096b 100644 --- a/tests/components/opengarage/conftest.py +++ b/tests/components/opengarage/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.opengarage.const import CONF_DEVICE_KEY, DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT, CONF_VERIFY_SSL diff --git a/tests/components/opensky/conftest.py b/tests/components/opensky/conftest.py index c48f3bec8d8..4664c48ef9e 100644 --- a/tests/components/opensky/conftest.py +++ b/tests/components/opensky/conftest.py @@ -1,10 +1,10 @@ """Configure tests for the OpenSky integration.""" +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, patch import pytest from python_opensky import StatesResponse -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.opensky.const import ( CONF_ALTITUDE, diff --git a/tests/components/openuv/conftest.py b/tests/components/openuv/conftest.py index 69563c94c64..9bb1970bc2f 100644 --- a/tests/components/openuv/conftest.py +++ b/tests/components/openuv/conftest.py @@ -1,10 +1,11 @@ """Define test fixtures for OpenUV.""" +from collections.abc import Generator import json +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.openuv import CONF_FROM_WINDOW, CONF_TO_WINDOW, DOMAIN from homeassistant.const import ( @@ -13,6 +14,7 @@ from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, ) +from 
homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -41,7 +43,9 @@ def client_fixture(data_protection_window, data_uv_index): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -54,7 +58,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_API_KEY: TEST_API_KEY, @@ -89,7 +93,9 @@ async def mock_pyopenuv_fixture(client): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_pyopenuv): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_pyopenuv: None +) -> None: """Define a fixture to set up openuv.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/openuv/test_diagnostics.py b/tests/components/openuv/test_diagnostics.py index 4b5114bccd1..4fe851eea53 100644 --- a/tests/components/openuv/test_diagnostics.py +++ b/tests/components/openuv/test_diagnostics.py @@ -4,6 +4,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -35,6 +36,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": { "protection_window": { diff --git a/tests/components/openweathermap/test_config_flow.py b/tests/components/openweathermap/test_config_flow.py index 
be02a6b01a9..f18aa432e2f 100644 --- a/tests/components/openweathermap/test_config_flow.py +++ b/tests/components/openweathermap/test_config_flow.py @@ -45,7 +45,7 @@ CONFIG = { VALID_YAML_CONFIG = {CONF_API_KEY: "foo"} -def _create_mocked_owm_client(is_valid: bool): +def _create_mocked_owm_factory(is_valid: bool): current_weather = CurrentWeather( date_time=datetime.fromtimestamp(1714063536, tz=UTC), temperature=6.84, @@ -118,18 +118,18 @@ def _create_mocked_owm_client(is_valid: bool): def mock_owm_client(): """Mock config_flow OWMClient.""" with patch( - "homeassistant.components.openweathermap.OWMClient", - ) as owm_client_mock: - yield owm_client_mock + "homeassistant.components.openweathermap.create_owm_client", + ) as mock: + yield mock @pytest.fixture(name="config_flow_owm_client_mock") def mock_config_flow_owm_client(): """Mock config_flow OWMClient.""" with patch( - "homeassistant.components.openweathermap.utils.OWMClient", - ) as config_flow_owm_client_mock: - yield config_flow_owm_client_mock + "homeassistant.components.openweathermap.utils.create_owm_client", + ) as mock: + yield mock async def test_successful_config_flow( @@ -138,7 +138,7 @@ async def test_successful_config_flow( config_flow_owm_client_mock, ) -> None: """Test that the form is served with valid input.""" - mock = _create_mocked_owm_client(True) + mock = _create_mocked_owm_factory(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -177,7 +177,7 @@ async def test_abort_config_flow( config_flow_owm_client_mock, ) -> None: """Test that the form is served with same data.""" - mock = _create_mocked_owm_client(True) + mock = _create_mocked_owm_factory(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -200,7 +200,7 @@ async def test_config_flow_options_change( config_flow_owm_client_mock, ) -> None: """Test that the options form.""" - mock = _create_mocked_owm_client(True) + mock = 
_create_mocked_owm_factory(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -261,7 +261,7 @@ async def test_form_invalid_api_key( config_flow_owm_client_mock, ) -> None: """Test that the form is served with no input.""" - config_flow_owm_client_mock.return_value = _create_mocked_owm_client(False) + config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(False) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=CONFIG ) @@ -269,7 +269,7 @@ async def test_form_invalid_api_key( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_api_key"} - config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True) + config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=CONFIG ) @@ -282,7 +282,7 @@ async def test_form_api_call_error( config_flow_owm_client_mock, ) -> None: """Test setting up with api call error.""" - config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True) + config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True) config_flow_owm_client_mock.side_effect = RequestError("oops") result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=CONFIG diff --git a/tests/components/opower/test_config_flow.py b/tests/components/opower/test_config_flow.py index a236494f2c9..8134539b0a5 100644 --- a/tests/components/opower/test_config_flow.py +++ b/tests/components/opower/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Opower config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from opower import CannotConnect, InvalidAuth import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.opower.const import DOMAIN diff --git 
a/tests/components/oralb/conftest.py b/tests/components/oralb/conftest.py index fa4ba463357..3e5f38ffb73 100644 --- a/tests/components/oralb/conftest.py +++ b/tests/components/oralb/conftest.py @@ -1,9 +1,10 @@ """OralB session fixtures.""" +from collections.abc import Generator +from typing import Any from unittest import mock import pytest -from typing_extensions import Generator class MockServices: @@ -19,7 +20,7 @@ class MockBleakClient: services = MockServices() - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index ba0f43c4a71..56f29bdc79b 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -19,7 +19,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="otbr_config_entry_multipan") -async def otbr_config_entry_multipan_fixture(hass): +async def otbr_config_entry_multipan_fixture(hass: HomeAssistant) -> None: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, @@ -46,7 +46,7 @@ async def otbr_config_entry_multipan_fixture(hass): @pytest.fixture(name="otbr_config_entry_thread") -async def otbr_config_entry_thread_fixture(hass): +async def otbr_config_entry_thread_fixture(hass: HomeAssistant) -> None: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_THREAD, diff --git a/tests/components/otbr/test_websocket_api.py b/tests/components/otbr/test_websocket_api.py index df55d38d3b7..5361b56c688 100644 --- a/tests/components/otbr/test_websocket_api.py +++ b/tests/components/otbr/test_websocket_api.py @@ -36,11 +36,14 @@ async def test_get_info( websocket_client, ) -> None: """Test async_get_info.""" + extended_pan_id = "ABCD1234" with ( patch( "python_otbr_api.OTBR.get_active_dataset", - 
return_value=python_otbr_api.ActiveDataSet(channel=16), + return_value=python_otbr_api.ActiveDataSet( + channel=16, extended_pan_id=extended_pan_id + ), ), patch( "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 @@ -58,12 +61,16 @@ async def test_get_info( msg = await websocket_client.receive_json() assert msg["success"] + extended_address = TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex() assert msg["result"] == { - "url": BASE_URL, - "active_dataset_tlvs": DATASET_CH16.hex().lower(), - "channel": 16, - "border_agent_id": TEST_BORDER_AGENT_ID.hex(), - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + extended_address: { + "url": BASE_URL, + "active_dataset_tlvs": DATASET_CH16.hex().lower(), + "channel": 16, + "border_agent_id": TEST_BORDER_AGENT_ID.hex(), + "extended_address": extended_address, + "extended_pan_id": extended_pan_id.lower(), + } } @@ -121,6 +128,10 @@ async def test_create_network( patch( "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 ) as get_active_dataset_tlvs_mock, + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -129,7 +140,12 @@ async def test_create_network( return_value=0x1234, ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert msg["success"] @@ -156,7 +172,9 @@ async def test_create_network_no_entry( """Test create network.""" await async_setup_component(hass, "otbr", {}) websocket_client = await hass_ws_client(hass) - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + {"type": "otbr/create_network", "extended_address": "blah"} + 
) msg = await websocket_client.receive_json() assert not msg["success"] @@ -170,11 +188,22 @@ async def test_create_network_fails_1( websocket_client, ) -> None: """Test create network.""" - with patch( - "python_otbr_api.OTBR.set_enabled", - side_effect=python_otbr_api.OTBRError, + with ( + patch( + "python_otbr_api.OTBR.set_enabled", + side_effect=python_otbr_api.OTBRError, + ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -197,8 +226,17 @@ async def test_create_network_fails_2( side_effect=python_otbr_api.OTBRError, ), patch("python_otbr_api.OTBR.factory_reset"), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -223,8 +261,17 @@ async def test_create_network_fails_3( patch( "python_otbr_api.OTBR.factory_reset", ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -248,8 +295,17 @@ async def test_create_network_fails_4( patch( "python_otbr_api.OTBR.factory_reset", ), + patch( + 
"python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -268,8 +324,17 @@ async def test_create_network_fails_5( patch("python_otbr_api.OTBR.create_active_dataset"), patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=None), patch("python_otbr_api.OTBR.factory_reset"), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -291,14 +356,69 @@ async def test_create_network_fails_6( "python_otbr_api.OTBR.factory_reset", side_effect=python_otbr_api.OTBRError, ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "factory_reset_failed" +async def test_create_network_fails_7( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test create network.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + side_effect=python_otbr_api.OTBRError, + ): + await 
websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "get_extended_address_failed" + + +async def test_create_network_fails_8( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test create network.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": "blah", + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "unknown_router" + + async def test_set_network( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -312,6 +432,10 @@ async def test_set_network( dataset_id = list(dataset_store.datasets)[1] with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "python_otbr_api.OTBR.set_active_dataset_tlvs" ) as set_active_dataset_tlvs_mock, @@ -320,6 +444,7 @@ async def test_set_network( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -345,6 +470,7 @@ async def test_set_network_no_entry( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": "abc", } ) @@ -368,14 +494,19 @@ async def test_set_network_channel_conflict( multiprotocol_addon_manager_mock.async_get_channel.return_value = 15 - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_network", - "dataset_id": dataset_id, - } - ) + with patch( + "python_otbr_api.OTBR.get_extended_address", + 
return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "dataset_id": dataset_id, + } + ) - msg = await websocket_client.receive_json() + msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "channel_conflict" @@ -389,14 +520,19 @@ async def test_set_network_unknown_dataset( ) -> None: """Test set network.""" - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_network", - "dataset_id": "abc", - } - ) + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "dataset_id": "abc", + } + ) - msg = await websocket_client.receive_json() + msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "unknown_dataset" @@ -413,13 +549,20 @@ async def test_set_network_fails_1( dataset_store = await thread.dataset_store.async_get_store(hass) dataset_id = list(dataset_store.datasets)[1] - with patch( - "python_otbr_api.OTBR.set_enabled", - side_effect=python_otbr_api.OTBRError, + with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch( + "python_otbr_api.OTBR.set_enabled", + side_effect=python_otbr_api.OTBRError, + ), ): await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -441,6 +584,10 @@ async def test_set_network_fails_2( dataset_id = list(dataset_store.datasets)[1] with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "python_otbr_api.OTBR.set_enabled", ), @@ -452,6 +599,7 @@ 
async def test_set_network_fails_2( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -473,6 +621,10 @@ async def test_set_network_fails_3( dataset_id = list(dataset_store.datasets)[1] with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "python_otbr_api.OTBR.set_enabled", side_effect=[None, python_otbr_api.OTBRError], @@ -484,6 +636,7 @@ async def test_set_network_fails_3( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -493,6 +646,54 @@ async def test_set_network_fails_3( assert msg["error"]["code"] == "set_enabled_failed" +async def test_set_network_fails_4( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test set network.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + side_effect=python_otbr_api.OTBRError, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "dataset_id": "abc", + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "get_extended_address_failed" + + +async def test_set_network_fails_5( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test set network.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "extended_address": "blah", + "dataset_id": "abc", + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert 
msg["error"]["code"] == "unknown_router" + + async def test_set_channel( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -501,9 +702,19 @@ async def test_set_channel( ) -> None: """Test set channel.""" - with patch("python_otbr_api.OTBR.set_channel"): + with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch("python_otbr_api.OTBR.set_channel"), + ): await websocket_client.send_json_auto_id( - {"type": "otbr/set_channel", "channel": 12} + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } ) msg = await websocket_client.receive_json() @@ -519,9 +730,19 @@ async def test_set_channel_multiprotocol( ) -> None: """Test set channel.""" - with patch("python_otbr_api.OTBR.set_channel"): + with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch("python_otbr_api.OTBR.set_channel"), + ): await websocket_client.send_json_auto_id( - {"type": "otbr/set_channel", "channel": 12} + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } ) msg = await websocket_client.receive_json() @@ -538,7 +759,11 @@ async def test_set_channel_no_entry( await async_setup_component(hass, "otbr", {}) websocket_client = await hass_ws_client(hass) await websocket_client.send_json_auto_id( - {"type": "otbr/set_channel", "channel": 12} + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } ) msg = await websocket_client.receive_json() @@ -546,21 +771,79 @@ async def test_set_channel_no_entry( assert msg["error"]["code"] == "not_loaded" -async def test_set_channel_fails( +async def test_set_channel_fails_1( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_thread, websocket_client, ) -> None: """Test set channel.""" - with patch( - 
"python_otbr_api.OTBR.set_channel", - side_effect=python_otbr_api.OTBRError, + with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch( + "python_otbr_api.OTBR.set_channel", + side_effect=python_otbr_api.OTBRError, + ), ): await websocket_client.send_json_auto_id( - {"type": "otbr/set_channel", "channel": 12} + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } ) msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "set_channel_failed" + + +async def test_set_channel_fails_2( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test set channel.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + side_effect=python_otbr_api.OTBRError, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "get_extended_address_failed" + + +async def test_set_channel_fails_3( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test set channel.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_channel", + "extended_address": "blah", + "channel": 12, + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "unknown_router" diff --git a/tests/components/otp/conftest.py b/tests/components/otp/conftest.py index 7443d772c69..7926be1e48e 100644 --- a/tests/components/otp/conftest.py +++ b/tests/components/otp/conftest.py @@ 
-14,7 +14,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.otp.async_setup_entry", return_value=True @@ -23,7 +23,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_pyotp() -> Generator[MagicMock, None, None]: +def mock_pyotp() -> Generator[MagicMock]: """Mock a pyotp.""" with ( patch( diff --git a/tests/components/otp/test_config_flow.py b/tests/components/otp/test_config_flow.py index eefb1a6f4e0..f9fac433ff9 100644 --- a/tests/components/otp/test_config_flow.py +++ b/tests/components/otp/test_config_flow.py @@ -12,6 +12,10 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType TEST_DATA = { + CONF_NAME: "OTP Sensor", + CONF_TOKEN: "2FX5 FBSY RE6V EC2F SHBQ CRKO 2GND VZ52", +} +TEST_DATA_RESULT = { CONF_NAME: "OTP Sensor", CONF_TOKEN: "2FX5FBSYRE6VEC2FSHBQCRKO2GNDVZ52", } @@ -41,7 +45,11 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: result["flow_id"], TEST_DATA, ) - await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "OTP Sensor" + assert result["data"] == TEST_DATA_RESULT + assert len(mock_setup_entry.mock_calls) == 1 @pytest.mark.parametrize( @@ -85,7 +93,7 @@ async def test_errors_and_recover( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "OTP Sensor" - assert result["data"] == TEST_DATA + assert result["data"] == TEST_DATA_RESULT assert len(mock_setup_entry.mock_calls) == 1 @@ -96,13 +104,13 @@ async def test_flow_import(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, - data=TEST_DATA, + data=TEST_DATA_RESULT, ) await hass.async_block_till_done() assert result["type"] 
is FlowResultType.CREATE_ENTRY assert result["title"] == "OTP Sensor" - assert result["data"] == TEST_DATA + assert result["data"] == TEST_DATA_RESULT @pytest.mark.usefixtures("mock_pyotp") @@ -134,7 +142,7 @@ async def test_generate_new_token( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "OTP Sensor" - assert result["data"] == TEST_DATA + assert result["data"] == TEST_DATA_RESULT assert len(mock_setup_entry.mock_calls) == 1 @@ -181,5 +189,5 @@ async def test_generate_new_token_errors( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "OTP Sensor" - assert result["data"] == TEST_DATA + assert result["data"] == TEST_DATA_RESULT assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/ourgroceries/__init__.py b/tests/components/ourgroceries/__init__.py index 6f90cb7ea1b..4ebbea46229 100644 --- a/tests/components/ourgroceries/__init__.py +++ b/tests/components/ourgroceries/__init__.py @@ -1,6 +1,10 @@ """Tests for the OurGroceries integration.""" +from typing import Any -def items_to_shopping_list(items: list, version_id: str = "1") -> dict[dict[list]]: + +def items_to_shopping_list( + items: list, version_id: str = "1" +) -> dict[str, dict[str, Any]]: """Convert a list of items into a shopping list.""" return {"list": {"versionId": version_id, "items": items}} diff --git a/tests/components/ourgroceries/conftest.py b/tests/components/ourgroceries/conftest.py index bc8c632b511..b3fb4e9bcc6 100644 --- a/tests/components/ourgroceries/conftest.py +++ b/tests/components/ourgroceries/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the OurGroceries tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.ourgroceries import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/ourgroceries/test_todo.py 
b/tests/components/ourgroceries/test_todo.py index 672e2e14447..d364881b624 100644 --- a/tests/components/ourgroceries/test_todo.py +++ b/tests/components/ourgroceries/test_todo.py @@ -7,8 +7,14 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.ourgroceries.coordinator import SCAN_INTERVAL -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -69,9 +75,9 @@ async def test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda"}, - target={"entity_id": "todo.test_list"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) @@ -108,9 +114,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", "status": "completed"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.toggle_item_crossed_off.called @@ -132,9 +138,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", "status": "needs_action"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.toggle_item_crossed_off.called @@ -181,9 +187,9 @@ async def test_update_todo_item_summary( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", 
"rename": "Milk"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_RENAME: "Milk"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.change_item_on_list @@ -218,9 +224,9 @@ async def test_remove_todo_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["12345", "54321"]}, - target={"entity_id": "todo.test_list"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["12345", "54321"]}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.remove_item_from_list.call_count == 2 diff --git a/tests/components/overkiz/conftest.py b/tests/components/overkiz/conftest.py index 8ab26e3587b..151d0719ddb 100644 --- a/tests/components/overkiz/conftest.py +++ b/tests/components/overkiz/conftest.py @@ -1,9 +1,9 @@ """Configuration for overkiz tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.overkiz.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py index 8246a7f51ac..bc2ae7ce4d8 100644 --- a/tests/components/owntracks/test_device_tracker.py +++ b/tests/components/owntracks/test_device_tracker.py @@ -1,6 +1,7 @@ """The tests for the Owntracks device tracker.""" import base64 +from collections.abc import Callable, Generator import json import pickle from unittest.mock import patch @@ -18,6 +19,8 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, async_fire_mqtt_message from tests.typing import ClientSessionGenerator, MqttMockHAClient +type OwnTracksContextFactory = Callable[[], owntracks.OwnTracksContext] + USER = "greg" DEVICE = "phone" @@ -285,15 +288,13 @@ BAD_MESSAGE = {"_type": "unsupported", "tst": 1} BAD_JSON_PREFIX = "--$this is bad json#--" 
BAD_JSON_SUFFIX = "** and it ends here ^^" -# pylint: disable=len-as-condition - @pytest.fixture def setup_comp( hass: HomeAssistant, mock_device_tracker_conf: list[Device], mqtt_mock: MqttMockHAClient, -): +) -> None: """Initialize components.""" hass.loop.run_until_complete(async_setup_component(hass, "device_tracker", {})) @@ -316,7 +317,7 @@ async def setup_owntracks(hass, config, ctx_cls=owntracks.OwnTracksContext): @pytest.fixture -def context(hass, setup_comp): +def context(hass: HomeAssistant, setup_comp: None) -> OwnTracksContextFactory: """Set up the mocked context.""" orig_context = owntracks.OwnTracksContext context = None @@ -409,14 +410,16 @@ def assert_mobile_tracker_accuracy(hass, accuracy, beacon=IBEACON_DEVICE): assert state.attributes.get("gps_accuracy") == accuracy -async def test_location_invalid_devid(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_invalid_devid(hass: HomeAssistant) -> None: """Test the update of a location.""" await send_message(hass, "owntracks/paulus/nexus-5x", LOCATION_MESSAGE) state = hass.states.get("device_tracker.paulus_nexus_5x") assert state.state == "outer" -async def test_location_update(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_update(hass: HomeAssistant) -> None: """Test the update of a location.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -426,7 +429,8 @@ async def test_location_update(hass: HomeAssistant, context) -> None: assert_location_state(hass, "outer") -async def test_location_update_no_t_key(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_update_no_t_key(hass: HomeAssistant) -> None: """Test the update of a location when message does not contain 't'.""" message = LOCATION_MESSAGE.copy() message.pop("t") @@ -438,7 +442,8 @@ async def test_location_update_no_t_key(hass: HomeAssistant, context) -> None: 
assert_location_state(hass, "outer") -async def test_location_inaccurate_gps(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_inaccurate_gps(hass: HomeAssistant) -> None: """Test the location for inaccurate GPS information.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_INACCURATE) @@ -448,7 +453,8 @@ async def test_location_inaccurate_gps(hass: HomeAssistant, context) -> None: assert_location_longitude(hass, LOCATION_MESSAGE["lon"]) -async def test_location_zero_accuracy_gps(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_zero_accuracy_gps(hass: HomeAssistant) -> None: """Ignore the location for zero accuracy GPS information.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_ZERO_ACCURACY) @@ -460,7 +466,9 @@ async def test_location_zero_accuracy_gps(hass: HomeAssistant, context) -> None: # ------------------------------------------------------------------------ # GPS based event entry / exit testing -async def test_event_gps_entry_exit(hass: HomeAssistant, context) -> None: +async def test_event_gps_entry_exit( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the entry event.""" # Entering the owntracks circular region named "inner" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -498,7 +506,9 @@ async def test_event_gps_entry_exit(hass: HomeAssistant, context) -> None: assert_location_accuracy(hass, LOCATION_MESSAGE["acc"]) -async def test_event_gps_with_spaces(hass: HomeAssistant, context) -> None: +async def test_event_gps_with_spaces( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the entry event.""" message = build_message({"desc": "inner 2"}, REGION_GPS_ENTER_MESSAGE) await send_message(hass, EVENT_TOPIC, message) @@ -511,7 +521,8 @@ async 
def test_event_gps_with_spaces(hass: HomeAssistant, context) -> None: assert not context().regions_entered[USER] -async def test_event_gps_entry_inaccurate(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_entry_inaccurate(hass: HomeAssistant) -> None: """Test the event for inaccurate entry.""" # Set location to the outer zone. await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -524,7 +535,9 @@ async def test_event_gps_entry_inaccurate(hass: HomeAssistant, context) -> None: assert_location_state(hass, "inner") -async def test_event_gps_entry_exit_inaccurate(hass: HomeAssistant, context) -> None: +async def test_event_gps_entry_exit_inaccurate( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the event for inaccurate exit.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -544,7 +557,9 @@ async def test_event_gps_entry_exit_inaccurate(hass: HomeAssistant, context) -> assert not context().regions_entered[USER] -async def test_event_gps_entry_exit_zero_accuracy(hass: HomeAssistant, context) -> None: +async def test_event_gps_entry_exit_zero_accuracy( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test entry/exit events with accuracy zero.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE_ZERO) @@ -564,9 +579,8 @@ async def test_event_gps_entry_exit_zero_accuracy(hass: HomeAssistant, context) assert not context().regions_entered[USER] -async def test_event_gps_exit_outside_zone_sets_away( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_exit_outside_zone_sets_away(hass: HomeAssistant) -> None: """Test the event for exit zone.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) assert_location_state(hass, "inner") @@ -579,7 +593,8 @@ async def test_event_gps_exit_outside_zone_sets_away( assert_location_state(hass, STATE_NOT_HOME) -async def 
test_event_gps_entry_exit_right_order(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_entry_exit_right_order(hass: HomeAssistant) -> None: """Test the event for ordering.""" # Enter inner zone # Set location to the outer zone. @@ -604,7 +619,8 @@ async def test_event_gps_entry_exit_right_order(hass: HomeAssistant, context) -> assert_location_state(hass, "outer") -async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant) -> None: """Test the event for wrong order.""" # Enter inner zone await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -627,7 +643,8 @@ async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant, context) -> assert_location_state(hass, "outer") -async def test_event_gps_entry_unknown_zone(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_entry_unknown_zone(hass: HomeAssistant) -> None: """Test the event for unknown zone.""" # Just treat as location update message = build_message({"desc": "unknown"}, REGION_GPS_ENTER_MESSAGE) @@ -636,7 +653,8 @@ async def test_event_gps_entry_unknown_zone(hass: HomeAssistant, context) -> Non assert_location_state(hass, "inner") -async def test_event_gps_exit_unknown_zone(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_exit_unknown_zone(hass: HomeAssistant) -> None: """Test the event for unknown zone.""" # Just treat as location update message = build_message({"desc": "unknown"}, REGION_GPS_LEAVE_MESSAGE) @@ -645,7 +663,8 @@ async def test_event_gps_exit_unknown_zone(hass: HomeAssistant, context) -> None assert_location_state(hass, "outer") -async def test_event_entry_zone_loading_dash(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def 
test_event_entry_zone_loading_dash(hass: HomeAssistant) -> None: """Test the event for zone landing.""" # Make sure the leading - is ignored # Owntracks uses this to switch on hold @@ -654,7 +673,9 @@ async def test_event_entry_zone_loading_dash(hass: HomeAssistant, context) -> No assert_location_state(hass, "inner") -async def test_events_only_on(hass: HomeAssistant, context) -> None: +async def test_events_only_on( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test events_only config suppresses location updates.""" # Sending a location message that is not home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_NOT_HOME) @@ -675,7 +696,9 @@ async def test_events_only_on(hass: HomeAssistant, context) -> None: assert_location_state(hass, STATE_NOT_HOME) -async def test_events_only_off(hass: HomeAssistant, context) -> None: +async def test_events_only_off( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test when events_only is False.""" # Sending a location message that is not home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_NOT_HOME) @@ -696,7 +719,8 @@ async def test_events_only_off(hass: HomeAssistant, context) -> None: assert_location_state(hass, "outer") -async def test_event_source_type_entry_exit(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_source_type_entry_exit(hass: HomeAssistant) -> None: """Test the entry and exit events of source type.""" # Entering the owntracks circular region named "inner" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -726,7 +750,9 @@ async def test_event_source_type_entry_exit(hass: HomeAssistant, context) -> Non # Region Beacon based event entry / exit testing -async def test_event_region_entry_exit(hass: HomeAssistant, context) -> None: +async def test_event_region_entry_exit( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the entry event.""" # Seeing a beacon 
named "inner" await send_message(hass, EVENT_TOPIC, REGION_BEACON_ENTER_MESSAGE) @@ -765,7 +791,9 @@ async def test_event_region_entry_exit(hass: HomeAssistant, context) -> None: assert_location_accuracy(hass, LOCATION_MESSAGE["acc"]) -async def test_event_region_with_spaces(hass: HomeAssistant, context) -> None: +async def test_event_region_with_spaces( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the entry event.""" message = build_message({"desc": "inner 2"}, REGION_BEACON_ENTER_MESSAGE) await send_message(hass, EVENT_TOPIC, message) @@ -778,9 +806,8 @@ async def test_event_region_with_spaces(hass: HomeAssistant, context) -> None: assert not context().regions_entered[USER] -async def test_event_region_entry_exit_right_order( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_region_entry_exit_right_order(hass: HomeAssistant) -> None: """Test the event for ordering.""" # Enter inner zone # Set location to the outer zone. @@ -811,9 +838,8 @@ async def test_event_region_entry_exit_right_order( assert_location_state(hass, "inner") -async def test_event_region_entry_exit_wrong_order( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_region_entry_exit_wrong_order(hass: HomeAssistant) -> None: """Test the event for wrong order.""" # Enter inner zone await send_message(hass, EVENT_TOPIC, REGION_BEACON_ENTER_MESSAGE) @@ -840,9 +866,8 @@ async def test_event_region_entry_exit_wrong_order( assert_location_state(hass, "inner_2") -async def test_event_beacon_unknown_zone_no_location( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_beacon_unknown_zone_no_location(hass: HomeAssistant) -> None: """Test the event for unknown zone.""" # A beacon which does not match a HA zone is the # definition of a mobile beacon. 
In this case, "unknown" @@ -867,7 +892,8 @@ async def test_event_beacon_unknown_zone_no_location( assert_mobile_tracker_state(hass, "unknown", "unknown") -async def test_event_beacon_unknown_zone(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_beacon_unknown_zone(hass: HomeAssistant) -> None: """Test the event for unknown zone.""" # A beacon which does not match a HA zone is the # definition of a mobile beacon. In this case, "unknown" @@ -887,9 +913,8 @@ async def test_event_beacon_unknown_zone(hass: HomeAssistant, context) -> None: assert_mobile_tracker_state(hass, "outer", "unknown") -async def test_event_beacon_entry_zone_loading_dash( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_beacon_entry_zone_loading_dash(hass: HomeAssistant) -> None: """Test the event for beacon zone landing.""" # Make sure the leading - is ignored # Owntracks uses this to switch on hold @@ -901,7 +926,8 @@ async def test_event_beacon_entry_zone_loading_dash( # ------------------------------------------------------------------------ # Mobile Beacon based event entry / exit testing -async def test_mobile_enter_move_beacon(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_mobile_enter_move_beacon(hass: HomeAssistant) -> None: """Test the movement of a beacon.""" # I am in the outer zone. await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -925,7 +951,8 @@ async def test_mobile_enter_move_beacon(hass: HomeAssistant, context) -> None: assert_mobile_tracker_latitude(hass, not_home_lat) -async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant) -> None: """Test the enter and the exit of a mobile beacon.""" # I am in the outer zone. 
await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -948,7 +975,8 @@ async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant, context) -> assert_mobile_tracker_state(hass, "outer") -async def test_mobile_exit_move_beacon(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_mobile_exit_move_beacon(hass: HomeAssistant) -> None: """Test the exit move of a beacon.""" # I am in the outer zone. await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -970,7 +998,9 @@ async def test_mobile_exit_move_beacon(hass: HomeAssistant, context) -> None: assert_mobile_tracker_state(hass, "outer") -async def test_mobile_multiple_async_enter_exit(hass: HomeAssistant, context) -> None: +async def test_mobile_multiple_async_enter_exit( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the multiple entering.""" # Test race condition for _ in range(20): @@ -990,7 +1020,9 @@ async def test_mobile_multiple_async_enter_exit(hass: HomeAssistant, context) -> assert len(context().mobile_beacons_active["greg_phone"]) == 0 -async def test_mobile_multiple_enter_exit(hass: HomeAssistant, context) -> None: +async def test_mobile_multiple_enter_exit( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the multiple entering.""" await send_message(hass, EVENT_TOPIC, MOBILE_BEACON_ENTER_EVENT_MESSAGE) await send_message(hass, EVENT_TOPIC, MOBILE_BEACON_ENTER_EVENT_MESSAGE) @@ -999,7 +1031,8 @@ async def test_mobile_multiple_enter_exit(hass: HomeAssistant, context) -> None: assert len(context().mobile_beacons_active["greg_phone"]) == 0 -async def test_complex_movement(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_complex_movement(hass: HomeAssistant) -> None: """Test a complex sequence representative of real-world use.""" # I am in the outer zone. 
await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -1121,9 +1154,8 @@ async def test_complex_movement(hass: HomeAssistant, context) -> None: assert_mobile_tracker_state(hass, "outer") -async def test_complex_movement_sticky_keys_beacon( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_complex_movement_sticky_keys_beacon(hass: HomeAssistant) -> None: """Test a complex sequence which was previously broken.""" # I am not_home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -1235,7 +1267,8 @@ async def test_complex_movement_sticky_keys_beacon( assert_mobile_tracker_latitude(hass, INNER_ZONE["latitude"]) -async def test_waypoint_import_simple(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_waypoint_import_simple(hass: HomeAssistant) -> None: """Test a simple import of list of waypoints.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message) @@ -1246,7 +1279,8 @@ async def test_waypoint_import_simple(hass: HomeAssistant, context) -> None: assert wayp is not None -async def test_waypoint_import_block(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_waypoint_import_block(hass: HomeAssistant) -> None: """Test import of list of waypoints for blocked user.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC_BLOCKED, waypoints_message) @@ -1277,7 +1311,8 @@ async def test_waypoint_import_no_whitelist(hass: HomeAssistant, setup_comp) -> assert wayp is not None -async def test_waypoint_import_bad_json(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_waypoint_import_bad_json(hass: HomeAssistant) -> None: """Test importing a bad JSON payload.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message, True) @@ -1288,7 
+1323,8 @@ async def test_waypoint_import_bad_json(hass: HomeAssistant, context) -> None: assert wayp is None -async def test_waypoint_import_existing(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_waypoint_import_existing(hass: HomeAssistant) -> None: """Test importing a zone that exists.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message) @@ -1301,7 +1337,8 @@ async def test_waypoint_import_existing(hass: HomeAssistant, context) -> None: assert wayp == new_wayp -async def test_single_waypoint_import(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_single_waypoint_import(hass: HomeAssistant) -> None: """Test single waypoint message.""" waypoint_message = WAYPOINT_MESSAGE.copy() await send_message(hass, WAYPOINT_TOPIC, waypoint_message) @@ -1309,7 +1346,8 @@ async def test_single_waypoint_import(hass: HomeAssistant, context) -> None: assert wayp is not None -async def test_not_implemented_message(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_not_implemented_message(hass: HomeAssistant) -> None: """Handle not implemented message type.""" patch_handler = patch( "homeassistant.components.owntracks.messages.async_handle_not_impl_msg", @@ -1320,7 +1358,8 @@ async def test_not_implemented_message(hass: HomeAssistant, context) -> None: patch_handler.stop() -async def test_unsupported_message(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_unsupported_message(hass: HomeAssistant) -> None: """Handle not implemented message type.""" patch_handler = patch( "homeassistant.components.owntracks.messages.async_handle_unsupported_msg", @@ -1387,7 +1426,7 @@ def mock_cipher(): @pytest.fixture -def config_context(hass, setup_comp): +def config_context(setup_comp: None) -> Generator[None]: """Set up the mocked context.""" patch_load = patch( 
"homeassistant.components.device_tracker.async_load_config", diff --git a/tests/components/panasonic_viera/test_remote.py b/tests/components/panasonic_viera/test_remote.py index 05254753d3f..3ae241fc5e9 100644 --- a/tests/components/panasonic_viera/test_remote.py +++ b/tests/components/panasonic_viera/test_remote.py @@ -46,7 +46,7 @@ async def test_onoff(hass: HomeAssistant, mock_remote) -> None: await hass.services.async_call(REMOTE_DOMAIN, SERVICE_TURN_ON, data) await hass.async_block_till_done() - power = getattr(Keys.power, "value", Keys.power) + power = getattr(Keys.POWER, "value", Keys.POWER) assert mock_remote.send_key.call_args_list == [call(power), call(power)] diff --git a/tests/components/permobil/conftest.py b/tests/components/permobil/conftest.py index ed6a843b206..d3630d3f366 100644 --- a/tests/components/permobil/conftest.py +++ b/tests/components/permobil/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the MyPermobil tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from mypermobil import MyPermobil import pytest -from typing_extensions import Generator from .const import MOCK_REGION_NAME, MOCK_TOKEN, MOCK_URL diff --git a/tests/components/persistent_notification/conftest.py b/tests/components/persistent_notification/conftest.py index d665c0075b3..29ba5a6008a 100644 --- a/tests/components/persistent_notification/conftest.py +++ b/tests/components/persistent_notification/conftest.py @@ -3,10 +3,11 @@ import pytest import homeassistant.components.persistent_notification as pn +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) -async def setup_integration(hass): +async def setup_integration(hass: HomeAssistant) -> None: """Set up persistent notification integration.""" assert await async_setup_component(hass, pn.DOMAIN, {}) diff --git a/tests/components/person/conftest.py b/tests/components/person/conftest.py index 
ecec42b003d..a6dc95ccc9e 100644 --- a/tests/components/person/conftest.py +++ b/tests/components/person/conftest.py @@ -18,7 +18,7 @@ DEVICE_TRACKER_2 = "device_tracker.test_tracker_2" @pytest.fixture -def storage_collection(hass): +def storage_collection(hass: HomeAssistant) -> person.PersonStorageCollection: """Return an empty storage collection.""" id_manager = collection.IDManager() return person.PersonStorageCollection( diff --git a/tests/components/philips_js/conftest.py b/tests/components/philips_js/conftest.py index b6c78fe9e5e..4a79fce85a2 100644 --- a/tests/components/philips_js/conftest.py +++ b/tests/components/philips_js/conftest.py @@ -1,16 +1,18 @@ """Standard setup for tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, create_autospec, patch from haphilipsjs import PhilipsTV import pytest -from typing_extensions import Generator from homeassistant.components.philips_js.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from . 
import MOCK_CONFIG, MOCK_ENTITY_ID, MOCK_NAME, MOCK_SERIAL_NO, MOCK_SYSTEM -from tests.common import MockConfigEntry, mock_device_registry +from tests.common import MockConfigEntry @pytest.fixture @@ -27,11 +29,6 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture(autouse=True) -async def setup_notification(hass): - """Configure notification system.""" - - @pytest.fixture(autouse=True) def mock_tv(): """Disable component actual use.""" @@ -62,7 +59,7 @@ def mock_tv(): @pytest.fixture -async def mock_config_entry(hass): +async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Get standard player.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_CONFIG, title=MOCK_NAME, unique_id=MOCK_SERIAL_NO @@ -72,13 +69,7 @@ async def mock_config_entry(hass): @pytest.fixture -def mock_device_reg(hass): - """Get standard device.""" - return mock_device_registry(hass) - - -@pytest.fixture -async def mock_entity(hass, mock_device_reg, mock_config_entry): +async def mock_entity(hass: HomeAssistant, mock_config_entry: MockConfigEntry) -> str: """Get standard player.""" assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -86,9 +77,13 @@ async def mock_entity(hass, mock_device_reg, mock_config_entry): @pytest.fixture -def mock_device(hass, mock_device_reg, mock_entity, mock_config_entry): +def mock_device( + device_registry: dr.DeviceRegistry, + mock_entity: str, + mock_config_entry: MockConfigEntry, +) -> dr.DeviceEntry: """Get standard device.""" - return mock_device_reg.async_get_or_create( + return device_registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, identifiers={(DOMAIN, MOCK_SERIAL_NO)}, ) diff --git a/tests/components/philips_js/test_device_trigger.py b/tests/components/philips_js/test_device_trigger.py index b9b7439d2fa..8f2e5543f1e 100644 --- a/tests/components/philips_js/test_device_trigger.py +++ 
b/tests/components/philips_js/test_device_trigger.py @@ -9,7 +9,7 @@ from homeassistant.components.philips_js.const import DOMAIN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations, async_mock_service +from tests.common import async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -17,12 +17,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers(hass: HomeAssistant, mock_device) -> None: """Test we get the expected triggers.""" expected_triggers = [ @@ -42,7 +36,11 @@ async def test_get_triggers(hass: HomeAssistant, mock_device) -> None: async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, calls: list[ServiceCall], mock_tv, mock_entity, mock_device + hass: HomeAssistant, + service_calls: list[ServiceCall], + mock_tv, + mock_entity, + mock_device, ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -80,6 +78,10 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == mock_device.id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[0].domain == "media_player" + assert service_calls[0].service == "turn_on" + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["some"] == mock_device.id + assert service_calls[1].data["id"] == 0 diff --git a/tests/components/philips_js/test_diagnostics.py b/tests/components/philips_js/test_diagnostics.py index cb3235b9780..d61546e52c3 100644 --- 
a/tests/components/philips_js/test_diagnostics.py +++ b/tests/components/philips_js/test_diagnostics.py @@ -63,4 +63,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("entry_id")) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/pi_hole/__init__.py b/tests/components/pi_hole/__init__.py index 38231778624..993f6a2571c 100644 --- a/tests/components/pi_hole/__init__.py +++ b/tests/components/pi_hole/__init__.py @@ -33,7 +33,7 @@ ZERO_DATA = { "unique_domains": 0, } -SAMPLE_VERSIONS = { +SAMPLE_VERSIONS_WITH_UPDATES = { "core_current": "v5.5", "core_latest": "v5.6", "core_update": True, @@ -45,6 +45,18 @@ SAMPLE_VERSIONS = { "FTL_update": True, } +SAMPLE_VERSIONS_NO_UPDATES = { + "core_current": "v5.5", + "core_latest": "v5.5", + "core_update": False, + "web_current": "v5.7", + "web_latest": "v5.7", + "web_update": False, + "FTL_current": "v5.10", + "FTL_latest": "v5.10", + "FTL_update": False, +} + HOST = "1.2.3.4" PORT = 80 LOCATION = "location" @@ -103,7 +115,9 @@ CONFIG_ENTRY_WITHOUT_API_KEY = { SWITCH_ENTITY_ID = "switch.pi_hole" -def _create_mocked_hole(raise_exception=False, has_versions=True, has_data=True): +def _create_mocked_hole( + raise_exception=False, has_versions=True, has_update=True, has_data=True +): mocked_hole = MagicMock() type(mocked_hole).get_data = AsyncMock( side_effect=HoleError("") if raise_exception else None @@ -118,7 +132,10 @@ def _create_mocked_hole(raise_exception=False, has_versions=True, has_data=True) else: mocked_hole.data = [] if has_versions: - mocked_hole.versions = SAMPLE_VERSIONS + if has_update: + mocked_hole.versions = SAMPLE_VERSIONS_WITH_UPDATES + else: + mocked_hole.versions = SAMPLE_VERSIONS_NO_UPDATES else: mocked_hole.versions = None return mocked_hole diff --git a/tests/components/pi_hole/test_config_flow.py b/tests/components/pi_hole/test_config_flow.py index 326b01b9a7a..d13712d6f76 
100644 --- a/tests/components/pi_hole/test_config_flow.py +++ b/tests/components/pi_hole/test_config_flow.py @@ -96,7 +96,7 @@ async def test_flow_user_without_api_key(hass: HomeAssistant) -> None: async def test_flow_user_invalid(hass: HomeAssistant) -> None: """Test user initialized flow with invalid server.""" - mocked_hole = _create_mocked_hole(True) + mocked_hole = _create_mocked_hole(raise_exception=True) with _patch_config_flow_hole(mocked_hole): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=CONFIG_FLOW_USER diff --git a/tests/components/pi_hole/test_diagnostics.py b/tests/components/pi_hole/test_diagnostics.py index c9fc9a0a9b8..8d5a83e4622 100644 --- a/tests/components/pi_hole/test_diagnostics.py +++ b/tests/components/pi_hole/test_diagnostics.py @@ -1,6 +1,7 @@ """Test pi_hole component.""" from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components import pi_hole from homeassistant.core import HomeAssistant @@ -28,4 +29,6 @@ async def test_diagnostics( await hass.async_block_till_done() - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/pi_hole/test_update.py b/tests/components/pi_hole/test_update.py index 091b553c475..705e9f9c08d 100644 --- a/tests/components/pi_hole/test_update.py +++ b/tests/components/pi_hole/test_update.py @@ -1,7 +1,7 @@ """Test pi_hole component.""" from homeassistant.components import pi_hole -from homeassistant.const import STATE_ON, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant from . 
import CONFIG_DATA_DEFAULTS, _create_mocked_hole, _patch_init_hole @@ -80,3 +80,44 @@ async def test_update_no_versions(hass: HomeAssistant) -> None: assert state.attributes["installed_version"] is None assert state.attributes["latest_version"] is None assert state.attributes["release_url"] is None + + +async def test_update_no_updates(hass: HomeAssistant) -> None: + """Tests update entity when no latest data available.""" + mocked_hole = _create_mocked_hole(has_versions=True, has_update=False) + entry = MockConfigEntry(domain=pi_hole.DOMAIN, data=CONFIG_DATA_DEFAULTS) + entry.add_to_hass(hass) + with _patch_init_hole(mocked_hole): + assert await hass.config_entries.async_setup(entry.entry_id) + + await hass.async_block_till_done() + + state = hass.states.get("update.pi_hole_core_update_available") + assert state.name == "Pi-Hole Core update available" + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "v5.5" + assert state.attributes["latest_version"] == "v5.5" + assert ( + state.attributes["release_url"] + == "https://github.com/pi-hole/pi-hole/releases/tag/v5.5" + ) + + state = hass.states.get("update.pi_hole_ftl_update_available") + assert state.name == "Pi-Hole FTL update available" + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "v5.10" + assert state.attributes["latest_version"] == "v5.10" + assert ( + state.attributes["release_url"] + == "https://github.com/pi-hole/FTL/releases/tag/v5.10" + ) + + state = hass.states.get("update.pi_hole_web_update_available") + assert state.name == "Pi-Hole Web update available" + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "v5.7" + assert state.attributes["latest_version"] == "v5.7" + assert ( + state.attributes["release_url"] + == "https://github.com/pi-hole/AdminLTE/releases/tag/v5.7" + ) diff --git a/tests/components/picnic/test_todo.py b/tests/components/picnic/test_todo.py index cdd30967058..2db5bc90159 100644 --- 
a/tests/components/picnic/test_todo.py +++ b/tests/components/picnic/test_todo.py @@ -5,7 +5,8 @@ from unittest.mock import MagicMock, Mock import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN +from homeassistant.components.todo import ATTR_ITEM, DOMAIN, TodoServices +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -91,9 +92,9 @@ async def test_create_todo_list_item( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "Melk"}, - target={"entity_id": ENTITY_ID}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Melk"}, + target={ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) @@ -119,8 +120,8 @@ async def test_create_todo_list_item_not_found( with pytest.raises(ServiceValidationError): await hass.services.async_call( DOMAIN, - "add_item", - {"item": "Melk"}, - target={"entity_id": ENTITY_ID}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Melk"}, + target={ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) diff --git a/tests/components/pilight/test_init.py b/tests/components/pilight/test_init.py index c48135f59eb..dfc62d30619 100644 --- a/tests/components/pilight/test_init.py +++ b/tests/components/pilight/test_init.py @@ -40,7 +40,7 @@ class PilightDaemonSim: "message": {"id": 0, "unit": 0, "off": 1}, } - def __init__(self, host, port): + def __init__(self, host, port) -> None: """Init pilight client, ignore parameters.""" def send_code(self, call): diff --git a/tests/components/pilight/test_sensor.py b/tests/components/pilight/test_sensor.py index 97e031736e5..9f529117642 100644 --- a/tests/components/pilight/test_sensor.py +++ b/tests/components/pilight/test_sensor.py @@ -12,7 +12,7 @@ from tests.common import assert_setup_component, mock_component @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "pilight") diff 
--git a/tests/components/ping/snapshots/test_binary_sensor.ambr b/tests/components/ping/snapshots/test_binary_sensor.ambr index 98ea9a8a847..24717938874 100644 --- a/tests/components/ping/snapshots/test_binary_sensor.ambr +++ b/tests/components/ping/snapshots/test_binary_sensor.ambr @@ -1,64 +1,4 @@ # serializer version: 1 -# name: test_sensor - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.10_10_10_10', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': '10.10.10.10', - 'platform': 'ping', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor.1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': '10.10.10.10', - 'round_trip_time_avg': 4.333, - 'round_trip_time_max': 10, - 'round_trip_time_mdev': '', - 'round_trip_time_min': 1, - }), - 'context': , - 'entity_id': 'binary_sensor.10_10_10_10', - 'last_changed': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor.2 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': '10.10.10.10', - }), - 'context': , - 'entity_id': 'binary_sensor.10_10_10_10', - 'last_changed': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_setup_and_update EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ping/test_device_tracker.py b/tests/components/ping/test_device_tracker.py index 5aa425226b3..4a5d6ba94ed 100644 --- a/tests/components/ping/test_device_tracker.py +++ b/tests/components/ping/test_device_tracker.py @@ -1,12 +1,12 @@ 
"""Test the binary sensor platform of ping.""" +from collections.abc import Generator from datetime import timedelta from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory from icmplib import Host import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er diff --git a/tests/components/plaato/test_config_flow.py b/tests/components/plaato/test_config_flow.py index efda354f20d..ceadab7f832 100644 --- a/tests/components/plaato/test_config_flow.py +++ b/tests/components/plaato/test_config_flow.py @@ -64,8 +64,8 @@ async def test_show_config_form_device_type_airlock(hass: HomeAssistant) -> None assert result["type"] is FlowResultType.FORM assert result["step_id"] == "api_method" - assert result["data_schema"].schema.get(CONF_TOKEN) == str - assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool + assert result["data_schema"].schema.get(CONF_TOKEN) is str + assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is bool async def test_show_config_form_device_type_keg(hass: HomeAssistant) -> None: @@ -78,7 +78,7 @@ async def test_show_config_form_device_type_keg(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "api_method" - assert result["data_schema"].schema.get(CONF_TOKEN) == str + assert result["data_schema"].schema.get(CONF_TOKEN) is str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None diff --git a/tests/components/plex/conftest.py b/tests/components/plex/conftest.py index a061d9c1105..53c032cb08b 100644 --- a/tests/components/plex/conftest.py +++ b/tests/components/plex/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Plex tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest import requests_mock -from typing_extensions import Generator from homeassistant.components.plex.const import DOMAIN, PLEX_SERVER_CONFIG, 
SERVERS from homeassistant.const import CONF_URL diff --git a/tests/components/plex/mock_classes.py b/tests/components/plex/mock_classes.py index c6f1aeda9b7..92844f755d6 100644 --- a/tests/components/plex/mock_classes.py +++ b/tests/components/plex/mock_classes.py @@ -67,7 +67,7 @@ GDM_CLIENT_PAYLOAD = [ class MockGDM: """Mock a GDM instance.""" - def __init__(self, disabled=False): + def __init__(self, disabled=False) -> None: """Initialize the object.""" self.entries = [] self.disabled = disabled diff --git a/tests/components/plex/test_config_flow.py b/tests/components/plex/test_config_flow.py index 08733a7dd17..202d62d70e0 100644 --- a/tests/components/plex/test_config_flow.py +++ b/tests/components/plex/test_config_flow.py @@ -537,7 +537,7 @@ async def test_manual_config(hass: HomeAssistant, mock_plex_calls) -> None: class WrongCertValidaitionException(requests.exceptions.SSLError): """Mock the exception showing an unmatched error.""" - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called self.__context__ = ssl.SSLCertVerificationError( "some random message that doesn't match" ) diff --git a/tests/components/plex/test_init.py b/tests/components/plex/test_init.py index 15af78faf65..490091998ff 100644 --- a/tests/components/plex/test_init.py +++ b/tests/components/plex/test_init.py @@ -209,7 +209,7 @@ async def test_setup_when_certificate_changed( class WrongCertHostnameException(requests.exceptions.SSLError): """Mock the exception showing a mismatched hostname.""" - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called self.__context__ = ssl.SSLCertVerificationError( f"hostname '{old_domain}' doesn't match" ) diff --git a/tests/components/plex/test_playback.py b/tests/components/plex/test_playback.py index 183a779c940..c4206bd5f3e 100644 --- a/tests/components/plex/test_playback.py +++ 
b/tests/components/plex/test_playback.py @@ -28,7 +28,7 @@ class MockPlexMedia: viewOffset = 333 _server = Mock(_baseurl=PLEX_DIRECT_URL) - def __init__(self, title, mediatype): + def __init__(self, title, mediatype) -> None: """Initialize the instance.""" self.listType = mediatype self.title = title diff --git a/tests/components/plex/test_update.py b/tests/components/plex/test_update.py index 942162665af..7ad2481a726 100644 --- a/tests/components/plex/test_update.py +++ b/tests/components/plex/test_update.py @@ -9,7 +9,8 @@ from homeassistant.components.update import ( SERVICE_INSTALL, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant, HomeAssistantError +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry, async_fire_time_changed diff --git a/tests/components/plugwise/conftest.py b/tests/components/plugwise/conftest.py index 83826a0a543..ec857a965e5 100644 --- a/tests/components/plugwise/conftest.py +++ b/tests/components/plugwise/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from plugwise import PlugwiseData import pytest -from typing_extensions import Generator from homeassistant.components.plugwise.const import DOMAIN from homeassistant.const import ( diff --git a/tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json b/tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json index 47c8e4dceb0..9c17df5072d 100644 --- a/tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json +++ b/tests/components/plugwise/fixtures/adam_multiple_devices_per_zone/all_data.json @@ -479,6 +479,7 @@ "warning": "Node Plug (with MAC address 000D6F000D13CB01, in room 
'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device." } }, + "reboot": true, "smile_name": "Adam" } } diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index d496edb4149..5088281404a 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -99,6 +99,7 @@ "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", "item_count": 66, "notifications": {}, + "reboot": true, "smile_name": "Smile Anna" } } diff --git a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json index 6cd3241a637..759d0094dbb 100644 --- a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json @@ -66,7 +66,7 @@ "model": "ThermoTouch", "name": "Anna", "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "None", + "select_schedule": "off", "sensors": { "setpoint": 23.5, "temperature": 25.8 @@ -165,6 +165,7 @@ "heater_id": "056ee145a816487eaa69243c3280f8bf", "item_count": 147, "notifications": {}, + "reboot": true, "smile_name": "Adam" } } diff --git a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json index 0e9df1a5079..e2c23df42d6 100644 --- a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json @@ -71,7 +71,7 @@ "model": "ThermoTouch", "name": "Anna", "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "None", + "select_schedule": "off", "sensors": { "setpoint": 20.0, "temperature": 19.1 @@ -164,6 +164,7 @@ "heater_id": "056ee145a816487eaa69243c3280f8bf", 
"item_count": 147, "notifications": {}, + "reboot": true, "smile_name": "Adam" } } diff --git a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index 378a5e0a760..7888d777804 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -3,7 +3,6 @@ "1346fbd8498d4dbcab7e18d51b771f3d": { "active_preset": "no_frost", "available": true, - "available_schedules": ["None"], "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", @@ -13,7 +12,6 @@ "model": "Lisa", "name": "Slaapkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "None", "sensors": { "battery": 92, "setpoint": 13.0, @@ -99,7 +97,6 @@ "6f3e9d7084214c21b9dfa46f6eeb8700": { "active_preset": "home", "available": true, - "available_schedules": ["None"], "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", @@ -109,7 +106,6 @@ "model": "Lisa", "name": "Kinderkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "None", "sensors": { "battery": 79, "setpoint": 13.0, @@ -156,7 +152,6 @@ "a6abc6a129ee499c88a4d420cc413b47": { "active_preset": "home", "available": true, - "available_schedules": ["None"], "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", @@ -166,7 +161,6 @@ "model": "Lisa", "name": "Logeerkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "None", "sensors": { "battery": 80, "setpoint": 13.0, @@ -269,7 +263,6 @@ "f61f1a2535f54f52ad006a3d18e459ca": { "active_preset": "home", "available": true, - "available_schedules": ["None"], "control_state": "off", "dev_class": "zone_thermometer", "firmware": "2020-09-01T02:00:00+02:00", @@ -279,7 +272,6 @@ "model": "Jip", "name": "Woonkamer", 
"preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "None", "sensors": { "battery": 100, "humidity": 56.2, @@ -306,8 +298,9 @@ "cooling_present": false, "gateway_id": "b5c2386c6f6342669e50fe49dd05b188", "heater_id": "e4684553153b44afbef2200885f379dc", - "item_count": 221, + "item_count": 213, "notifications": {}, + "reboot": true, "smile_name": "Adam" } } diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index ef7af8a362b..cb30b919797 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -99,6 +99,7 @@ "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", "item_count": 66, "notifications": {}, + "reboot": true, "smile_name": "Smile Anna" } } diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index 8f2e6a75f3f..660f6b5a76b 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -99,6 +99,7 @@ "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", "item_count": 66, "notifications": {}, + "reboot": true, "smile_name": "Smile Anna" } } diff --git a/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json b/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json index 318035a5d2c..7f152779252 100644 --- a/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json +++ b/tests/components/plugwise/fixtures/p1v4_442_single/all_data.json @@ -44,6 +44,7 @@ "gateway_id": "a455b61e52394b2db5081ce025a430f3", "item_count": 31, "notifications": {}, + "reboot": true, "smile_name": "Smile P1" } } diff --git a/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json 
b/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json index ecda8049163..582c883a3a7 100644 --- a/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json +++ b/tests/components/plugwise/fixtures/p1v4_442_triple/all_data.json @@ -57,6 +57,7 @@ "warning": "The Smile P1 is not connected to a smart meter." } }, + "reboot": true, "smile_name": "Smile P1" } } diff --git a/tests/components/plugwise/snapshots/test_diagnostics.ambr b/tests/components/plugwise/snapshots/test_diagnostics.ambr index 0fa3df4e660..44f4023d014 100644 --- a/tests/components/plugwise/snapshots/test_diagnostics.ambr +++ b/tests/components/plugwise/snapshots/test_diagnostics.ambr @@ -511,6 +511,7 @@ 'warning': "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device.", }), }), + 'reboot': True, 'smile_name': 'Adam', }), }) diff --git a/tests/components/plugwise/test_button.py b/tests/components/plugwise/test_button.py new file mode 100644 index 00000000000..23003b3ffe6 --- /dev/null +++ b/tests/components/plugwise/test_button.py @@ -0,0 +1,39 @@ +"""Tests for Plugwise button entities.""" + +from unittest.mock import MagicMock + +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + SERVICE_PRESS, + ButtonDeviceClass, +) +from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_adam_reboot_button( + hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry +) -> None: + """Test creation of button entities.""" + state = hass.states.get("button.adam_reboot") + assert state + assert state.state == STATE_UNKNOWN + assert state.attributes.get(ATTR_DEVICE_CLASS) == ButtonDeviceClass.RESTART + + registry = er.async_get(hass) + entry = 
registry.async_get("button.adam_reboot") + assert entry + assert entry.unique_id == "fe799307f1624099878210aa0b9f1475-reboot" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.adam_reboot"}, + blocking=True, + ) + + assert mock_smile_adam.reboot_gateway.call_count == 1 + mock_smile_adam.reboot_gateway.assert_called_with() diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index 5cdc468a957..70cef16bcdc 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -6,9 +6,15 @@ from unittest.mock import MagicMock, patch from plugwise.exceptions import PlugwiseError import pytest -from homeassistant.components.climate import HVACMode +from homeassistant.components.climate import ( + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -153,8 +159,8 @@ async def test_adam_climate_adjust_negative_testing( with pytest.raises(HomeAssistantError): await hass.services.async_call( - "climate", - "set_temperature", + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, {"entity_id": "climate.zone_lisa_wk", "temperature": 25}, blocking=True, ) @@ -165,8 +171,8 @@ async def test_adam_climate_entity_climate_changes( ) -> None: """Test handling of user requests in adam climate device environment.""" await hass.services.async_call( - "climate", - "set_temperature", + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, {"entity_id": "climate.zone_lisa_wk", "temperature": 25}, blocking=True, ) @@ -176,8 +182,8 @@ async def test_adam_climate_entity_climate_changes( ) await hass.services.async_call( - "climate", 
- "set_temperature", + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, { "entity_id": "climate.zone_lisa_wk", "hvac_mode": "heat", @@ -190,17 +196,17 @@ async def test_adam_climate_entity_climate_changes( "c50f167537524366a5af7aa3942feb1e", {"setpoint": 25.0} ) - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError): await hass.services.async_call( - "climate", - "set_temperature", + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, {"entity_id": "climate.zone_lisa_wk", "temperature": 150}, blocking=True, ) await hass.services.async_call( - "climate", - "set_preset_mode", + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, {"entity_id": "climate.zone_lisa_wk", "preset_mode": "away"}, blocking=True, ) @@ -210,8 +216,8 @@ async def test_adam_climate_entity_climate_changes( ) await hass.services.async_call( - "climate", - "set_hvac_mode", + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, {"entity_id": "climate.zone_lisa_wk", "hvac_mode": "heat"}, blocking=True, ) @@ -222,8 +228,8 @@ async def test_adam_climate_entity_climate_changes( with pytest.raises(HomeAssistantError): await hass.services.async_call( - "climate", - "set_hvac_mode", + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, { "entity_id": "climate.zone_thermostat_jessie", "hvac_mode": "dry", @@ -242,8 +248,8 @@ async def test_adam_climate_off_mode_change( assert state assert state.state == HVACMode.OFF await hass.services.async_call( - "climate", - "set_hvac_mode", + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, { "entity_id": "climate.slaapkamer", "hvac_mode": "heat", @@ -258,8 +264,8 @@ async def test_adam_climate_off_mode_change( assert state assert state.state == HVACMode.HEAT await hass.services.async_call( - "climate", - "set_hvac_mode", + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, { "entity_id": "climate.kinderkamer", "hvac_mode": "off", @@ -274,8 +280,8 @@ async def test_adam_climate_off_mode_change( assert state assert state.state == HVACMode.HEAT await hass.services.async_call( - "climate", - "set_hvac_mode", + 
CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, { "entity_id": "climate.logeerkamer", "hvac_mode": "heat", @@ -353,8 +359,8 @@ async def test_anna_climate_entity_climate_changes( ) -> None: """Test handling of user requests in anna climate device environment.""" await hass.services.async_call( - "climate", - "set_temperature", + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, {"entity_id": "climate.anna", "target_temp_high": 30, "target_temp_low": 20}, blocking=True, ) @@ -365,8 +371,8 @@ async def test_anna_climate_entity_climate_changes( ) await hass.services.async_call( - "climate", - "set_preset_mode", + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, {"entity_id": "climate.anna", "preset_mode": "away"}, blocking=True, ) @@ -376,8 +382,8 @@ async def test_anna_climate_entity_climate_changes( ) await hass.services.async_call( - "climate", - "set_hvac_mode", + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, {"entity_id": "climate.anna", "hvac_mode": "auto"}, blocking=True, ) @@ -385,8 +391,8 @@ async def test_anna_climate_entity_climate_changes( assert mock_smile_anna.set_schedule_state.call_count == 0 await hass.services.async_call( - "climate", - "set_hvac_mode", + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, {"entity_id": "climate.anna", "hvac_mode": "heat_cool"}, blocking=True, ) @@ -395,7 +401,7 @@ async def test_anna_climate_entity_climate_changes( "c784ee9fdab44e1395b8dee7d7a497d5", "off" ) data = mock_smile_anna.async_update.return_value - data.devices["3cb70739631c4d17a86b8b12e8a5161b"]["available_schedules"] = ["None"] + data.devices["3cb70739631c4d17a86b8b12e8a5161b"].pop("available_schedules") with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() diff --git a/tests/components/plugwise/test_init.py b/tests/components/plugwise/test_init.py index 9c709f1c4f6..26aedf864dc 100644 --- a/tests/components/plugwise/test_init.py +++ b/tests/components/plugwise/test_init.py @@ -7,6 
+7,7 @@ from plugwise.exceptions import ( ConnectionFailedError, InvalidAuthentication, InvalidXMLError, + PlugwiseError, ResponseError, UnsupportedDeviceError, ) @@ -38,7 +39,7 @@ TOM = { "hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", "model": "Tom/Floor", - "name": "Tom Badkamer", + "name": "Tom Zolder", "sensors": { "battery": 99, "temperature": 18.6, @@ -83,6 +84,7 @@ async def test_load_unload_config_entry( (ConnectionFailedError, ConfigEntryState.SETUP_RETRY), (InvalidAuthentication, ConfigEntryState.SETUP_ERROR), (InvalidXMLError, ConfigEntryState.SETUP_RETRY), + (PlugwiseError, ConfigEntryState.SETUP_RETRY), (ResponseError, ConfigEntryState.SETUP_RETRY), (UnsupportedDeviceError, ConfigEntryState.SETUP_ERROR), ], @@ -219,7 +221,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 28 + == 29 ) assert ( len( @@ -242,7 +244,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 33 + == 34 ) assert ( len( @@ -256,3 +258,30 @@ async def test_update_device( for device_entry in list(device_registry.devices.values()): item_list.extend(x[1] for x in device_entry.identifiers) assert "01234567890abcdefghijklmnopqrstu" in item_list + + # Remove the existing Tom/Floor + data.devices.pop("1772a4ea304041adb83f357b751341ff") + with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): + async_fire_time_changed(hass, utcnow + timedelta(minutes=1)) + await hass.async_block_till_done() + + assert ( + len( + er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + ) + == 29 + ) + assert ( + len( + dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + ) + == 6 + ) + item_list: list[str] = [] + for device_entry in list(device_registry.devices.values()): + item_list.extend(x[1] for x in device_entry.identifiers) + assert "1772a4ea304041adb83f357b751341ff" not in item_list diff --git a/tests/components/plugwise/test_number.py 
b/tests/components/plugwise/test_number.py index 6fa65b3e65a..e10a7caa9e9 100644 --- a/tests/components/plugwise/test_number.py +++ b/tests/components/plugwise/test_number.py @@ -36,9 +36,9 @@ async def test_anna_max_boiler_temp_change( blocking=True, ) - assert mock_smile_anna.set_number_setpoint.call_count == 1 - mock_smile_anna.set_number_setpoint.assert_called_with( - "maximum_boiler_temperature", "1cbf783bb11e4a7c8a6843dee3a86927", 65.0 + assert mock_smile_anna.set_number.call_count == 1 + mock_smile_anna.set_number.assert_called_with( + "1cbf783bb11e4a7c8a6843dee3a86927", "maximum_boiler_temperature", 65.0 ) @@ -65,9 +65,9 @@ async def test_adam_dhw_setpoint_change( blocking=True, ) - assert mock_smile_adam_2.set_number_setpoint.call_count == 1 - mock_smile_adam_2.set_number_setpoint.assert_called_with( - "max_dhw_temperature", "056ee145a816487eaa69243c3280f8bf", 55.0 + assert mock_smile_adam_2.set_number.call_count == 1 + mock_smile_adam_2.set_number.assert_called_with( + "056ee145a816487eaa69243c3280f8bf", "max_dhw_temperature", 55.0 ) @@ -97,7 +97,7 @@ async def test_adam_temperature_offset_change( blocking=True, ) - assert mock_smile_adam.set_temperature_offset.call_count == 1 - mock_smile_adam.set_temperature_offset.assert_called_with( - "temperature_offset", "6a3bf693d05e48e0b460c815a4fdd09d", 1.0 + assert mock_smile_adam.set_number.call_count == 1 + mock_smile_adam.set_number.assert_called_with( + "6a3bf693d05e48e0b460c815a4fdd09d", "temperature_offset", 1.0 ) diff --git a/tests/components/plugwise/test_select.py b/tests/components/plugwise/test_select.py index 86b21af9e8b..b9dec283bc4 100644 --- a/tests/components/plugwise/test_select.py +++ b/tests/components/plugwise/test_select.py @@ -38,11 +38,12 @@ async def test_adam_change_select_entity( blocking=True, ) - assert mock_smile_adam.set_schedule_state.call_count == 1 - mock_smile_adam.set_schedule_state.assert_called_with( + assert mock_smile_adam.set_select.call_count == 1 + 
mock_smile_adam.set_select.assert_called_with( + "select_schedule", "c50f167537524366a5af7aa3942feb1e", - "on", "Badkamer Schema", + "on", ) @@ -69,5 +70,10 @@ async def test_adam_select_regulation_mode( }, blocking=True, ) - assert mock_smile_adam_3.set_regulation_mode.call_count == 1 - mock_smile_adam_3.set_regulation_mode.assert_called_with("heating") + assert mock_smile_adam_3.set_select.call_count == 1 + mock_smile_adam_3.set_select.assert_called_with( + "select_regulation_mode", + "bc93488efab249e5bc54fd7e175a6f91", + "heating", + "on", + ) diff --git a/tests/components/plugwise/test_switch.py b/tests/components/plugwise/test_switch.py index 6b2393476ae..5da76bb0ebd 100644 --- a/tests/components/plugwise/test_switch.py +++ b/tests/components/plugwise/test_switch.py @@ -7,6 +7,12 @@ import pytest from homeassistant.components.plugwise.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_ON, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -20,11 +26,11 @@ async def test_adam_climate_switch_entities( """Test creation of climate related switch entities.""" state = hass.states.get("switch.cv_pomp_relay") assert state - assert state.state == "on" + assert state.state == STATE_ON state = hass.states.get("switch.fibaro_hc2_relay") assert state - assert state.state == "on" + assert state.state == STATE_ON async def test_adam_climate_switch_negative_testing( @@ -35,8 +41,8 @@ async def test_adam_climate_switch_negative_testing( with pytest.raises(HomeAssistantError): await hass.services.async_call( - "switch", - "turn_off", + SWITCH_DOMAIN, + SERVICE_TURN_OFF, {"entity_id": "switch.cv_pomp_relay"}, blocking=True, ) @@ -48,8 +54,8 @@ async def test_adam_climate_switch_negative_testing( with pytest.raises(HomeAssistantError): 
await hass.services.async_call( - "switch", - "turn_on", + SWITCH_DOMAIN, + SERVICE_TURN_ON, {"entity_id": "switch.fibaro_hc2_relay"}, blocking=True, ) @@ -65,8 +71,8 @@ async def test_adam_climate_switch_changes( ) -> None: """Test changing of climate related switch entities.""" await hass.services.async_call( - "switch", - "turn_off", + SWITCH_DOMAIN, + SERVICE_TURN_OFF, {"entity_id": "switch.cv_pomp_relay"}, blocking=True, ) @@ -77,8 +83,8 @@ async def test_adam_climate_switch_changes( ) await hass.services.async_call( - "switch", - "toggle", + SWITCH_DOMAIN, + SERVICE_TOGGLE, {"entity_id": "switch.fibaro_hc2_relay"}, blocking=True, ) @@ -89,8 +95,8 @@ async def test_adam_climate_switch_changes( ) await hass.services.async_call( - "switch", - "turn_on", + SWITCH_DOMAIN, + SERVICE_TURN_ON, {"entity_id": "switch.fibaro_hc2_relay"}, blocking=True, ) @@ -107,11 +113,11 @@ async def test_stretch_switch_entities( """Test creation of climate related switch entities.""" state = hass.states.get("switch.koelkast_92c4a_relay") assert state - assert state.state == "on" + assert state.state == STATE_ON state = hass.states.get("switch.droger_52559_relay") assert state - assert state.state == "on" + assert state.state == STATE_ON async def test_stretch_switch_changes( @@ -119,8 +125,8 @@ async def test_stretch_switch_changes( ) -> None: """Test changing of power related switch entities.""" await hass.services.async_call( - "switch", - "turn_off", + SWITCH_DOMAIN, + SERVICE_TURN_OFF, {"entity_id": "switch.koelkast_92c4a_relay"}, blocking=True, ) @@ -130,8 +136,8 @@ async def test_stretch_switch_changes( ) await hass.services.async_call( - "switch", - "toggle", + SWITCH_DOMAIN, + SERVICE_TOGGLE, {"entity_id": "switch.droger_52559_relay"}, blocking=True, ) @@ -141,8 +147,8 @@ async def test_stretch_switch_changes( ) await hass.services.async_call( - "switch", - "turn_on", + SWITCH_DOMAIN, + SERVICE_TURN_ON, {"entity_id": "switch.droger_52559_relay"}, blocking=True, ) diff --git 
a/tests/components/poolsense/conftest.py b/tests/components/poolsense/conftest.py index ac16ef23ff3..6a842df7cfd 100644 --- a/tests/components/poolsense/conftest.py +++ b/tests/components/poolsense/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Poolsense tests.""" +from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.poolsense.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/powerwall/test_switch.py b/tests/components/powerwall/test_switch.py index b01f60210a6..b4ff0ca724e 100644 --- a/tests/components/powerwall/test_switch.py +++ b/tests/components/powerwall/test_switch.py @@ -1,6 +1,6 @@ """Test for Powerwall off-grid switch.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest from tesla_powerwall import GridStatus, PowerwallError @@ -24,7 +24,7 @@ ENTITY_ID = "switch.mysite_off_grid_operation" @pytest.fixture(name="mock_powerwall") -async def mock_powerwall_fixture(hass): +async def mock_powerwall_fixture(hass: HomeAssistant) -> MagicMock: """Set up base powerwall fixture.""" mock_powerwall = await _mock_powerwall_with_fixtures(hass) diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 2eca84b43fe..3f0e0b92056 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -176,12 +176,12 @@ async def test_dump_log_object( await hass.async_block_till_done() class DumpLogDummy: - def __init__(self, fail): + def __init__(self, fail) -> None: self.fail = fail def __repr__(self): if self.fail: - raise Exception("failed") # pylint: disable=broad-exception-raised + raise Exception("failed") # noqa: TRY002 return "" obj1 = DumpLogDummy(False) @@ -284,14 +284,14 @@ async def test_lru_stats(hass: HomeAssistant, caplog: 
pytest.LogCaptureFixture) return 1 class DomainData: - def __init__(self): + def __init__(self) -> None: self._data = LRU(1) domain_data = DomainData() assert hass.services.has_service(DOMAIN, SERVICE_LRU_STATS) class LRUCache: - def __init__(self): + def __init__(self) -> None: self._data = {"sqlalchemy_test": 1} sqlalchemy_lru_cache = LRUCache() diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 499d1a5df14..12643c39dfa 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -16,6 +16,7 @@ from homeassistant.components import ( counter, cover, device_tracker, + fan, humidifier, input_boolean, input_number, @@ -30,11 +31,23 @@ from homeassistant.components import ( ) from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, + ATTR_FAN_MODE, + ATTR_FAN_MODES, ATTR_HUMIDITY, ATTR_HVAC_ACTION, + ATTR_HVAC_MODES, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, ) +from homeassistant.components.fan import ( + ATTR_DIRECTION, + ATTR_OSCILLATING, + ATTR_PERCENTAGE, + ATTR_PRESET_MODE, + ATTR_PRESET_MODES, + DIRECTION_FORWARD, + DIRECTION_REVERSE, +) from homeassistant.components.humidifier import ATTR_AVAILABLE_MODES from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( @@ -395,6 +408,18 @@ async def test_climate( 'entity="climate.fritzdect",' 'friendly_name="Fritz!DECT"} 0.0' in body ) + assert ( + 'climate_preset_mode{domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="away"} 1.0' in body + ) + assert ( + 'climate_fan_mode{domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="auto"} 1.0' in body + ) @pytest.mark.parametrize("namespace", [""]) @@ -562,6 +587,51 @@ async def test_lock( ) +@pytest.mark.parametrize("namespace", [""]) +async def test_fan( + client: ClientSessionGenerator, fan_entities: dict[str, er.RegistryEntry] +) -> None: + """Test 
prometheus metrics for fan.""" + body = await generate_latest_metrics(client) + + assert ( + 'fan_state{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1"} 1.0' in body + ) + + assert ( + 'fan_speed_percent{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1"} 33.0' in body + ) + + assert ( + 'fan_is_oscillating{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1"} 1.0' in body + ) + + assert ( + 'fan_direction_reversed{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1"} 0.0' in body + ) + + assert ( + 'fan_preset_mode{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1",' + 'mode="LO"} 1.0' in body + ) + + assert ( + 'fan_direction_reversed{domain="fan",' + 'entity="fan.fan_2",' + 'friendly_name="Reverse Fan"} 1.0' in body + ) + + @pytest.mark.parametrize("namespace", [""]) async def test_cover( client: ClientSessionGenerator, cover_entities: dict[str, er.RegistryEntry] @@ -1359,6 +1429,11 @@ async def climate_fixture( ATTR_TARGET_TEMP_LOW: 21, ATTR_TARGET_TEMP_HIGH: 24, ATTR_HVAC_ACTION: climate.HVACAction.COOLING, + ATTR_HVAC_MODES: ["off", "heat", "cool", "heat_cool"], + ATTR_PRESET_MODE: "away", + ATTR_PRESET_MODES: ["away", "home", "sleep"], + ATTR_FAN_MODE: "auto", + ATTR_FAN_MODES: ["auto", "on"], } set_state_with_entry( hass, climate_2, climate.HVACAction.HEATING, climate_2_attributes @@ -1788,6 +1863,46 @@ async def switch_fixture( return data +@pytest.fixture(name="fan_entities") +async def fan_fixture( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> dict[str, er.RegistryEntry]: + """Simulate fan entities.""" + data = {} + fan_1 = entity_registry.async_get_or_create( + domain=fan.DOMAIN, + platform="test", + unique_id="fan_1", + suggested_object_id="fan_1", + original_name="Fan 1", + ) + fan_1_attributes = { + ATTR_DIRECTION: DIRECTION_FORWARD, + ATTR_OSCILLATING: True, + ATTR_PERCENTAGE: 33, + ATTR_PRESET_MODE: "LO", + ATTR_PRESET_MODES: ["LO", "OFF", "HI"], + } + 
set_state_with_entry(hass, fan_1, STATE_ON, fan_1_attributes) + data["fan_1"] = fan_1 + data["fan_1_attributes"] = fan_1_attributes + + fan_2 = entity_registry.async_get_or_create( + domain=fan.DOMAIN, + platform="test", + unique_id="fan_2", + suggested_object_id="fan_2", + original_name="Reverse Fan", + ) + fan_2_attributes = {ATTR_DIRECTION: DIRECTION_REVERSE} + set_state_with_entry(hass, fan_2, STATE_ON, fan_2_attributes) + data["fan_2"] = fan_2 + data["fan_2_attributes"] = fan_2_attributes + + await hass.async_block_till_done() + return data + + @pytest.fixture(name="person_entities") async def person_fixture( hass: HomeAssistant, entity_registry: er.EntityRegistry diff --git a/tests/components/prosegur/test_alarm_control_panel.py b/tests/components/prosegur/test_alarm_control_panel.py index b65b86b3049..f66d070f218 100644 --- a/tests/components/prosegur/test_alarm_control_panel.py +++ b/tests/components/prosegur/test_alarm_control_panel.py @@ -1,10 +1,10 @@ """Tests for the Prosegur alarm control panel device.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from pyprosegur.installation import Status import pytest -from typing_extensions import Generator from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.const import ( diff --git a/tests/components/proximity/test_diagnostics.py b/tests/components/proximity/test_diagnostics.py index a60c592fcab..e4f22236808 100644 --- a/tests/components/proximity/test_diagnostics.py +++ b/tests/components/proximity/test_diagnostics.py @@ -72,5 +72,12 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry( hass, hass_client, mock_entry ) == snapshot( - exclude=props("entry_id", "last_changed", "last_reported", "last_updated") + exclude=props( + "entry_id", + "last_changed", + "last_reported", + "last_updated", + "created_at", + "modified_at", + ) ) diff --git a/tests/components/proximity/test_init.py 
b/tests/components/proximity/test_init.py index 6c2b54cae29..eeb181e0670 100644 --- a/tests/components/proximity/test_init.py +++ b/tests/components/proximity/test_init.py @@ -2,15 +2,12 @@ import pytest -from homeassistant.components import automation, script -from homeassistant.components.automation import automations_with_entity from homeassistant.components.proximity.const import ( CONF_IGNORED_ZONES, CONF_TOLERANCE, CONF_TRACKED_ENTITIES, DOMAIN, ) -from homeassistant.components.script import scripts_with_entity from homeassistant.const import ( ATTR_FRIENDLY_NAME, CONF_ZONE, @@ -20,109 +17,81 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.helpers.issue_registry as ir -from homeassistant.setup import async_setup_component from homeassistant.util import slugify from tests.common import MockConfigEntry +async def async_setup_single_entry( + hass: HomeAssistant, + zone: str, + tracked_entites: list[str], + ignored_zones: list[str], + tolerance: int, +) -> MockConfigEntry: + """Set up the proximity component with a single entry.""" + mock_config = MockConfigEntry( + domain=DOMAIN, + title="Home", + data={ + CONF_ZONE: zone, + CONF_TRACKED_ENTITIES: tracked_entites, + CONF_IGNORED_ZONES: ignored_zones, + CONF_TOLERANCE: tolerance, + }, + ) + mock_config.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config.entry_id) + await hass.async_block_till_done() + return mock_config + + @pytest.mark.parametrize( - ("friendly_name", "config"), + "config", [ - ( - "home", - { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - }, - ), - ( - "work", - { - "devices": ["device_tracker.test1"], - "tolerance": "1", - "zone": "work", - }, - ), + { + CONF_IGNORED_ZONES: ["zone.work"], + CONF_TRACKED_ENTITIES: ["device_tracker.test1", "device_tracker.test2"], + CONF_TOLERANCE: 1, + 
CONF_ZONE: "zone.home", + }, + { + CONF_IGNORED_ZONES: [], + CONF_TRACKED_ENTITIES: ["device_tracker.test1"], + CONF_TOLERANCE: 1, + CONF_ZONE: "zone.work", + }, ], ) -async def test_proximities( - hass: HomeAssistant, friendly_name: str, config: dict -) -> None: +async def test_proximities(hass: HomeAssistant, config: dict) -> None: """Test a list of proximities.""" - assert await async_setup_component( - hass, DOMAIN, {"proximity": {friendly_name: config}} + title = hass.states.get(config[CONF_ZONE]).name + mock_config = MockConfigEntry( + domain=DOMAIN, + title=title, + data=config, ) + mock_config.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config.entry_id) await hass.async_block_till_done() - # proximity entity - state = hass.states.get(f"proximity.{friendly_name}") - assert state.state == "not set" - assert state.attributes.get("nearest") == "not set" - assert state.attributes.get("dir_of_travel") == "not set" - hass.states.async_set(f"proximity.{friendly_name}", "0") - await hass.async_block_till_done() - state = hass.states.get(f"proximity.{friendly_name}") - assert state.state == "0" + zone_name = slugify(title) # sensor entities - state = hass.states.get(f"sensor.{friendly_name}_nearest_device") + state = hass.states.get(f"sensor.{zone_name}_nearest_device") assert state.state == STATE_UNKNOWN - for device in config["devices"]: - entity_base_name = f"sensor.{friendly_name}_{slugify(device.split('.')[-1])}" + for device in config[CONF_TRACKED_ENTITIES]: + entity_base_name = f"sensor.{zone_name}_{slugify(device.split('.')[-1])}" state = hass.states.get(f"{entity_base_name}_distance") assert state.state == STATE_UNAVAILABLE state = hass.states.get(f"{entity_base_name}_direction_of_travel") assert state.state == STATE_UNAVAILABLE -async def test_legacy_setup(hass: HomeAssistant) -> None: - """Test legacy setup only on imported entries.""" - config = { - "proximity": { - "home": { - "devices": ["device_tracker.test1"], - "tolerance": 
"1", - }, - } - } - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - assert hass.states.get("proximity.home") - - mock_config = MockConfigEntry( - domain=DOMAIN, - title="work", - data={ - CONF_ZONE: "zone.work", - CONF_TRACKED_ENTITIES: ["device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_work", - ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - - assert not hass.states.get("proximity.work") - - async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: """Test for tracker in zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -131,12 +100,6 @@ async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "0" - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "arrived" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -150,17 +113,7 @@ async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: async def test_device_tracker_test1_away(hass: HomeAssistant) -> None: """Test for tracker state away.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( 
"device_tracker.test1", @@ -170,11 +123,6 @@ async def test_device_tracker_test1_away(hass: HomeAssistant) -> None: await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -190,20 +138,7 @@ async def test_device_tracker_test1_awayfurther( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state away further.""" - - await hass.async_block_till_done() - - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -212,11 +147,6 @@ async def test_device_tracker_test1_awayfurther( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -234,11 +164,6 @@ async def test_device_tracker_test1_awayfurther( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "away_from" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -254,19 +179,7 @@ async def test_device_tracker_test1_awaycloser( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state away closer.""" - await hass.async_block_till_done() - - config = { - "proximity": { - "home": { - "ignored_zones": 
["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -275,11 +188,6 @@ async def test_device_tracker_test1_awaycloser( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -297,11 +205,6 @@ async def test_device_tracker_test1_awaycloser( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "towards" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -315,27 +218,11 @@ async def test_device_tracker_test1_awaycloser( async def test_all_device_trackers_in_ignored_zone(hass: HomeAssistant) -> None: """Test for tracker in ignored zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set("device_tracker.test1", "work", {"friendly_name": "test1"}) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "not set" - assert state.attributes.get("nearest") == "not set" - assert state.attributes.get("dir_of_travel") == "not set" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == STATE_UNKNOWN @@ -349,28 +236,13 @@ async def 
test_all_device_trackers_in_ignored_zone(hass: HomeAssistant) -> None: async def test_device_tracker_test1_no_coordinates(hass: HomeAssistant) -> None: """Test for tracker with no coordinates.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "not set" - assert state.attributes.get("dir_of_travel") == "not set" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == STATE_UNKNOWN @@ -384,19 +256,8 @@ async def test_device_tracker_test1_no_coordinates(hass: HomeAssistant) -> None: async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> None: """Test for tracker states.""" - assert await async_setup_component( - hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": 1000, - "zone": "home", - } - } - }, + await async_setup_single_entry( + hass, "zone.home", ["device_tracker.test1"], ["zone.work"], 1000 ) hass.states.async_set( @@ -406,11 +267,6 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -428,11 +284,6 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No ) await 
hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "stationary" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -446,17 +297,13 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No async def test_device_trackers_in_zone(hass: HomeAssistant) -> None: """Test for trackers in zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry( + hass, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, + ) hass.states.async_set( "device_tracker.test1", @@ -471,14 +318,6 @@ async def test_device_trackers_in_zone(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "0" - assert (state.attributes.get("nearest") == "test1, test2") or ( - state.attributes.get("nearest") == "test2, test1" - ) - assert state.attributes.get("dir_of_travel") == "arrived" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1, test2" @@ -495,30 +334,18 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( hass: HomeAssistant, config_zones ) -> None: """Test for tracker ordering.""" - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - await hass.async_block_till_done() - - assert await async_setup_component( + await async_setup_single_entry( hass, - DOMAIN, - { - 
"proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -528,11 +355,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -556,11 +378,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -582,28 +399,19 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( hass: HomeAssistant, config_zones ) -> None: """Test for tracker ordering.""" - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - await hass.async_block_till_done() - assert await async_setup_component( + + await async_setup_single_entry( hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -613,11 +421,6 @@ async def 
test_device_tracker_test1_awayfurther_than_test2_first_test2( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test2" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -641,11 +444,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -670,23 +468,15 @@ async def test_device_tracker_test1_awayfurther_test2_in_ignored_zone( hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set("device_tracker.test2", "work", {"friendly_name": "test2"}) - await hass.async_block_till_done() - assert await async_setup_component( - hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, - ) + await async_setup_single_entry( + hass, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, + ) hass.states.async_set( "device_tracker.test1", "not_home", @@ -694,11 +484,6 @@ async def test_device_tracker_test1_awayfurther_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -720,29 +505,19 @@ async def 
test_device_tracker_test1_awayfurther_test2_first( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state.""" - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - await hass.async_block_till_done() - assert await async_setup_component( + await async_setup_single_entry( hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -776,11 +551,6 @@ async def test_device_tracker_test1_awayfurther_test2_first( hass.states.async_set("device_tracker.test1", "work", {"friendly_name": "test1"}) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test2" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -803,7 +573,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) -> None: """Test for tracker states.""" await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) @@ -813,18 +582,12 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - assert await async_setup_component( + await async_setup_single_entry( hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) 
hass.states.async_set( @@ -834,11 +597,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -862,11 +620,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test2" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -890,11 +643,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -914,22 +662,10 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( async def test_nearest_sensors(hass: HomeAssistant, config_zones) -> None: """Test for nearest sensors.""" - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1", "device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + await async_setup_single_entry( + hass, "zone.home", ["device_tracker.test1", "device_tracker.test2"], [], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - hass.states.async_set( 
"device_tracker.test1", "not_home", @@ -1038,71 +774,6 @@ async def test_nearest_sensors(hass: HomeAssistant, config_zones) -> None: assert state.state == STATE_UNKNOWN -async def test_create_deprecated_proximity_issue( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test we create an issue for deprecated proximity entities used in automations and scripts.""" - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "alias": "test", - "trigger": {"platform": "state", "entity_id": "proximity.home"}, - "action": { - "service": "automation.turn_on", - "target": {"entity_id": "automation.test"}, - }, - } - }, - ) - assert await async_setup_component( - hass, - script.DOMAIN, - { - script.DOMAIN: { - "test": { - "sequence": [ - { - "condition": "state", - "entity_id": "proximity.home", - "state": "home", - }, - ], - } - } - }, - ) - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - }, - "work": {"tolerance": "1", "zone": "work"}, - } - } - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - automation_entities = automations_with_entity(hass, "proximity.home") - assert len(automation_entities) == 1 - assert automation_entities[0] == "automation.test" - - script_entites = scripts_with_entity(hass, "proximity.home") - - assert len(script_entites) == 1 - assert script_entites[0] == "script.test" - assert issue_registry.async_get_issue(DOMAIN, "deprecated_proximity_entity_home") - - assert not issue_registry.async_get_issue( - DOMAIN, "deprecated_proximity_entity_work" - ) - - async def test_create_removed_tracked_entity_issue( hass: HomeAssistant, issue_registry: ir.IssueRegistry, @@ -1119,22 +790,10 @@ async def test_create_removed_tracked_entity_issue( hass.states.async_set(t1.entity_id, "not_home") hass.states.async_set(t2.entity_id, "not_home") - 
mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id, t2.entity_id], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + await async_setup_single_entry( + hass, "zone.home", [t1.entity_id, t2.entity_id], [], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = f"sensor.home_{t1.entity_id.split('.')[-1]}_distance" sensor_t2 = f"sensor.home_{t2.entity_id.split('.')[-1]}_distance" @@ -1168,22 +827,10 @@ async def test_track_renamed_tracked_entity( hass.states.async_set(t1.entity_id, "not_home") - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + mock_config = await async_setup_single_entry( + hass, "zone.home", [t1.entity_id], ["zone.work"], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = f"sensor.home_{t1.entity_id.split('.')[-1]}_distance" entity = entity_registry.async_get(sensor_t1) @@ -1216,31 +863,60 @@ async def test_sensor_unique_ids( hass.states.async_set("device_tracker.test2", "not_home") - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id, "device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + mock_config = await async_setup_single_entry( + hass, "zone.home", [t1.entity_id, "device_tracker.test2"], ["zone.work"], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = "sensor.home_test_tracker_1_distance" entity = 
entity_registry.async_get(sensor_t1) assert entity assert entity.unique_id == f"{mock_config.entry_id}_{t1.id}_dist_to_zone" state = hass.states.get(sensor_t1) - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "home Test tracker 1 Distance" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Home Test tracker 1 Distance" entity = entity_registry.async_get("sensor.home_test2_distance") assert entity assert ( entity.unique_id == f"{mock_config.entry_id}_device_tracker.test2_dist_to_zone" ) + + +async def test_tracked_zone_is_removed(hass: HomeAssistant) -> None: + """Test that tracked zone is removed.""" + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + + hass.states.async_set( + "device_tracker.test1", + "home", + {"friendly_name": "test1", "latitude": 2.1, "longitude": 1.1}, + ) + await hass.async_block_till_done() + + # check sensor entities + state = hass.states.get("sensor.home_nearest_device") + assert state.state == "test1" + + entity_base_name = "sensor.home_test1" + state = hass.states.get(f"{entity_base_name}_distance") + assert state.state == "0" + state = hass.states.get(f"{entity_base_name}_direction_of_travel") + assert state.state == "arrived" + + # remove tracked zone and move tracked entity + assert hass.states.async_remove("zone.home") + hass.states.async_set( + "device_tracker.test1", + "home", + {"friendly_name": "test1", "latitude": 2.2, "longitude": 1.2}, + ) + await hass.async_block_till_done() + + # check sensor entities + state = hass.states.get("sensor.home_nearest_device") + assert state.state == STATE_UNKNOWN + + entity_base_name = "sensor.home_test1" + state = hass.states.get(f"{entity_base_name}_distance") + assert state.state == STATE_UNAVAILABLE + state = hass.states.get(f"{entity_base_name}_direction_of_travel") + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/prusalink/conftest.py b/tests/components/prusalink/conftest.py index 104e4d47afa..9bcf45056cd 100644 --- 
a/tests/components/prusalink/conftest.py +++ b/tests/components/prusalink/conftest.py @@ -1,16 +1,19 @@ """Fixtures for PrusaLink.""" +from collections.abc import Generator +from typing import Any from unittest.mock import patch import pytest from homeassistant.components.prusalink import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a PrusaLink config entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -23,7 +26,7 @@ def mock_config_entry(hass): @pytest.fixture -def mock_version_api(hass): +def mock_version_api() -> Generator[dict[str, str]]: """Mock PrusaLink version API.""" resp = { "api": "2.0.0", @@ -36,7 +39,7 @@ def mock_version_api(hass): @pytest.fixture -def mock_info_api(hass): +def mock_info_api() -> Generator[dict[str, Any]]: """Mock PrusaLink info API.""" resp = { "nozzle_diameter": 0.40, @@ -50,7 +53,7 @@ def mock_info_api(hass): @pytest.fixture -def mock_get_legacy_printer(hass): +def mock_get_legacy_printer() -> Generator[dict[str, Any]]: """Mock PrusaLink printer API.""" resp = {"telemetry": {"material": "PLA"}} with patch("pyprusalink.PrusaLink.get_legacy_printer", return_value=resp): @@ -58,7 +61,7 @@ def mock_get_legacy_printer(hass): @pytest.fixture -def mock_get_status_idle(hass): +def mock_get_status_idle() -> Generator[dict[str, Any]]: """Mock PrusaLink printer API.""" resp = { "storage": { @@ -86,7 +89,7 @@ def mock_get_status_idle(hass): @pytest.fixture -def mock_get_status_printing(hass): +def mock_get_status_printing() -> Generator[dict[str, Any]]: """Mock PrusaLink printer API.""" resp = { "job": { @@ -114,7 +117,7 @@ def mock_get_status_printing(hass): @pytest.fixture -def mock_job_api_idle(hass): +def mock_job_api_idle() -> Generator[dict[str, Any]]: """Mock PrusaLink job API having no job.""" resp = {} with patch("pyprusalink.PrusaLink.get_job", return_value=resp): @@ 
-122,7 +125,7 @@ def mock_job_api_idle(hass): @pytest.fixture -def mock_job_api_idle_mk3(hass): +def mock_job_api_idle_mk3() -> Generator[dict[str, Any]]: """Mock PrusaLink job API having a job with idle state (MK3).""" resp = { "id": 129, @@ -148,7 +151,7 @@ def mock_job_api_idle_mk3(hass): @pytest.fixture -def mock_job_api_printing(hass): +def mock_job_api_printing() -> Generator[dict[str, Any]]: """Mock PrusaLink printing.""" resp = { "id": 129, @@ -174,7 +177,9 @@ def mock_job_api_printing(hass): @pytest.fixture -def mock_job_api_paused(hass, mock_get_status_printing, mock_job_api_printing): +def mock_job_api_paused( + mock_get_status_printing: dict[str, Any], mock_job_api_printing: dict[str, Any] +) -> None: """Mock PrusaLink paused printing.""" mock_job_api_printing["state"] = "PAUSED" mock_get_status_printing["printer"]["state"] = "PAUSED" @@ -182,10 +187,10 @@ def mock_job_api_paused(hass, mock_get_status_printing, mock_job_api_printing): @pytest.fixture def mock_api( - mock_version_api, - mock_info_api, - mock_get_legacy_printer, - mock_get_status_idle, - mock_job_api_idle, -): + mock_version_api: dict[str, str], + mock_info_api: dict[str, Any], + mock_get_legacy_printer: dict[str, Any], + mock_get_status_idle: dict[str, Any], + mock_job_api_idle: dict[str, Any], +) -> None: """Mock PrusaLink API.""" diff --git a/tests/components/prusalink/test_binary_sensor.py b/tests/components/prusalink/test_binary_sensor.py new file mode 100644 index 00000000000..c39b15471c6 --- /dev/null +++ b/tests/components/prusalink/test_binary_sensor.py @@ -0,0 +1,33 @@ +"""Test Prusalink sensors.""" + +from unittest.mock import PropertyMock, patch + +import pytest + +from homeassistant.const import STATE_OFF, Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + + +@pytest.fixture(autouse=True) +def setup_binary_sensor_platform_only(): + """Only setup sensor platform.""" + with ( + 
patch("homeassistant.components.prusalink.PLATFORMS", [Platform.BINARY_SENSOR]), + patch( + "homeassistant.helpers.entity.Entity.entity_registry_enabled_default", + PropertyMock(return_value=True), + ), + ): + yield + + +async def test_binary_sensors_no_job( + hass: HomeAssistant, mock_config_entry, mock_api +) -> None: + """Test sensors while no job active.""" + assert await async_setup_component(hass, "prusalink", {}) + + state = hass.states.get("binary_sensor.mock_title_mmu") + assert state is not None + assert state.state == STATE_OFF diff --git a/tests/components/prusalink/test_init.py b/tests/components/prusalink/test_init.py index 2cdc6894eeb..bd0fb84cafd 100644 --- a/tests/components/prusalink/test_init.py +++ b/tests/components/prusalink/test_init.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import patch +from httpx import ConnectError from pyprusalink.types import InvalidAuth, PrusaLinkError import pytest @@ -36,7 +37,10 @@ async def test_unloading( assert state.state == "unavailable" -@pytest.mark.parametrize("exception", [InvalidAuth, PrusaLinkError]) +@pytest.mark.parametrize( + "exception", + [InvalidAuth, PrusaLinkError, ConnectError("All connection attempts failed")], +) async def test_failed_update( hass: HomeAssistant, mock_config_entry: ConfigEntry, exception ) -> None: diff --git a/tests/components/prusalink/test_sensor.py b/tests/components/prusalink/test_sensor.py index b15e9198da6..c0693626600 100644 --- a/tests/components/prusalink/test_sensor.py +++ b/tests/components/prusalink/test_sensor.py @@ -101,6 +101,10 @@ async def test_sensors_no_job(hass: HomeAssistant, mock_config_entry, mock_api) assert state is not None assert state.state == "PLA" + state = hass.states.get("sensor.mock_title_nozzle_diameter") + assert state is not None + assert state.state == "0.4" + state = hass.states.get("sensor.mock_title_print_flow") assert state is not None assert state.state == "100" @@ -205,6 +209,10 @@ async def 
test_sensors_idle_job_mk3( assert state is not None assert state.state == "PLA" + state = hass.states.get("sensor.mock_title_nozzle_diameter") + assert state is not None + assert state.state == "0.4" + state = hass.states.get("sensor.mock_title_print_flow") assert state is not None assert state.state == "100" diff --git a/tests/components/ps4/conftest.py b/tests/components/ps4/conftest.py index bc84ea3b4db..c95cc78f53a 100644 --- a/tests/components/ps4/conftest.py +++ b/tests/components/ps4/conftest.py @@ -1,10 +1,10 @@ """Test configuration for PS4.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from pyps4_2ndscreen.ddp import DEFAULT_UDP_PORT, DDPProtocol import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/pure_energie/conftest.py b/tests/components/pure_energie/conftest.py index 7174befbf5b..9aa3a4cc1b4 100644 --- a/tests/components/pure_energie/conftest.py +++ b/tests/components/pure_energie/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Pure Energie integration tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from gridnet import Device as GridNetDevice, SmartBridge import pytest -from typing_extensions import Generator from homeassistant.components.pure_energie.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py index 1305c98308d..3d6776dd12e 100644 --- a/tests/components/purpleair/conftest.py +++ b/tests/components/purpleair/conftest.py @@ -1,5 +1,7 @@ """Define fixtures for PurpleAir tests.""" +from collections.abc import Generator +from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiopurpleair.endpoints.sensors import NearbySensorResult @@ -7,6 +9,7 @@ from aiopurpleair.models.sensors import GetSensorsResponse import pytest from homeassistant.components.purpleair import DOMAIN 
+from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -16,7 +19,7 @@ TEST_SENSOR_INDEX2 = 567890 @pytest.fixture(name="api") -def api_fixture(get_sensors_response): +def api_fixture(get_sensors_response: GetSensorsResponse) -> Mock: """Define a fixture to return a mocked aiopurple API object.""" return Mock( async_check_api_key=AsyncMock(), @@ -34,7 +37,11 @@ def api_fixture(get_sensors_response): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config_entry_data, config_entry_options): +def config_entry_fixture( + hass: HomeAssistant, + config_entry_data: dict[str, Any], + config_entry_options: dict[str, Any], +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -48,7 +55,7 @@ def config_entry_fixture(hass, config_entry_data, config_entry_options): @pytest.fixture(name="config_entry_data") -def config_entry_data_fixture(): +def config_entry_data_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { "api_key": TEST_API_KEY, @@ -56,7 +63,7 @@ def config_entry_data_fixture(): @pytest.fixture(name="config_entry_options") -def config_entry_options_fixture(): +def config_entry_options_fixture() -> dict[str, Any]: """Define a config entry options fixture.""" return { "sensor_indices": [TEST_SENSOR_INDEX1], @@ -64,7 +71,7 @@ def config_entry_options_fixture(): @pytest.fixture(name="get_sensors_response", scope="package") -def get_sensors_response_fixture(): +def get_sensors_response_fixture() -> GetSensorsResponse: """Define a fixture to mock an aiopurpleair GetSensorsResponse object.""" return GetSensorsResponse.parse_raw( load_fixture("get_sensors_response.json", "purpleair") @@ -72,7 +79,7 @@ def get_sensors_response_fixture(): @pytest.fixture(name="mock_aiopurpleair") -async def mock_aiopurpleair_fixture(api): +def mock_aiopurpleair_fixture(api: Mock) -> Generator[Mock]: """Define a fixture to patch aiopurpleair.""" with ( 
patch("homeassistant.components.purpleair.config_flow.API", return_value=api), @@ -82,7 +89,9 @@ async def mock_aiopurpleair_fixture(api): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_aiopurpleair): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_aiopurpleair: Mock +) -> None: """Define a fixture to set up purpleair.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/purpleair/test_diagnostics.py b/tests/components/purpleair/test_diagnostics.py index 13dcd1338e0..599549bb723 100644 --- a/tests/components/purpleair/test_diagnostics.py +++ b/tests/components/purpleair/test_diagnostics.py @@ -3,6 +3,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -34,6 +35,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": { "fields": [ diff --git a/tests/components/pushover/test_init.py b/tests/components/pushover/test_init.py index c3a653042ce..85266e34d13 100644 --- a/tests/components/pushover/test_init.py +++ b/tests/components/pushover/test_init.py @@ -5,6 +5,7 @@ from unittest.mock import MagicMock, patch from pushover_complete import BadAPIRequestError import pytest import requests_mock +from urllib3.exceptions import MaxRetryError from homeassistant.components.pushover.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -93,3 +94,18 @@ async def test_async_setup_entry_failed_json_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def 
test_async_setup_entry_failed_urrlib3_error( + hass: HomeAssistant, mock_pushover: MagicMock +) -> None: + """Test pushover failed setup due to conn error.""" + entry = MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG, + ) + entry.add_to_hass(hass) + mock_pushover.side_effect = MaxRetryError(MagicMock(), MagicMock()) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/pvoutput/conftest.py b/tests/components/pvoutput/conftest.py index d19f09d9e6c..a55bb21d2ae 100644 --- a/tests/components/pvoutput/conftest.py +++ b/tests/components/pvoutput/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pvo import Status, System import pytest -from typing_extensions import Generator from homeassistant.components.pvoutput.const import CONF_SYSTEM_ID, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/pyload/conftest.py b/tests/components/pyload/conftest.py index 67694bcb4b9..c0f181396ab 100644 --- a/tests/components/pyload/conftest.py +++ b/tests/components/pyload/conftest.py @@ -1,11 +1,12 @@ """Fixtures for pyLoad integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from pyloadapi.types import LoginResponse, StatusServerResponse import pytest +from homeassistant.components.pyload.const import DEFAULT_NAME, DOMAIN from homeassistant.const import ( CONF_HOST, CONF_MONITORED_VARIABLES, @@ -15,39 +16,74 @@ from homeassistant.const import ( CONF_PORT, CONF_SSL, CONF_USERNAME, + CONF_VERIFY_SSL, ) from homeassistant.helpers.typing import ConfigType +from tests.common import MockConfigEntry + +USER_INPUT = { + CONF_HOST: "pyload.local", + CONF_PASSWORD: "test-password", + CONF_PORT: 8000, + CONF_SSL: True, + 
CONF_USERNAME: "test-username", + CONF_VERIFY_SSL: False, +} + +YAML_INPUT = { + CONF_HOST: "pyload.local", + CONF_MONITORED_VARIABLES: ["speed"], + CONF_NAME: "test-name", + CONF_PASSWORD: "test-password", + CONF_PLATFORM: "pyload", + CONF_PORT: 8000, + CONF_SSL: True, + CONF_USERNAME: "test-username", +} +REAUTH_INPUT = { + CONF_PASSWORD: "new-password", + CONF_USERNAME: "new-username", +} + +NEW_INPUT = { + CONF_HOST: "pyload.local", + CONF_PASSWORD: "new-password", + CONF_PORT: 8000, + CONF_SSL: True, + CONF_USERNAME: "new-username", + CONF_VERIFY_SSL: False, +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.pyload.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + @pytest.fixture def pyload_config() -> ConfigType: """Mock pyload configuration entry.""" - return { - "sensor": { - CONF_PLATFORM: "pyload", - CONF_HOST: "localhost", - CONF_PORT: 8000, - CONF_USERNAME: "username", - CONF_PASSWORD: "password", - CONF_SSL: True, - CONF_MONITORED_VARIABLES: ["speed"], - CONF_NAME: "pyload", - } - } + return {"sensor": YAML_INPUT} @pytest.fixture -def mock_pyloadapi() -> Generator[AsyncMock, None, None]: +def mock_pyloadapi() -> Generator[MagicMock]: """Mock PyLoadAPI.""" with ( patch( - "homeassistant.components.pyload.sensor.PyLoadAPI", - autospec=True, + "homeassistant.components.pyload.PyLoadAPI", autospec=True ) as mock_client, + patch("homeassistant.components.pyload.config_flow.PyLoadAPI", new=mock_client), ): client = mock_client.return_value client.username = "username" - client.login.return_value = LoginResponse.from_dict( + client.api_url = "https://pyload.local:8000/" + client.login.return_value = LoginResponse( { "_permanent": True, "authenticated": True, @@ -59,7 +95,8 @@ def mock_pyloadapi() -> Generator[AsyncMock, None, None]: "_flashes": [["message", "Logged in successfully"]], } ) - client.get_status.return_value 
= StatusServerResponse.from_dict( + + client.get_status.return_value = StatusServerResponse( { "pause": False, "active": 1, @@ -71,5 +108,14 @@ def mock_pyloadapi() -> Generator[AsyncMock, None, None]: "captcha": False, } ) + client.version.return_value = "0.5.0" client.free_space.return_value = 99999999999 yield client + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Mock pyLoad configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, title=DEFAULT_NAME, data=USER_INPUT, entry_id="XXXXXXXXXXXXXX" + ) diff --git a/tests/components/pyload/snapshots/test_button.ambr b/tests/components/pyload/snapshots/test_button.ambr new file mode 100644 index 00000000000..bf1e1f59c98 --- /dev/null +++ b/tests/components/pyload/snapshots/test_button.ambr @@ -0,0 +1,185 @@ +# serializer version: 1 +# name: test_state[button.pyload_abort_all_running_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.pyload_abort_all_running_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Abort all running downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_abort_downloads', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[button.pyload_abort_all_running_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Abort all running downloads', + }), + 'context': , + 'entity_id': 'button.pyload_abort_all_running_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# 
--- +# name: test_state[button.pyload_delete_finished_files_packages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.pyload_delete_finished_files_packages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Delete finished files/packages', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_delete_finished', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[button.pyload_delete_finished_files_packages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Delete finished files/packages', + }), + 'context': , + 'entity_id': 'button.pyload_delete_finished_files_packages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_state[button.pyload_restart_all_failed_files-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.pyload_restart_all_failed_files', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Restart all failed files', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_restart_failed', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_state[button.pyload_restart_all_failed_files-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Restart all failed files', + }), + 'context': , + 'entity_id': 'button.pyload_restart_all_failed_files', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_state[button.pyload_restart_pyload_core-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.pyload_restart_pyload_core', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Restart pyload core', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_restart', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[button.pyload_restart_pyload_core-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Restart pyload core', + }), + 'context': , + 'entity_id': 'button.pyload_restart_pyload_core', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/pyload/snapshots/test_diagnostics.ambr b/tests/components/pyload/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e2b51ad184a --- /dev/null +++ b/tests/components/pyload/snapshots/test_diagnostics.ambr @@ -0,0 +1,24 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'config_entry_data': dict({ + 'host': '**REDACTED**', + 'password': '**REDACTED**', + 'port': 8000, + 'ssl': True, + 'username': '**REDACTED**', + 'verify_ssl': False, + }), + 'pyload_data': dict({ + 'active': 1, + 'captcha': 
False, + 'download': True, + 'free_space': 99999999999, + 'pause': False, + 'queue': 6, + 'reconnect': False, + 'speed': 5405963.0, + 'total': 37, + }), + }) +# --- diff --git a/tests/components/pyload/snapshots/test_sensor.ambr b/tests/components/pyload/snapshots/test_sensor.ambr index 226221240d2..69d0387fc8f 100644 --- a/tests/components/pyload/snapshots/test_sensor.ambr +++ b/tests/components/pyload/snapshots/test_sensor.ambr @@ -1,16 +1,1033 @@ # serializer version: 1 -# name: test_setup +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_active_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_active_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Active downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_active', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_active_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Active downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_active_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_downloads_in_queue-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_downloads_in_queue', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Downloads in queue', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_queue', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_downloads_in_queue-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Downloads in queue', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_downloads_in_queue', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free space', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_free_space', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor_update_exceptions[CannotConnect][sensor.pyload_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'pyLoad Free space', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pyload_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_speed', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_speed-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'data_rate', - 'friendly_name': 'pyload Speed', - 'unit_of_measurement': , + 'friendly_name': 'pyLoad Speed', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.pyload_speed', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '5.405963', + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_active_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_active_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Active downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_active', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_active_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Active downloads', + 
'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_active_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_downloads_in_queue-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_downloads_in_queue', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Downloads in queue', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_queue', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_downloads_in_queue-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Downloads in queue', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_downloads_in_queue', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free space', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_free_space', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'pyLoad Free space', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pyload_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_speed', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'pyLoad Speed', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pyload_speed', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_active_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_active_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Active downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_active', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_active_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Active downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_active_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_downloads_in_queue-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_downloads_in_queue', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Downloads in queue', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_queue', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_downloads_in_queue-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Downloads in queue', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_downloads_in_queue', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free space', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_free_space', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'pyLoad Free space', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pyload_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_speed', + 'unit_of_measurement': , + }) +# 
--- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'pyLoad Speed', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pyload_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_setup[sensor.pyload_active_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_active_downloads', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Active downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_active', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_setup[sensor.pyload_active_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Active downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_active_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_setup[sensor.pyload_downloads_in_queue-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_downloads_in_queue', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Downloads in queue', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_queue', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_setup[sensor.pyload_downloads_in_queue-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Downloads in queue', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_downloads_in_queue', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: 
test_setup[sensor.pyload_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free space', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_free_space', + 'unit_of_measurement': , + }) +# --- +# name: test_setup[sensor.pyload_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'pyLoad Free space', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pyload_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '93.1322574606165', + }) +# --- +# name: test_setup[sensor.pyload_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_speed', + 'unit_of_measurement': , + }) +# --- +# name: test_setup[sensor.pyload_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'pyLoad Speed', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pyload_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '43.247704', + }) +# --- +# name: test_setup[sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_setup[sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37', }) # --- diff --git a/tests/components/pyload/snapshots/test_switch.ambr b/tests/components/pyload/snapshots/test_switch.ambr new file mode 100644 index 00000000000..0fcc45f8586 --- /dev/null +++ b/tests/components/pyload/snapshots/test_switch.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_state[switch.pyload_auto_reconnect-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), 
+ 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.pyload_auto_reconnect', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Auto-Reconnect', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_reconnect', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[switch.pyload_auto_reconnect-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'pyLoad Auto-Reconnect', + }), + 'context': , + 'entity_id': 'switch.pyload_auto_reconnect', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_state[switch.pyload_pause_resume_queue-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.pyload_pause_resume_queue', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pause/Resume queue', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_download', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[switch.pyload_pause_resume_queue-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'pyLoad Pause/Resume queue', + }), + 'context': , + 'entity_id': 
'switch.pyload_pause_resume_queue', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/pyload/test_button.py b/tests/components/pyload/test_button.py new file mode 100644 index 00000000000..9a2f480bede --- /dev/null +++ b/tests/components/pyload/test_button.py @@ -0,0 +1,122 @@ +"""The tests for the button component.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, call, patch + +from pyloadapi import CannotConnect, InvalidAuth +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.pyload.button import PyLoadButtonEntity +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + +API_CALL = { + PyLoadButtonEntity.ABORT_DOWNLOADS: call.stop_all_downloads, + PyLoadButtonEntity.RESTART_FAILED: call.restart_failed, + PyLoadButtonEntity.DELETE_FINISHED: call.delete_finished, + PyLoadButtonEntity.RESTART: call.restart, +} + + +@pytest.fixture(autouse=True) +def button_only() -> Generator[None]: + """Enable only the button platform.""" + with patch( + "homeassistant.components.pyload.PLATFORMS", + [Platform.BUTTON], + ): + yield + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_pyloadapi: AsyncMock, +) -> None: + """Test button state.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert 
config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_button_press( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test button press method.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + + for entity_entry in entity_entries: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_entry.entity_id}, + blocking=True, + ) + assert API_CALL[entity_entry.translation_key] in mock_pyloadapi.method_calls + mock_pyloadapi.reset_mock() + + +@pytest.mark.parametrize( + ("side_effect"), + [CannotConnect, InvalidAuth], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_button_press_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + entity_registry: er.EntityRegistry, + side_effect: Exception, +) -> None: + """Test button press method.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + mock_pyloadapi.stop_all_downloads.side_effect = side_effect + mock_pyloadapi.restart_failed.side_effect = side_effect + mock_pyloadapi.delete_finished.side_effect = side_effect + mock_pyloadapi.restart.side_effect = side_effect + + for entity_entry in entity_entries: + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + 
BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_entry.entity_id}, + blocking=True, + ) diff --git a/tests/components/pyload/test_config_flow.py b/tests/components/pyload/test_config_flow.py new file mode 100644 index 00000000000..8c775412371 --- /dev/null +++ b/tests/components/pyload/test_config_flow.py @@ -0,0 +1,343 @@ +"""Test the pyLoad config flow.""" + +from unittest.mock import AsyncMock + +from pyloadapi.exceptions import CannotConnect, InvalidAuth, ParserError +import pytest + +from homeassistant.components.pyload.const import DEFAULT_NAME, DOMAIN +from homeassistant.config_entries import ( + SOURCE_IMPORT, + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_USER, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import NEW_INPUT, REAUTH_INPUT, USER_INPUT, YAML_INPUT + +from tests.common import MockConfigEntry + + +async def test_form( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_pyloadapi: AsyncMock, +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == USER_INPUT + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "expected_error"), + [ + (InvalidAuth, "invalid_auth"), + (CannotConnect, "cannot_connect"), + (ParserError, "cannot_connect"), + (ValueError, "unknown"), + ], +) +async def test_form_errors( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_pyloadapi: AsyncMock, + exception: Exception, + expected_error: str, +) -> None: + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + 
DOMAIN, context={"source": SOURCE_USER} + ) + mock_pyloadapi.login.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + mock_pyloadapi.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == USER_INPUT + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_flow_user_already_configured( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_pyloadapi: AsyncMock +) -> None: + """Test we abort user data set when entry is already configured.""" + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=USER_INPUT, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_flow_import( + hass: HomeAssistant, + mock_pyloadapi: AsyncMock, +) -> None: + """Test that we can import a YAML config.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=YAML_INPUT, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-name" + assert result["data"] == USER_INPUT + + +async def test_flow_import_already_configured( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_pyloadapi: AsyncMock +) -> None: + """Test we abort import data set when entry is already configured.""" + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + 
context={"source": SOURCE_IMPORT}, + data=YAML_INPUT, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "reason"), + [ + (InvalidAuth, "invalid_auth"), + (CannotConnect, "cannot_connect"), + (ParserError, "cannot_connect"), + (ValueError, "unknown"), + ], +) +async def test_flow_import_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + exception: Exception, + reason: str, +) -> None: + """Test we abort import data set when entry is already configured.""" + + mock_pyloadapi.login.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=YAML_INPUT, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + + +async def test_reauth( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, +) -> None: + """Test reauth flow.""" + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + REAUTH_INPUT, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert config_entry.data == NEW_INPUT + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("side_effect", "error_text"), + [ + (InvalidAuth, "invalid_auth"), + (CannotConnect, "cannot_connect"), + (IndexError, "unknown"), + ], +) +async def test_reauth_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + side_effect: Exception, + error_text: str, +) -> None: + """Test reauth 
flow.""" + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_pyloadapi.login.side_effect = side_effect + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + REAUTH_INPUT, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_text} + + mock_pyloadapi.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + REAUTH_INPUT, + ) + + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert config_entry.data == NEW_INPUT + assert len(hass.config_entries.async_entries()) == 1 + + +async def test_reconfiguration( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, +) -> None: + """Test reconfiguration flow.""" + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert config_entry.data == USER_INPUT + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("side_effect", "error_text"), + [ + (InvalidAuth, "invalid_auth"), + (CannotConnect, "cannot_connect"), + (IndexError, "unknown"), + ], +) +async def test_reconfigure_errors( + hass: HomeAssistant, + 
config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + side_effect: Exception, + error_text: str, +) -> None: + """Test reconfiguration flow.""" + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + "unique_id": config_entry.unique_id, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + mock_pyloadapi.login.side_effect = side_effect + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_text} + + mock_pyloadapi.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert config_entry.data == USER_INPUT + assert len(hass.config_entries.async_entries()) == 1 diff --git a/tests/components/pyload/test_diagnostics.py b/tests/components/pyload/test_diagnostics.py new file mode 100644 index 00000000000..9c5e73f853f --- /dev/null +++ b/tests/components/pyload/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Tests for pyLoad diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/pyload/test_init.py b/tests/components/pyload/test_init.py new file mode 100644 index 00000000000..12713ef2e54 --- /dev/null +++ b/tests/components/pyload/test_init.py @@ -0,0 +1,65 @@ +"""Test pyLoad init.""" + +from unittest.mock import MagicMock + +from pyloadapi.exceptions import CannotConnect, InvalidAuth, ParserError +import pytest + +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_entry_setup_unload( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: MagicMock, +) -> None: + """Test integration setup and unload.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("side_effect"), + [CannotConnect, ParserError], +) +async def test_config_entry_setup_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: MagicMock, + side_effect: Exception, +) -> None: + """Test config entry not ready.""" + mock_pyloadapi.login.side_effect = side_effect + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_config_entry_setup_invalid_auth( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: MagicMock, +) -> None: + """Test config entry authentication.""" + 
mock_pyloadapi.login.side_effect = InvalidAuth + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_ERROR + + assert any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) diff --git a/tests/components/pyload/test_sensor.py b/tests/components/pyload/test_sensor.py index e2b392b06f9..8c194a111ea 100644 --- a/tests/components/pyload/test_sensor.py +++ b/tests/components/pyload/test_sensor.py @@ -1,114 +1,92 @@ """Tests for the pyLoad Sensors.""" -from unittest.mock import AsyncMock +from collections.abc import Generator +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory from pyloadapi.exceptions import CannotConnect, InvalidAuth, ParserError import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.pyload.sensor import SCAN_INTERVAL -from homeassistant.components.sensor import DOMAIN -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant +from homeassistant.components.pyload.const import DOMAIN +from homeassistant.components.pyload.coordinator import SCAN_INTERVAL +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -from tests.common import async_fire_time_changed - -SENSORS = ["sensor.pyload_speed"] +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.fixture(autouse=True) +def sensor_only() -> Generator[None]: + """Enable only the sensor platform.""" + with patch( + 
"homeassistant.components.pyload.PLATFORMS", + [Platform.SENSOR], + ): + yield -@pytest.mark.usefixtures("mock_pyloadapi") async def test_setup( hass: HomeAssistant, - pyload_config: ConfigType, + config_entry: MockConfigEntry, snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_pyloadapi: AsyncMock, ) -> None: """Test setup of the pyload sensor platform.""" - - assert await async_setup_component(hass, DOMAIN, pyload_config) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - for sensor in SENSORS: - result = hass.states.get(sensor) - assert result == snapshot + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) @pytest.mark.parametrize( - ("exception", "expected_exception"), - [ - (CannotConnect, "Unable to connect and retrieve data from pyLoad API"), - (ParserError, "Unable to parse data from pyLoad API"), - ( - InvalidAuth, - "Authentication failed for username, check your login credentials", - ), - ], -) -async def test_setup_exceptions( - hass: HomeAssistant, - pyload_config: ConfigType, - mock_pyloadapi: AsyncMock, - exception: Exception, - expected_exception: str, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test exceptions during setup up pyLoad platform.""" - - mock_pyloadapi.login.side_effect = exception - - assert await async_setup_component(hass, DOMAIN, pyload_config) - await hass.async_block_till_done() - - assert len(hass.states.async_all(DOMAIN)) == 0 - assert expected_exception in caplog.text - - -@pytest.mark.parametrize( - ("exception", "expected_exception"), - [ - (CannotConnect, "Unable to connect and retrieve data from pyLoad API"), - (ParserError, "Unable to parse data from pyLoad API"), - (InvalidAuth, "Authentication failed, trying to reauthenticate"), - ], + "exception", + [CannotConnect, InvalidAuth, ParserError], ) async def 
test_sensor_update_exceptions( hass: HomeAssistant, - pyload_config: ConfigType, + config_entry: MockConfigEntry, mock_pyloadapi: AsyncMock, exception: Exception, - expected_exception: str, - caplog: pytest.LogCaptureFixture, snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, ) -> None: - """Test exceptions during update of pyLoad sensor.""" + """Test if pyLoad sensors go unavailable when exceptions occur (except ParserErrors).""" - mock_pyloadapi.get_status.side_effect = exception - - assert await async_setup_component(hass, DOMAIN, pyload_config) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert len(hass.states.async_all(DOMAIN)) == 1 - assert expected_exception in caplog.text + mock_pyloadapi.get_status.side_effect = exception + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() - for sensor in SENSORS: - assert hass.states.get(sensor).state == STATE_UNAVAILABLE + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) async def test_sensor_invalid_auth( hass: HomeAssistant, - pyload_config: ConfigType, + config_entry: MockConfigEntry, mock_pyloadapi: AsyncMock, caplog: pytest.LogCaptureFixture, freezer: FrozenDateTimeFactory, ) -> None: """Test invalid auth during sensor update.""" - assert await async_setup_component(hass, DOMAIN, pyload_config) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert len(hass.states.async_all(DOMAIN)) == 1 mock_pyloadapi.get_status.side_effect = InvalidAuth mock_pyloadapi.login.side_effect = InvalidAuth @@ -118,6 +96,86 @@ async def test_sensor_invalid_auth( await hass.async_block_till_done() assert ( - "Authentication failed for username, check your login credentials" + "Authentication failed for username, verify your login credentials" in 
caplog.text ) + + +async def test_platform_setup_triggers_import_flow( + hass: HomeAssistant, + pyload_config: ConfigType, + mock_setup_entry: AsyncMock, + mock_pyloadapi: AsyncMock, +) -> None: + """Test if an issue is created when attempting setup from yaml config.""" + + assert await async_setup_component(hass, SENSOR_DOMAIN, pyload_config) + await hass.async_block_till_done() + + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "reason"), + [ + (InvalidAuth, "invalid_auth"), + (CannotConnect, "cannot_connect"), + (ParserError, "cannot_connect"), + (ValueError, "unknown"), + ], +) +async def test_deprecated_yaml_import_issue( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + pyload_config: ConfigType, + mock_pyloadapi: AsyncMock, + exception: Exception, + reason: str, +) -> None: + """Test an issue is created when attempting setup from yaml config and an error happens.""" + + mock_pyloadapi.login.side_effect = exception + await async_setup_component(hass, SENSOR_DOMAIN, pyload_config) + await hass.async_block_till_done() + + assert issue_registry.async_get_issue( + domain=DOMAIN, issue_id=f"deprecated_yaml_import_issue_{reason}" + ) + + +async def test_deprecated_yaml( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + pyload_config: ConfigType, + mock_pyloadapi: AsyncMock, +) -> None: + """Test an issue is created when we import from yaml config.""" + + await async_setup_component(hass, SENSOR_DOMAIN, pyload_config) + await hass.async_block_till_done() + + assert issue_registry.async_get_issue( + domain=HOMEASSISTANT_DOMAIN, issue_id=f"deprecated_yaml_{DOMAIN}" + ) + + +async def test_pyload_pre_0_5_0( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, +) -> None: + """Test setup of the pyload sensor platform.""" + mock_pyloadapi.get_status.return_value = { + "pause": False, + "active": 1, + "queue": 6, + "total": 37, + "speed": 5405963.0, + "download": True, + 
"reconnect": False, + } + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/pyload/test_switch.py b/tests/components/pyload/test_switch.py new file mode 100644 index 00000000000..493dbd8c0da --- /dev/null +++ b/tests/components/pyload/test_switch.py @@ -0,0 +1,152 @@ +"""Tests for the pyLoad Switches.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, call, patch + +from pyloadapi import CannotConnect, InvalidAuth +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.pyload.switch import PyLoadSwitch +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + +# Maps entity to the mock calls to assert +API_CALL = { + PyLoadSwitch.PAUSE_RESUME_QUEUE: { + SERVICE_TURN_ON: call.unpause, + SERVICE_TURN_OFF: call.pause, + SERVICE_TOGGLE: call.toggle_pause, + }, + PyLoadSwitch.RECONNECT: { + SERVICE_TURN_ON: call.toggle_reconnect, + SERVICE_TURN_OFF: call.toggle_reconnect, + SERVICE_TOGGLE: call.toggle_reconnect, + }, +} + + +@pytest.fixture(autouse=True) +def switch_only() -> Generator[None]: + """Enable only the switch platform.""" + with patch( + "homeassistant.components.pyload.PLATFORMS", + [Platform.SWITCH], + ): + yield + + +async def test_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_pyloadapi: AsyncMock, +) -> None: + 
"""Test switch state.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("service_call"), + [ + SERVICE_TURN_ON, + SERVICE_TURN_OFF, + SERVICE_TOGGLE, + ], +) +async def test_turn_on_off( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + service_call: str, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch turn on/off, toggle method.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + + for entity_entry in entity_entries: + await hass.services.async_call( + SWITCH_DOMAIN, + service_call, + {ATTR_ENTITY_ID: entity_entry.entity_id}, + blocking=True, + ) + assert ( + API_CALL[entity_entry.translation_key][service_call] + in mock_pyloadapi.method_calls + ) + mock_pyloadapi.reset_mock() + + +@pytest.mark.parametrize( + ("service_call"), + [ + SERVICE_TURN_ON, + SERVICE_TURN_OFF, + SERVICE_TOGGLE, + ], +) +@pytest.mark.parametrize( + ("side_effect"), + [CannotConnect, InvalidAuth], +) +async def test_turn_on_off_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + service_call: str, + entity_registry: er.EntityRegistry, + side_effect: Exception, +) -> None: + """Test switch turn on/off, toggle method.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_entries = er.async_entries_for_config_entry( + entity_registry, 
config_entry.entry_id + ) + mock_pyloadapi.unpause.side_effect = side_effect + mock_pyloadapi.pause.side_effect = side_effect + mock_pyloadapi.toggle_pause.side_effect = side_effect + mock_pyloadapi.toggle_reconnect.side_effect = side_effect + + for entity_entry in entity_entries: + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SWITCH_DOMAIN, + service_call, + {ATTR_ENTITY_ID: entity_entry.entity_id}, + blocking=True, + ) diff --git a/tests/components/python_script/test_init.py b/tests/components/python_script/test_init.py index 03fa73f076e..c4dc00c448a 100644 --- a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -155,7 +155,7 @@ raise Exception('boom') task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) == HomeAssistantError + assert type(task.exception()) is HomeAssistantError assert "Error executing script (Exception): boom" in str(task.exception()) @@ -183,7 +183,7 @@ hass.async_stop() task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) == ServiceValidationError + assert type(task.exception()) is ServiceValidationError assert "Not allowed to access async methods" in str(task.exception()) @@ -233,7 +233,7 @@ async def test_accessing_forbidden_methods_with_response(hass: HomeAssistant) -> task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) == ServiceValidationError + assert type(task.exception()) is ServiceValidationError assert f"Not allowed to access {name}" in str(task.exception()) diff --git a/tests/components/qbittorrent/conftest.py b/tests/components/qbittorrent/conftest.py index b15e2a6865b..17fb8e15b47 100644 --- 
a/tests/components/qbittorrent/conftest.py +++ b/tests/components/qbittorrent/conftest.py @@ -1,10 +1,10 @@ """Fixtures for testing qBittorrent component.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest import requests_mock -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/qnap/conftest.py b/tests/components/qnap/conftest.py index c0947318f60..2625f1805b6 100644 --- a/tests/components/qnap/conftest.py +++ b/tests/components/qnap/conftest.py @@ -1,9 +1,9 @@ """Setup the QNAP tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator TEST_HOST = "1.2.3.4" TEST_USERNAME = "admin" diff --git a/tests/components/qnap_qsw/test_diagnostics.py b/tests/components/qnap_qsw/test_diagnostics.py index 8bca9d8d989..ccaac458b12 100644 --- a/tests/components/qnap_qsw/test_diagnostics.py +++ b/tests/components/qnap_qsw/test_diagnostics.py @@ -25,7 +25,7 @@ from aioqsw.const import ( QSD_SYSTEM_TIME, QSD_TEMP, QSD_TEMP_MAX, - QSD_UPTIME, + QSD_UPTIME_SECONDS, QSD_VERSION, ) @@ -118,6 +118,6 @@ async def test_config_entry_diagnostics( assert ( sys_time_diag.items() >= { - QSD_UPTIME: sys_time_mock[API_UPTIME], + QSD_UPTIME_SECONDS: sys_time_mock[API_UPTIME], }.items() ) diff --git a/tests/components/rabbitair/test_config_flow.py b/tests/components/rabbitair/test_config_flow.py index 2e0cfba38c0..7f9479339a5 100644 --- a/tests/components/rabbitair/test_config_flow.py +++ b/tests/components/rabbitair/test_config_flow.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from ipaddress import ip_address from unittest.mock import MagicMock, Mock, patch import pytest from rabbitair import Mode, Model, Speed -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components import zeroconf diff --git 
a/tests/components/radio_browser/conftest.py b/tests/components/radio_browser/conftest.py index 95fda545a6c..fc666b32c53 100644 --- a/tests/components/radio_browser/conftest.py +++ b/tests/components/radio_browser/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.radio_browser.const import DOMAIN diff --git a/tests/components/rainbird/conftest.py b/tests/components/rainbird/conftest.py index a2c26c71231..b0411d9d313 100644 --- a/tests/components/rainbird/conftest.py +++ b/tests/components/rainbird/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from http import HTTPStatus import json from typing import Any @@ -9,7 +10,6 @@ from unittest.mock import patch from pyrainbird import encryption import pytest -from typing_extensions import Generator from homeassistant.components.rainbird import DOMAIN from homeassistant.components.rainbird.const import ( diff --git a/tests/components/rainbird/test_config_flow.py b/tests/components/rainbird/test_config_flow.py index cdcef95f458..87506ad656c 100644 --- a/tests/components/rainbird/test_config_flow.py +++ b/tests/components/rainbird/test_config_flow.py @@ -1,11 +1,11 @@ """Tests for the Rain Bird config flow.""" +from collections.abc import AsyncGenerator from http import HTTPStatus from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import AsyncGenerator from homeassistant import config_entries from homeassistant.components.rainbird import DOMAIN @@ -40,7 +40,7 @@ def mock_responses() -> list[AiohttpClientMockResponse]: @pytest.fixture(autouse=True) -async def config_entry_data() -> None: +async def config_entry_data() -> dict[str, Any] | None: """Fixture to disable config entry setup for exercising config flow.""" return None diff --git 
a/tests/components/rainforest_eagle/conftest.py b/tests/components/rainforest_eagle/conftest.py index 1aff693e61f..c3790a12e86 100644 --- a/tests/components/rainforest_eagle/conftest.py +++ b/tests/components/rainforest_eagle/conftest.py @@ -1,6 +1,7 @@ """Conftest for rainforest_eagle.""" -from unittest.mock import AsyncMock, Mock, patch +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -13,6 +14,7 @@ from homeassistant.components.rainforest_eagle.const import ( TYPE_EAGLE_200, ) from homeassistant.const import CONF_HOST, CONF_TYPE +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . import MOCK_200_RESPONSE_WITHOUT_PRICE, MOCK_CLOUD_ID @@ -21,7 +23,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def config_entry_200(hass): +def config_entry_200(hass: HomeAssistant) -> MockConfigEntry: """Return a config entry.""" entry = MockConfigEntry( domain="rainforest_eagle", @@ -38,7 +40,9 @@ def config_entry_200(hass): @pytest.fixture -async def setup_rainforest_200(hass, config_entry_200): +async def setup_rainforest_200( + hass: HomeAssistant, config_entry_200: MockConfigEntry +) -> AsyncGenerator[Mock]: """Set up rainforest.""" with patch( "aioeagle.ElectricMeter.create_instance", @@ -53,7 +57,7 @@ async def setup_rainforest_200(hass, config_entry_200): @pytest.fixture -async def setup_rainforest_100(hass): +async def setup_rainforest_100(hass: HomeAssistant) -> AsyncGenerator[MagicMock]: """Set up rainforest.""" MockConfigEntry( domain="rainforest_eagle", diff --git a/tests/components/rainforest_raven/conftest.py b/tests/components/rainforest_raven/conftest.py index 0a809c6430a..35ce4443032 100644 --- a/tests/components/rainforest_raven/conftest.py +++ b/tests/components/rainforest_raven/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Rainforest RAVEn tests.""" +from collections.abc import Generator from unittest.mock import 
AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/rainforest_raven/test_config_flow.py b/tests/components/rainforest_raven/test_config_flow.py index 7f7041cbcd8..da7e65882a4 100644 --- a/tests/components/rainforest_raven/test_config_flow.py +++ b/tests/components/rainforest_raven/test_config_flow.py @@ -1,11 +1,11 @@ """Test Rainforest RAVEn config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from aioraven.device import RAVEnConnectionError import pytest from serial.tools.list_ports_common import ListPortInfo -from typing_extensions import Generator from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.config_entries import SOURCE_USB, SOURCE_USER diff --git a/tests/components/rainmachine/conftest.py b/tests/components/rainmachine/conftest.py index 717d74b421b..22ee807d187 100644 --- a/tests/components/rainmachine/conftest.py +++ b/tests/components/rainmachine/conftest.py @@ -1,5 +1,6 @@ """Define test fixtures for RainMachine.""" +from collections.abc import AsyncGenerator import json from typing import Any from unittest.mock import AsyncMock, patch @@ -8,19 +9,20 @@ import pytest from homeassistant.components.rainmachine import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SSL +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture @pytest.fixture(name="client") -def client_fixture(controller, controller_mac): +def client_fixture(controller: AsyncMock, controller_mac: str) -> AsyncMock: """Define a regenmaschine client.""" return AsyncMock(load_local=AsyncMock(), controllers={controller_mac: controller}) @pytest.fixture(name="config") -def config_fixture(hass): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { 
CONF_IP_ADDRESS: "192.168.1.100", @@ -31,7 +33,9 @@ def config_fixture(hass): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config, controller_mac): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any], controller_mac: str +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -78,7 +82,7 @@ def controller_fixture( @pytest.fixture(name="controller_mac") -def controller_mac_fixture(): +def controller_mac_fixture() -> str: """Define a controller MAC address.""" return "aa:bb:cc:dd:ee:ff" @@ -145,7 +149,9 @@ def data_zones_fixture(): @pytest.fixture(name="setup_rainmachine") -async def setup_rainmachine_fixture(hass, client, config): +async def setup_rainmachine_fixture( + hass: HomeAssistant, client: AsyncMock, config: dict[str, Any] +) -> AsyncGenerator[None]: """Define a fixture to set up RainMachine.""" with ( patch("homeassistant.components.rainmachine.Client", return_value=client), diff --git a/tests/components/rainmachine/test_diagnostics.py b/tests/components/rainmachine/test_diagnostics.py index 1fc03ab357a..ad5743957dd 100644 --- a/tests/components/rainmachine/test_diagnostics.py +++ b/tests/components/rainmachine/test_diagnostics.py @@ -2,6 +2,7 @@ from regenmaschine.errors import RainMachineError from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -17,10 +18,9 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) async def test_entry_diagnostics_failed_controller_diagnostics( @@ -33,7 +33,6 @@ async def test_entry_diagnostics_failed_controller_diagnostics( ) -> None: """Test config entry diagnostics 
when the controller diagnostics API call fails.""" controller.diagnostics.current.side_effect = RainMachineError - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/rdw/conftest.py b/tests/components/rdw/conftest.py index 3f45f44e3d8..71c73a55441 100644 --- a/tests/components/rdw/conftest.py +++ b/tests/components/rdw/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from vehicle import Vehicle from homeassistant.components.rdw.const import CONF_LICENSE_PLATE, DOMAIN diff --git a/tests/components/recollect_waste/conftest.py b/tests/components/recollect_waste/conftest.py index 360dd8aac98..8384da3f388 100644 --- a/tests/components/recollect_waste/conftest.py +++ b/tests/components/recollect_waste/conftest.py @@ -1,6 +1,7 @@ """Define test fixtures for ReCollect Waste.""" from datetime import date +from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiorecollect.client import PickupEvent, PickupType @@ -11,6 +12,7 @@ from homeassistant.components.recollect_waste.const import ( CONF_SERVICE_ID, DOMAIN, ) +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -25,7 +27,9 @@ def client_fixture(pickup_events): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=f"{TEST_PLACE_ID}, {TEST_SERVICE_ID}", data=config @@ -35,7 +39,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(): +def 
config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_PLACE_ID: TEST_PLACE_ID, @@ -54,7 +58,7 @@ def pickup_events_fixture(): @pytest.fixture(name="mock_aiorecollect") -async def mock_aiorecollect_fixture(client): +def mock_aiorecollect_fixture(client): """Define a fixture to patch aiorecollect.""" with ( patch( @@ -70,7 +74,9 @@ async def mock_aiorecollect_fixture(client): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_aiorecollect): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_aiorecollect: None +) -> None: """Define a fixture to set up recollect_waste.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/recollect_waste/test_diagnostics.py b/tests/components/recollect_waste/test_diagnostics.py index 6c8549786e8..2b92892b1d1 100644 --- a/tests/components/recollect_waste/test_diagnostics.py +++ b/tests/components/recollect_waste/test_diagnostics.py @@ -5,6 +5,7 @@ from homeassistant.core import HomeAssistant from .conftest import TEST_SERVICE_ID +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -30,6 +31,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": [ { diff --git a/tests/components/recorder/auto_repairs/events/test_schema.py b/tests/components/recorder/auto_repairs/events/test_schema.py index e3b2638eded..cae181a6270 100644 --- a/tests/components/recorder/auto_repairs/events/test_schema.py +++ b/tests/components/recorder/auto_repairs/events/test_schema.py @@ -11,11 +11,18 @@ from ...common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def 
mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -33,8 +40,8 @@ async def test_validate_db_schema_fix_float_issue( "homeassistant.components.recorder.migration._modify_columns" ) as modify_columns_mock, ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -50,8 +57,8 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_event_data( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -66,8 +73,8 @@ async def test_validate_db_schema_fix_utf8_issue_event_data( return_value={"event_data.4-byte UTF-8"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -83,8 +90,8 @@ async def test_validate_db_schema_fix_utf8_issue_event_data( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, 
hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -99,8 +106,8 @@ async def test_validate_db_schema_fix_collation_issue( return_value={"events.utf8mb4_unicode_ci"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( diff --git a/tests/components/recorder/auto_repairs/states/test_schema.py b/tests/components/recorder/auto_repairs/states/test_schema.py index 58910a4441a..915ac1f3500 100644 --- a/tests/components/recorder/auto_repairs/states/test_schema.py +++ b/tests/components/recorder/auto_repairs/states/test_schema.py @@ -11,11 +11,18 @@ from ...common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -33,8 +40,8 @@ async def test_validate_db_schema_fix_float_issue( "homeassistant.components.recorder.migration._modify_columns" ) as modify_columns_mock, ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -52,8 +59,8 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("enable_schema_validation", [True]) 
@pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_states( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -68,8 +75,8 @@ async def test_validate_db_schema_fix_utf8_issue_states( return_value={"states.4-byte UTF-8"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -84,8 +91,8 @@ async def test_validate_db_schema_fix_utf8_issue_states( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_state_attributes( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -100,8 +107,8 @@ async def test_validate_db_schema_fix_utf8_issue_state_attributes( return_value={"state_attributes.4-byte UTF-8"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -117,8 +124,8 @@ async def test_validate_db_schema_fix_utf8_issue_state_attributes( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -133,8 +140,8 @@ async def 
test_validate_db_schema_fix_collation_issue( return_value={"states.utf8mb4_unicode_ci"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( diff --git a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py index 175cb6ecd1a..a2cf41578c7 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py +++ b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py @@ -1,7 +1,6 @@ """Test removing statistics duplicates.""" import importlib -from pathlib import Path import sys from unittest.mock import patch @@ -10,17 +9,14 @@ from sqlalchemy import create_engine from sqlalchemy.orm import Session from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder, statistics +from homeassistant.components.recorder import statistics from homeassistant.components.recorder.auto_repairs.statistics.duplicates import ( delete_statistics_duplicates, delete_statistics_meta_duplicates, ) -from homeassistant.components.recorder.const import SQLITE_URL_PREFIX from homeassistant.components.recorder.statistics import async_add_external_statistics from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant -from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from ...common import async_wait_recording_done @@ -31,20 +27,15 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" -@pytest.fixture -def 
setup_recorder(recorder_mock: Recorder) -> None: - """Set up recorder.""" - - +@pytest.mark.usefixtures("recorder_mock") async def test_delete_duplicates_no_duplicates( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - setup_recorder: None, ) -> None: """Test removal of duplicated statistics.""" await async_wait_recording_done(hass) @@ -56,10 +47,10 @@ async def test_delete_duplicates_no_duplicates( assert "Found duplicated" not in caplog.text +@pytest.mark.usefixtures("recorder_mock") async def test_duplicate_statistics_handle_integrity_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - setup_recorder: None, ) -> None: """Test the recorder does not blow up if statistics is duplicated.""" await async_wait_recording_done(hass) @@ -140,15 +131,13 @@ def _create_engine_28(*args, **kwargs): return engine +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_delete_metadata_duplicates( - caplog: pytest.LogCaptureFixture, tmp_path: Path + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - module = "tests.components.recorder.db_schema_28" importlib.import_module(module) old_db_schema = sys.modules[module] @@ -205,11 +194,10 @@ async def test_delete_metadata_duplicates( new=_create_engine_28, ), ): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass), + ): await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -228,9 +216,10 @@ async def 
test_delete_metadata_duplicates( await hass.async_stop() # Test that the duplicates are removed during migration from schema 28 - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass), + ): await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -247,15 +236,13 @@ async def test_delete_metadata_duplicates( await hass.async_stop() +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_delete_metadata_duplicates_many( - caplog: pytest.LogCaptureFixture, tmp_path: Path + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - module = "tests.components.recorder.db_schema_28" importlib.import_module(module) old_db_schema = sys.modules[module] @@ -324,11 +311,10 @@ async def test_delete_metadata_duplicates_many( new=_create_engine_28, ), ): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass), + ): await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -338,9 +324,10 @@ async def test_delete_metadata_duplicates_many( await hass.async_stop() # Test that the duplicates are removed during migration from schema 28 - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await 
async_setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass), + ): await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -359,8 +346,9 @@ async def test_delete_metadata_duplicates_many( await hass.async_stop() +@pytest.mark.usefixtures("recorder_mock") async def test_delete_metadata_duplicates_no_duplicates( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, setup_recorder: None + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test removal of duplicated statistics.""" await async_wait_recording_done(hass) diff --git a/tests/components/recorder/auto_repairs/statistics/test_schema.py b/tests/components/recorder/auto_repairs/statistics/test_schema.py index f4e1d74aadf..34a075afbc7 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_schema.py +++ b/tests/components/recorder/auto_repairs/statistics/test_schema.py @@ -11,11 +11,18 @@ from ...common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.mark.parametrize("db_engine", ["mysql"]) @pytest.mark.parametrize("enable_schema_validation", [True]) async def test_validate_db_schema_fix_utf8_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -30,8 +37,8 @@ async def test_validate_db_schema_fix_utf8_issue( return_value={"statistics_meta.4-byte UTF-8"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -48,8 
+55,8 @@ async def test_validate_db_schema_fix_utf8_issue( @pytest.mark.parametrize("table", ["statistics_short_term", "statistics"]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, table: str, db_engine: str, @@ -68,8 +75,8 @@ async def test_validate_db_schema_fix_float_issue( "homeassistant.components.recorder.migration._modify_columns" ) as modify_columns_mock, ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -92,8 +99,8 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, recorder_dialect_name: None, db_engine: str, @@ -108,8 +115,8 @@ async def test_validate_db_schema_fix_collation_issue( return_value={"statistics.utf8mb4_unicode_ci"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( diff --git a/tests/components/recorder/auto_repairs/test_schema.py b/tests/components/recorder/auto_repairs/test_schema.py index d921c0cdbf8..857c0f6572f 100644 --- a/tests/components/recorder/auto_repairs/test_schema.py +++ b/tests/components/recorder/auto_repairs/test_schema.py @@ -3,6 +3,7 @@ import pytest from sqlalchemy import text +from 
homeassistant.components.recorder import Recorder from homeassistant.components.recorder.auto_repairs.schema import ( correct_db_schema_precision, correct_db_schema_utf8, @@ -12,7 +13,7 @@ from homeassistant.components.recorder.auto_repairs.schema import ( ) from homeassistant.components.recorder.db_schema import States from homeassistant.components.recorder.migration import _modify_columns -from homeassistant.components.recorder.util import get_instance, session_scope +from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant from ..common import async_wait_recording_done @@ -20,11 +21,18 @@ from ..common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -33,46 +41,37 @@ async def test_validate_db_schema( Note: The test uses SQLite, the purpose is only to exercise the code. 
""" - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert "Detected statistics schema errors" not in caplog.text assert "Database is about to correct DB schema errors" not in caplog.text +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_good_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is correct.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - schema_errors = await instance.async_add_executor_job( - validate_table_schema_supports_utf8, instance, States, (States.state,) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_supports_utf8, recorder_mock, States, (States.state,) ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_with_broken_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is broken and repairing it.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): with 
session_scope(session=session_maker()) as session: @@ -84,38 +83,34 @@ async def test_validate_db_schema_fix_utf8_issue_with_broken_schema( ) ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( - validate_table_schema_supports_utf8, instance, States, (States.state,) + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_supports_utf8, recorder_mock, States, (States.state,) ) assert schema_errors == {"states.4-byte UTF-8"} # Now repair the schema - await instance.async_add_executor_job( - correct_db_schema_utf8, instance, States, schema_errors + await recorder_mock.async_add_executor_job( + correct_db_schema_utf8, recorder_mock, States, schema_errors ) # Now validate the schema again - schema_errors = await instance.async_add_executor_job( - validate_table_schema_supports_utf8, instance, States, ("state",) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_supports_utf8, recorder_mock, States, ("state",) ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_incorrect_collation( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the collation is incorrect.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): with session_scope(session=session_maker()) as session: @@ -126,59 +121,51 @@ async def 
test_validate_db_schema_fix_incorrect_collation( ) ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( - validate_table_schema_has_correct_collation, instance, States + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_has_correct_collation, recorder_mock, States ) assert schema_errors == {"states.utf8mb4_unicode_ci"} # Now repair the schema - await instance.async_add_executor_job( - correct_db_schema_utf8, instance, States, schema_errors + await recorder_mock.async_add_executor_job( + correct_db_schema_utf8, recorder_mock, States, schema_errors ) # Now validate the schema again - schema_errors = await instance.async_add_executor_job( - validate_table_schema_has_correct_collation, instance, States + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_has_correct_collation, recorder_mock, States ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_correct_collation( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is correct with the correct collation.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - schema_errors = await instance.async_add_executor_job( + schema_errors = await recorder_mock.async_add_executor_job( validate_table_schema_has_correct_collation, - instance, + recorder_mock, States, ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) 
+@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_with_broken_schema_unrepairable( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is broken and cannot be repaired.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): with session_scope(session=session_maker()) as session: @@ -189,63 +176,55 @@ async def test_validate_db_schema_fix_utf8_issue_with_broken_schema_unrepairable "LOCK=EXCLUSIVE;" ) ) - _modify_columns( - session_maker, - instance.engine, - "states", - [ - "entity_id VARCHAR(255) NOT NULL", - ], - ) + _modify_columns( + session_maker, + recorder_mock.engine, + "states", + [ + "entity_id VARCHAR(255) NOT NULL", + ], + ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( - validate_table_schema_supports_utf8, instance, States, ("state",) + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_supports_utf8, recorder_mock, States, ("state",) ) assert schema_errors == set() assert "Error when validating DB schema" in caplog.text +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_good_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is 
correct.""" - if not recorder_db_url.startswith(("mysql://", "postgresql://")): - # This problem only happens on MySQL and PostgreSQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - schema_errors = await instance.async_add_executor_job( + schema_errors = await recorder_mock.async_add_executor_job( validate_db_schema_precision, - instance, + recorder_mock, States, ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_with_broken_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is broken and than repair it.""" - if not recorder_db_url.startswith(("mysql://", "postgresql://")): - # This problem only happens on MySQL and PostgreSQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): _modify_columns( session_maker, - instance.engine, + recorder_mock.engine, "states", [ "last_updated_ts FLOAT(4)", @@ -253,47 +232,44 @@ async def test_validate_db_schema_precision_with_broken_schema( ], ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( validate_db_schema_precision, - instance, + recorder_mock, States, ) assert schema_errors == {"states.double precision"} # Now repair the schema - await instance.async_add_executor_job( - correct_db_schema_precision, instance, States, schema_errors + await recorder_mock.async_add_executor_job( + 
correct_db_schema_precision, recorder_mock, States, schema_errors ) # Now validate the schema again - schema_errors = await instance.async_add_executor_job( + schema_errors = await recorder_mock.async_add_executor_job( validate_db_schema_precision, - instance, + recorder_mock, States, ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_with_unrepairable_broken_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is broken and cannot be repaired.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): _modify_columns( session_maker, - instance.engine, + recorder_mock.engine, "states", [ "state VARCHAR(255) NOT NULL", @@ -302,10 +278,10 @@ async def test_validate_db_schema_precision_with_unrepairable_broken_schema( ], ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( validate_db_schema_precision, - instance, + recorder_mock, States, ) assert "Error when validating DB schema" in caplog.text diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index c72b1ac830b..aee35fceb80 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -79,10 +79,18 @@ async def async_block_recorder(hass: HomeAssistant, seconds: float) -> None: await event.wait() 
+def get_start_time(start: datetime) -> datetime: + """Calculate a valid start time for statistics.""" + start_minutes = start.minute - start.minute % 5 + return start.replace(minute=start_minutes, second=0, microsecond=0) + + def do_adhoc_statistics(hass: HomeAssistant, **kwargs: Any) -> None: """Trigger an adhoc statistics run.""" if not (start := kwargs.get("start")): start = statistics.get_start_time() + elif (start.minute % 5) != 0 or start.second != 0 or start.microsecond != 0: + raise ValueError(f"Statistics must start on 5 minute boundary got {start}") get_instance(hass).queue_task(StatisticsTask(start, False)) @@ -291,11 +299,11 @@ def record_states(hass): wait_recording_done(hass) return hass.states.get(entity_id) - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=15 * 5) three = two + timedelta(seconds=30 * 5) - four = three + timedelta(seconds=15 * 5) + four = three + timedelta(seconds=14 * 5) states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []} with freeze_time(one) as freezer: @@ -416,6 +424,14 @@ def get_schema_module_path(schema_version_postfix: str) -> str: return f"tests.components.recorder.db_schema_{schema_version_postfix}" +@dataclass(slots=True) +class MockMigrationTask(migration.MigrationTask): + """Mock migration task which does nothing.""" + + def run(self, instance: Recorder) -> None: + """Run migration task.""" + + @contextmanager def old_db_schema(schema_version_postfix: str) -> Iterator[None]: """Fixture to initialize the db with the old schema.""" @@ -434,7 +450,7 @@ def old_db_schema(schema_version_postfix: str) -> Iterator[None]: patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", core.RecorderTask), + patch.object(migration.EntityIDMigration, "task", MockMigrationTask), 
patch( CREATE_ENGINE_TARGET, new=partial( diff --git a/tests/components/recorder/conftest.py b/tests/components/recorder/conftest.py index 4db573fa65f..9cdf9dbb372 100644 --- a/tests/components/recorder/conftest.py +++ b/tests/components/recorder/conftest.py @@ -1,14 +1,46 @@ """Fixtures for the recorder component tests.""" -from unittest.mock import patch +from collections.abc import Callable, Generator +from contextlib import contextmanager +from dataclasses import dataclass +from functools import partial +import threading +from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator +from sqlalchemy.engine import Engine +from sqlalchemy.orm.session import Session from homeassistant.components import recorder +from homeassistant.components.recorder import db_schema +from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant +def pytest_configure(config): + """Add custom skip_on_db_engine marker.""" + config.addinivalue_line( + "markers", + "skip_on_db_engine(engine): mark test to run only on named DB engine(s)", + ) + + +@pytest.fixture +def skip_by_db_engine(request: pytest.FixtureRequest, recorder_db_url: str) -> None: + """Fixture to skip tests on unsupported DB engines. + + Mark the test with @pytest.mark.skip_on_db_engine("mysql") to skip on mysql, or + @pytest.mark.skip_on_db_engine(["mysql", "sqlite"]) to skip on mysql and sqlite. 
+ """ + if request.node.get_closest_marker("skip_on_db_engine"): + skip_on_db_engine = request.node.get_closest_marker("skip_on_db_engine").args[0] + if isinstance(skip_on_db_engine, str): + skip_on_db_engine = [skip_on_db_engine] + db_engine = recorder_db_url.partition("://")[0] + if db_engine in skip_on_db_engine: + pytest.skip(f"skipped for DB engine: {db_engine}") + + @pytest.fixture def recorder_dialect_name(hass: HomeAssistant, db_engine: str) -> Generator[None]: """Patch the recorder dialect.""" @@ -22,3 +54,139 @@ def recorder_dialect_name(hass: HomeAssistant, db_engine: str) -> Generator[None "homeassistant.components.recorder.Recorder.dialect_name", db_engine ): yield + + +@dataclass(slots=True) +class InstrumentedMigration: + """Container to aid controlling migration progress.""" + + live_migration_done: threading.Event + live_migration_done_stall: threading.Event + migration_stall: threading.Event + migration_started: threading.Event + migration_version: int | None + non_live_migration_done: threading.Event + non_live_migration_done_stall: threading.Event + apply_update_mock: Mock + stall_on_schema_version: int | None + apply_update_stalled: threading.Event + apply_update_version: int | None + + +@pytest.fixture(name="instrument_migration") +def instrument_migration_fixture( + hass: HomeAssistant, +) -> Generator[InstrumentedMigration]: + """Instrument recorder migration.""" + with instrument_migration(hass) as instrumented_migration: + yield instrumented_migration + + +@contextmanager +def instrument_migration( + hass: HomeAssistant, +) -> Generator[InstrumentedMigration]: + """Instrument recorder migration.""" + + real_migrate_schema_live = recorder.migration.migrate_schema_live + real_migrate_schema_non_live = recorder.migration.migrate_schema_non_live + real_apply_update = recorder.migration._apply_update + + def _instrument_migrate_schema_live(real_func, *args): + """Control migration progress and check results.""" + return 
_instrument_migrate_schema( + real_func, + args, + instrumented_migration.live_migration_done, + instrumented_migration.live_migration_done_stall, + ) + + def _instrument_migrate_schema_non_live(real_func, *args): + """Control migration progress and check results.""" + return _instrument_migrate_schema( + real_func, + args, + instrumented_migration.non_live_migration_done, + instrumented_migration.non_live_migration_done_stall, + ) + + def _instrument_migrate_schema( + real_func, + args, + migration_done: threading.Event, + migration_done_stall: threading.Event, + ): + """Control migration progress and check results.""" + instrumented_migration.migration_started.set() + + try: + migration_result = real_func(*args) + except Exception: + migration_done.set() + migration_done_stall.wait() + raise + + # Check and report the outcome of the migration; if migration fails + # the recorder will silently create a new database. + with session_scope(hass=hass, read_only=True) as session: + res = ( + session.query(db_schema.SchemaChanges) + .order_by(db_schema.SchemaChanges.change_id.desc()) + .first() + ) + instrumented_migration.migration_version = res.schema_version + migration_done.set() + migration_done_stall.wait() + return migration_result + + def _instrument_apply_update( + instance: recorder.Recorder, + hass: HomeAssistant, + engine: Engine, + session_maker: Callable[[], Session], + new_version: int, + old_version: int, + ): + """Control migration progress.""" + instrumented_migration.apply_update_version = new_version + stall_version = instrumented_migration.stall_on_schema_version + if stall_version is None or stall_version == new_version: + instrumented_migration.apply_update_stalled.set() + instrumented_migration.migration_stall.wait() + real_apply_update( + instance, hass, engine, session_maker, new_version, old_version + ) + + with ( + patch( + "homeassistant.components.recorder.migration.migrate_schema_live", + wraps=partial(_instrument_migrate_schema_live, 
real_migrate_schema_live), + ), + patch( + "homeassistant.components.recorder.migration.migrate_schema_non_live", + wraps=partial( + _instrument_migrate_schema_non_live, real_migrate_schema_non_live + ), + ), + patch( + "homeassistant.components.recorder.migration._apply_update", + wraps=_instrument_apply_update, + ) as apply_update_mock, + ): + instrumented_migration = InstrumentedMigration( + live_migration_done=threading.Event(), + live_migration_done_stall=threading.Event(), + migration_stall=threading.Event(), + migration_started=threading.Event(), + migration_version=None, + non_live_migration_done=threading.Event(), + non_live_migration_done_stall=threading.Event(), + apply_update_mock=apply_update_mock, + stall_on_schema_version=None, + apply_update_stalled=threading.Event(), + apply_update_version=None, + ) + + instrumented_migration.live_migration_done_stall.set() + instrumented_migration.non_live_migration_done_stall.set() + yield instrumented_migration diff --git a/tests/components/recorder/db_schema_16.py b/tests/components/recorder/db_schema_16.py index 24786b1ad44..ffee438f2e9 100644 --- a/tests/components/recorder/db_schema_16.py +++ b/tests/components/recorder/db_schema_16.py @@ -356,7 +356,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_18.py b/tests/components/recorder/db_schema_18.py index db6fbb78f56..09cd41d9e33 100644 --- a/tests/components/recorder/db_schema_18.py +++ b/tests/components/recorder/db_schema_18.py @@ -369,7 +369,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = 
self._row.entity_id diff --git a/tests/components/recorder/db_schema_22.py b/tests/components/recorder/db_schema_22.py index cd0dc52a927..d05cb48ff6f 100644 --- a/tests/components/recorder/db_schema_22.py +++ b/tests/components/recorder/db_schema_22.py @@ -488,7 +488,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_23.py b/tests/components/recorder/db_schema_23.py index 9187d271216..9dffadaa0cc 100644 --- a/tests/components/recorder/db_schema_23.py +++ b/tests/components/recorder/db_schema_23.py @@ -478,7 +478,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_23_with_newer_columns.py b/tests/components/recorder/db_schema_23_with_newer_columns.py index 9f902523c64..4343f53d00d 100644 --- a/tests/components/recorder/db_schema_23_with_newer_columns.py +++ b/tests/components/recorder/db_schema_23_with_newer_columns.py @@ -602,7 +602,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_30.py b/tests/components/recorder/db_schema_30.py index b82213cbc89..2668f610dfd 100644 --- a/tests/components/recorder/db_schema_30.py +++ b/tests/components/recorder/db_schema_30.py @@ -33,6 +33,7 @@ from sqlalchemy import ( type_coerce, ) from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite +from 
sqlalchemy.engine.interfaces import Dialect from sqlalchemy.orm import aliased, declarative_base, relationship from sqlalchemy.orm.session import Session @@ -109,7 +110,7 @@ STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): # type: ignore[misc] """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index 15b56e2fc86..60f4f733ec0 100644 --- a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -33,6 +33,7 @@ from sqlalchemy import ( type_coerce, ) from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite +from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.orm import aliased, declarative_base, relationship from sqlalchemy.orm.session import Session @@ -109,7 +110,7 @@ STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): # type: ignore[misc] """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) diff --git a/tests/components/recorder/db_schema_42.py b/tests/components/recorder/db_schema_42.py index c0dfc70571d..99bdbb28f2c 100644 --- a/tests/components/recorder/db_schema_42.py +++ b/tests/components/recorder/db_schema_42.py @@ -171,7 +171,7 @@ def compile_char_one(type_: TypeDecorator, compiler: Any, 
**kw: Any) -> str: class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) @@ -179,7 +179,7 @@ class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): class NativeLargeBinary(LargeBinary): """A faster version of LargeBinary for engines that support python bytes natively.""" - def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: """No conversion needed for engines that support native bytes.""" return None diff --git a/tests/components/recorder/db_schema_43.py b/tests/components/recorder/db_schema_43.py new file mode 100644 index 00000000000..26d8ecd6856 --- /dev/null +++ b/tests/components/recorder/db_schema_43.py @@ -0,0 +1,889 @@ +"""Models for SQLAlchemy. + +This file contains the model definitions for schema version 43. +It is used to test the schema migration logic. 
+""" + +from __future__ import annotations + +from collections.abc import Callable +from datetime import datetime, timedelta +import logging +import time +from typing import Any, Self, cast + +import ciso8601 +from fnv_hash_fast import fnv1a_32 +from sqlalchemy import ( + CHAR, + JSON, + BigInteger, + Boolean, + ColumnElement, + DateTime, + Float, + ForeignKey, + Identity, + Index, + Integer, + LargeBinary, + SmallInteger, + String, + Text, + case, + type_coerce, +) +from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite +from sqlalchemy.engine.interfaces import Dialect +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.orm import DeclarativeBase, Mapped, aliased, mapped_column, relationship +from sqlalchemy.types import TypeDecorator + +from homeassistant.components.recorder.const import ( + ALL_DOMAIN_EXCLUDE_ATTRS, + SupportedDialect, +) +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticDataTimestamp, + StatisticMetaData, + bytes_to_ulid_or_none, + bytes_to_uuid_hex_or_none, + datetime_to_timestamp_or_none, + process_timestamp, + ulid_to_bytes_or_none, + uuid_hex_to_bytes_or_none, +) +from homeassistant.components.sensor import ATTR_STATE_CLASS +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_FRIENDLY_NAME, + ATTR_UNIT_OF_MEASUREMENT, + MATCH_ALL, + MAX_LENGTH_EVENT_EVENT_TYPE, + MAX_LENGTH_STATE_ENTITY_ID, + MAX_LENGTH_STATE_STATE, +) +from homeassistant.core import Context, Event, EventOrigin, EventStateChangedData, State +from homeassistant.helpers.json import JSON_DUMP, json_bytes, json_bytes_strip_null +import homeassistant.util.dt as dt_util +from homeassistant.util.json import ( + JSON_DECODE_EXCEPTIONS, + json_loads, + json_loads_object, +) + + +# SQLAlchemy Schema +class Base(DeclarativeBase): + """Base class for tables.""" + + +SCHEMA_VERSION = 43 + +_LOGGER = logging.getLogger(__name__) + +TABLE_EVENTS = "events" +TABLE_EVENT_DATA = "event_data" +TABLE_EVENT_TYPES = "event_types" 
+TABLE_STATES = "states" +TABLE_STATE_ATTRIBUTES = "state_attributes" +TABLE_STATES_META = "states_meta" +TABLE_RECORDER_RUNS = "recorder_runs" +TABLE_SCHEMA_CHANGES = "schema_changes" +TABLE_STATISTICS = "statistics" +TABLE_STATISTICS_META = "statistics_meta" +TABLE_STATISTICS_RUNS = "statistics_runs" +TABLE_STATISTICS_SHORT_TERM = "statistics_short_term" +TABLE_MIGRATION_CHANGES = "migration_changes" + +STATISTICS_TABLES = ("statistics", "statistics_short_term") + +MAX_STATE_ATTRS_BYTES = 16384 +MAX_EVENT_DATA_BYTES = 32768 + +PSQL_DIALECT = SupportedDialect.POSTGRESQL + +ALL_TABLES = [ + TABLE_STATES, + TABLE_STATE_ATTRIBUTES, + TABLE_EVENTS, + TABLE_EVENT_DATA, + TABLE_EVENT_TYPES, + TABLE_RECORDER_RUNS, + TABLE_SCHEMA_CHANGES, + TABLE_MIGRATION_CHANGES, + TABLE_STATES_META, + TABLE_STATISTICS, + TABLE_STATISTICS_META, + TABLE_STATISTICS_RUNS, + TABLE_STATISTICS_SHORT_TERM, +] + +TABLES_TO_CHECK = [ + TABLE_STATES, + TABLE_EVENTS, + TABLE_RECORDER_RUNS, + TABLE_SCHEMA_CHANGES, +] + +LAST_UPDATED_INDEX_TS = "ix_states_last_updated_ts" +METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts" +EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin" +STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" +LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id" +LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts" +CONTEXT_ID_BIN_MAX_LENGTH = 16 + +MYSQL_COLLATE = "utf8mb4_unicode_ci" +MYSQL_DEFAULT_CHARSET = "utf8mb4" +MYSQL_ENGINE = "InnoDB" + +_DEFAULT_TABLE_ARGS = { + "mysql_default_charset": MYSQL_DEFAULT_CHARSET, + "mysql_collate": MYSQL_COLLATE, + "mysql_engine": MYSQL_ENGINE, + "mariadb_default_charset": MYSQL_DEFAULT_CHARSET, + "mariadb_collate": MYSQL_COLLATE, + "mariadb_engine": MYSQL_ENGINE, +} + +_MATCH_ALL_KEEP = { + ATTR_DEVICE_CLASS, + ATTR_STATE_CLASS, + ATTR_UNIT_OF_MEASUREMENT, + ATTR_FRIENDLY_NAME, +} + + +class UnusedDateTime(DateTime): + """An unused column type that behaves like a datetime.""" + 
+ +class Unused(CHAR): + """An unused column type that behaves like a string.""" + + +@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +@compiles(Unused, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: + """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite.""" + return "CHAR(0)" # Uses 1 byte on MySQL (no change on sqlite) + + +@compiles(Unused, "postgresql") # type: ignore[misc,no-untyped-call] +def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: + """Compile Unused as CHAR(1) on postgresql.""" + return "CHAR(1)" # Uses 1 byte + + +class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): + """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" + + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: + """Offload the datetime parsing to ciso8601.""" + return lambda value: None if value is None else ciso8601.parse_datetime(value) + + +class NativeLargeBinary(LargeBinary): + """A faster version of LargeBinary for engines that support python bytes natively.""" + + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: + """No conversion needed for engines that support native bytes.""" + return None + + +# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32 +# for sqlite and postgresql we use a bigint +UINT_32_TYPE = BigInteger().with_variant( + mysql.INTEGER(unsigned=True), # type: ignore[no-untyped-call] + "mysql", + "mariadb", +) +JSON_VARIANT_CAST = Text().with_variant( + postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call] + "postgresql", +) +JSONB_VARIANT_CAST = Text().with_variant( + postgresql.JSONB(none_as_null=True), # type: ignore[no-untyped-call] + "postgresql", +) +DATETIME_TYPE = ( + DateTime(timezone=True) + .with_variant(mysql.DATETIME(timezone=True, fsp=6), 
"mysql", "mariadb") # type: ignore[no-untyped-call] + .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite") # type: ignore[no-untyped-call] +) +DOUBLE_TYPE = ( + Float() + .with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb") # type: ignore[no-untyped-call] + .with_variant(oracle.DOUBLE_PRECISION(), "oracle") + .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql") +) +UNUSED_LEGACY_COLUMN = Unused(0) +UNUSED_LEGACY_DATETIME_COLUMN = UnusedDateTime(timezone=True) +UNUSED_LEGACY_INTEGER_COLUMN = SmallInteger() +DOUBLE_PRECISION_TYPE_SQL = "DOUBLE PRECISION" +CONTEXT_BINARY_TYPE = LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH).with_variant( + NativeLargeBinary(CONTEXT_ID_BIN_MAX_LENGTH), "mysql", "mariadb", "sqlite" +) + +TIMESTAMP_TYPE = DOUBLE_TYPE + + +class JSONLiteral(JSON): + """Teach SA how to literalize json.""" + + def literal_processor(self, dialect: Dialect) -> Callable[[Any], str]: + """Processor to convert a value to JSON.""" + + def process(value: Any) -> str: + """Dump json.""" + return JSON_DUMP(value) + + return process + + +EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote] + + +class Events(Base): + """Event history data.""" + + __table_args__ = ( + # Used for fetching events at a specific time + # see logbook + Index( + "ix_events_event_type_id_time_fired_ts", "event_type_id", "time_fired_ts" + ), + Index( + EVENTS_CONTEXT_ID_BIN_INDEX, + "context_id_bin", + mysql_length=CONTEXT_ID_BIN_MAX_LENGTH, + mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_EVENTS + event_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + event_type: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + event_data: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin_idx: Mapped[int | None] = mapped_column(SmallInteger) + time_fired: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + 
time_fired_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) + context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + data_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("event_data.data_id"), index=True + ) + context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + event_type_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("event_types.event_type_id") + ) + event_data_rel: Mapped[EventData | None] = relationship("EventData") + event_type_rel: Mapped[EventTypes | None] = relationship("EventTypes") + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + @property + def _time_fired_isotime(self) -> str | None: + """Return time_fired as an isotime string.""" + date_time: datetime | None + if self.time_fired_ts is not None: + date_time = dt_util.utc_from_timestamp(self.time_fired_ts) + else: + date_time = process_timestamp(self.time_fired) + if date_time is None: + return None + return date_time.isoformat(sep=" ", timespec="seconds") + + @staticmethod + def from_event(event: Event) -> Events: + """Create an event database object from a native event.""" + context = event.context + return Events( + event_type=None, + event_data=None, + origin_idx=event.origin.idx, + time_fired=None, + time_fired_ts=event.time_fired_timestamp, + context_id=None, + context_id_bin=ulid_to_bytes_or_none(context.id), + context_user_id=None, + context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), + context_parent_id=None, + context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), + ) + + def to_native(self, 
validate_entity_id: bool = True) -> Event | None: + """Convert to a native HA Event.""" + context = Context( + id=bytes_to_ulid_or_none(self.context_id_bin), + user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin), + parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin), + ) + try: + return Event( + self.event_type or "", + json_loads_object(self.event_data) if self.event_data else {}, + EventOrigin(self.origin) + if self.origin + else EVENT_ORIGIN_ORDER[self.origin_idx or 0], + self.time_fired_ts or 0, + context=context, + ) + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting to event: %s", self) + return None + + +class EventData(Base): + """Event data history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_EVENT_DATA + data_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True) + # Note that this is not named attributes to avoid confusion with the states table + shared_data: Mapped[str | None] = mapped_column( + Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb") + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + @staticmethod + def shared_data_bytes_from_event( + event: Event, dialect: SupportedDialect | None + ) -> bytes: + """Create shared_data from an event.""" + if dialect == SupportedDialect.POSTGRESQL: + bytes_result = json_bytes_strip_null(event.data) + else: + bytes_result = json_bytes(event.data) + if len(bytes_result) > MAX_EVENT_DATA_BYTES: + _LOGGER.warning( + "Event data for %s exceed maximum size of %s bytes. 
" + "This can cause database performance issues; Event data " + "will not be stored", + event.event_type, + MAX_EVENT_DATA_BYTES, + ) + return b"{}" + return bytes_result + + @staticmethod + def hash_shared_data_bytes(shared_data_bytes: bytes) -> int: + """Return the hash of json encoded shared data.""" + return fnv1a_32(shared_data_bytes) + + def to_native(self) -> dict[str, Any]: + """Convert to an event data dictionary.""" + shared_data = self.shared_data + if shared_data is None: + return {} + try: + return cast(dict[str, Any], json_loads(shared_data)) + except JSON_DECODE_EXCEPTIONS: + _LOGGER.exception("Error converting row to event data: %s", self) + return {} + + +class EventTypes(Base): + """Event type history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_EVENT_TYPES + event_type_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + event_type: Mapped[str | None] = mapped_column( + String(MAX_LENGTH_EVENT_EVENT_TYPE), index=True, unique=True + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class States(Base): + """State change history.""" + + __table_args__ = ( + # Used for fetching the state of entities at a specific time + # (get_states in history.py) + Index(METADATA_ID_LAST_UPDATED_INDEX_TS, "metadata_id", "last_updated_ts"), + Index( + STATES_CONTEXT_ID_BIN_INDEX, + "context_id_bin", + mysql_length=CONTEXT_ID_BIN_MAX_LENGTH, + mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATES + state_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + state: Mapped[str | None] = mapped_column(String(MAX_LENGTH_STATE_STATE)) + attributes: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + event_id: Mapped[int | None] = mapped_column(UNUSED_LEGACY_INTEGER_COLUMN) + last_changed: Mapped[datetime | 
None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_changed_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) + last_reported_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) + last_updated: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_updated_ts: Mapped[float | None] = mapped_column( + TIMESTAMP_TYPE, default=time.time, index=True + ) + old_state_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("states.state_id"), index=True + ) + attributes_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("state_attributes.attributes_id"), index=True + ) + context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin_idx: Mapped[int | None] = mapped_column( + SmallInteger + ) # 0 is local, 1 is remote + old_state: Mapped[States | None] = relationship("States", remote_side=[state_id]) + state_attributes: Mapped[StateAttributes | None] = relationship("StateAttributes") + context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + metadata_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("states_meta.metadata_id") + ) + states_meta_rel: Mapped[StatesMeta | None] = relationship("StatesMeta") + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + @property + def _last_updated_isotime(self) -> str | None: + """Return last_updated as an isotime string.""" + date_time: datetime | None + if self.last_updated_ts is not None: + date_time = dt_util.utc_from_timestamp(self.last_updated_ts) + else: + date_time = process_timestamp(self.last_updated) + if date_time is None: + return None + 
return date_time.isoformat(sep=" ", timespec="seconds") + + @staticmethod + def from_event(event: Event[EventStateChangedData]) -> States: + """Create object from a state_changed event.""" + state = event.data["new_state"] + # None state means the state was removed from the state machine + if state is None: + state_value = "" + last_updated_ts = event.time_fired_timestamp + last_changed_ts = None + last_reported_ts = None + else: + state_value = state.state + last_updated_ts = state.last_updated_timestamp + if state.last_updated == state.last_changed: + last_changed_ts = None + else: + last_changed_ts = state.last_changed_timestamp + if state.last_updated == state.last_reported: + last_reported_ts = None + else: + last_reported_ts = state.last_reported_timestamp + context = event.context + return States( + state=state_value, + entity_id=event.data["entity_id"], + attributes=None, + context_id=None, + context_id_bin=ulid_to_bytes_or_none(context.id), + context_user_id=None, + context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), + context_parent_id=None, + context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), + origin_idx=event.origin.idx, + last_updated=None, + last_changed=None, + last_updated_ts=last_updated_ts, + last_changed_ts=last_changed_ts, + last_reported_ts=last_reported_ts, + ) + + def to_native(self, validate_entity_id: bool = True) -> State | None: + """Convert to an HA state object.""" + context = Context( + id=bytes_to_ulid_or_none(self.context_id_bin), + user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin), + parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin), + ) + try: + attrs = json_loads_object(self.attributes) if self.attributes else {} + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting row to state: %s", self) + return None + last_updated = dt_util.utc_from_timestamp(self.last_updated_ts or 0) + if self.last_changed_ts is None or self.last_changed_ts == 
self.last_updated_ts: + last_changed = dt_util.utc_from_timestamp(self.last_updated_ts or 0) + else: + last_changed = dt_util.utc_from_timestamp(self.last_changed_ts or 0) + if ( + self.last_reported_ts is None + or self.last_reported_ts == self.last_updated_ts + ): + last_reported = dt_util.utc_from_timestamp(self.last_updated_ts or 0) + else: + last_reported = dt_util.utc_from_timestamp(self.last_reported_ts or 0) + return State( + self.entity_id or "", + self.state, # type: ignore[arg-type] + # Join the state_attributes table on attributes_id to get the attributes + # for newer states + attrs, + last_changed=last_changed, + last_reported=last_reported, + last_updated=last_updated, + context=context, + validate_entity_id=validate_entity_id, + ) + + +class StateAttributes(Base): + """State attribute change history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATE_ATTRIBUTES + attributes_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True) + # Note that this is not named attributes to avoid confusion with the states table + shared_attrs: Mapped[str | None] = mapped_column( + Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb") + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + @staticmethod + def shared_attrs_bytes_from_event( + event: Event[EventStateChangedData], + dialect: SupportedDialect | None, + ) -> bytes: + """Create shared_attrs from a state_changed event.""" + # None state means the state was removed from the state machine + if (state := event.data["new_state"]) is None: + return b"{}" + if state_info := state.state_info: + unrecorded_attributes = state_info["unrecorded_attributes"] + exclude_attrs = { + *ALL_DOMAIN_EXCLUDE_ATTRS, + *unrecorded_attributes, + } + if MATCH_ALL in unrecorded_attributes: + # Don't exclude device class, state class, unit of measurement + 
# or friendly name when using the MATCH_ALL exclude constant + exclude_attrs.update(state.attributes) + exclude_attrs -= _MATCH_ALL_KEEP + else: + exclude_attrs = ALL_DOMAIN_EXCLUDE_ATTRS + encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes + bytes_result = encoder( + {k: v for k, v in state.attributes.items() if k not in exclude_attrs} + ) + if len(bytes_result) > MAX_STATE_ATTRS_BYTES: + _LOGGER.warning( + "State attributes for %s exceed maximum size of %s bytes. " + "This can cause database performance issues; Attributes " + "will not be stored", + state.entity_id, + MAX_STATE_ATTRS_BYTES, + ) + return b"{}" + return bytes_result + + @staticmethod + def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int: + """Return the hash of json encoded shared attributes.""" + return fnv1a_32(shared_attrs_bytes) + + def to_native(self) -> dict[str, Any]: + """Convert to a state attributes dictionary.""" + shared_attrs = self.shared_attrs + if shared_attrs is None: + return {} + try: + return cast(dict[str, Any], json_loads(shared_attrs)) + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting row to state attributes: %s", self) + return {} + + +class StatesMeta(Base): + """Metadata for states.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATES_META + metadata_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + entity_id: Mapped[str | None] = mapped_column( + String(MAX_LENGTH_STATE_ENTITY_ID), index=True, unique=True + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class StatisticsBase: + """Statistics base class.""" + + id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + created: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + created_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, default=time.time) + metadata_id: Mapped[int | 
None] = mapped_column( + Integer, + ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"), + ) + start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) + mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + min: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + max: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_reset_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) + state: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + sum: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + + duration: timedelta + + @classmethod + def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: + """Create object from a statistics with datatime objects.""" + return cls( # type: ignore[call-arg] + metadata_id=metadata_id, + created=None, + created_ts=time.time(), + start=None, + start_ts=dt_util.utc_to_timestamp(stats["start"]), + mean=stats.get("mean"), + min=stats.get("min"), + max=stats.get("max"), + last_reset=None, + last_reset_ts=datetime_to_timestamp_or_none(stats.get("last_reset")), + state=stats.get("state"), + sum=stats.get("sum"), + ) + + @classmethod + def from_stats_ts(cls, metadata_id: int, stats: StatisticDataTimestamp) -> Self: + """Create object from a statistics with timestamps.""" + return cls( # type: ignore[call-arg] + metadata_id=metadata_id, + created=None, + created_ts=time.time(), + start=None, + start_ts=stats["start_ts"], + mean=stats.get("mean"), + min=stats.get("min"), + max=stats.get("max"), + last_reset=None, + last_reset_ts=stats.get("last_reset_ts"), + state=stats.get("state"), + sum=stats.get("sum"), + ) + + +class Statistics(Base, StatisticsBase): + """Long term statistics.""" + + duration = timedelta(hours=1) + + __table_args__ = ( + # Used for fetching statistics for a certain entity at a specific time + Index( + 
"ix_statistics_statistic_id_start_ts", + "metadata_id", + "start_ts", + unique=True, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATISTICS + + +class StatisticsShortTerm(Base, StatisticsBase): + """Short term statistics.""" + + duration = timedelta(minutes=5) + + __table_args__ = ( + # Used for fetching statistics for a certain entity at a specific time + Index( + "ix_statistics_short_term_statistic_id_start_ts", + "metadata_id", + "start_ts", + unique=True, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATISTICS_SHORT_TERM + + +class StatisticsMeta(Base): + """Statistics meta data.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATISTICS_META + id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + statistic_id: Mapped[str | None] = mapped_column( + String(255), index=True, unique=True + ) + source: Mapped[str | None] = mapped_column(String(32)) + unit_of_measurement: Mapped[str | None] = mapped_column(String(255)) + has_mean: Mapped[bool | None] = mapped_column(Boolean) + has_sum: Mapped[bool | None] = mapped_column(Boolean) + name: Mapped[str | None] = mapped_column(String(255)) + + @staticmethod + def from_meta(meta: StatisticMetaData) -> StatisticsMeta: + """Create object from meta data.""" + return StatisticsMeta(**meta) + + +class RecorderRuns(Base): + """Representation of recorder run.""" + + __table_args__ = ( + Index("ix_recorder_runs_start_end", "start", "end"), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_RECORDER_RUNS + run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + end: Mapped[datetime | None] = mapped_column(DATETIME_TYPE) + closed_incorrect: Mapped[bool] = mapped_column(Boolean, default=False) + created: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" 
+ end = ( + f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None + ) + return ( + f"" + ) + + def to_native(self, validate_entity_id: bool = True) -> Self: + """Return self, native format is this model.""" + return self + + +class MigrationChanges(Base): + """Representation of migration changes.""" + + __tablename__ = TABLE_MIGRATION_CHANGES + __table_args__ = (_DEFAULT_TABLE_ARGS,) + + migration_id: Mapped[str] = mapped_column(String(255), primary_key=True) + version: Mapped[int] = mapped_column(SmallInteger) + + +class SchemaChanges(Base): + """Representation of schema version changes.""" + + __tablename__ = TABLE_SCHEMA_CHANGES + __table_args__ = (_DEFAULT_TABLE_ARGS,) + + change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + schema_version: Mapped[int | None] = mapped_column(Integer) + changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class StatisticsRuns(Base): + """Representation of statistics run.""" + + __tablename__ = TABLE_STATISTICS_RUNS + __table_args__ = (_DEFAULT_TABLE_ARGS,) + + run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + +EVENT_DATA_JSON = type_coerce( + EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True) +) +OLD_FORMAT_EVENT_DATA_JSON = type_coerce( + Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True) +) + +SHARED_ATTRS_JSON = type_coerce( + StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True) +) +OLD_FORMAT_ATTRS_JSON = type_coerce( + States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True) +) + +ENTITY_ID_IN_EVENT: ColumnElement = 
EVENT_DATA_JSON["entity_id"] +OLD_ENTITY_ID_IN_EVENT: ColumnElement = OLD_FORMAT_EVENT_DATA_JSON["entity_id"] +DEVICE_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["device_id"] +OLD_STATE = aliased(States, name="old_state") + +SHARED_ATTR_OR_LEGACY_ATTRIBUTES = case( + (StateAttributes.shared_attrs.is_(None), States.attributes), + else_=StateAttributes.shared_attrs, +).label("attributes") +SHARED_DATA_OR_LEGACY_EVENT_DATA = case( + (EventData.shared_data.is_(None), Events.event_data), else_=EventData.shared_data +).label("event_data") diff --git a/tests/components/recorder/db_schema_9.py b/tests/components/recorder/db_schema_9.py new file mode 100644 index 00000000000..f9a8c2d2cad --- /dev/null +++ b/tests/components/recorder/db_schema_9.py @@ -0,0 +1,233 @@ +"""Models for SQLAlchemy. + +This file contains the model definitions for schema version 9, +used by Home Assistant Core 0.119.0. +It is used to test the schema migration logic. +""" + +import json +import logging + +from sqlalchemy import ( + Boolean, + Column, + DateTime, + ForeignKey, + Index, + Integer, + String, + Text, + distinct, +) +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship +from sqlalchemy.orm.session import Session + +from homeassistant.core import Context, Event, EventOrigin, State, split_entity_id +from homeassistant.helpers.json import JSONEncoder +import homeassistant.util.dt as dt_util + +# SQLAlchemy Schema +Base = declarative_base() + +SCHEMA_VERSION = 9 + +_LOGGER = logging.getLogger(__name__) + +DB_TIMEZONE = "+00:00" + +TABLE_EVENTS = "events" +TABLE_STATES = "states" +TABLE_RECORDER_RUNS = "recorder_runs" +TABLE_SCHEMA_CHANGES = "schema_changes" + +ALL_TABLES = [TABLE_EVENTS, TABLE_STATES, TABLE_RECORDER_RUNS, TABLE_SCHEMA_CHANGES] + + +class Events(Base): # type: ignore[valid-type,misc] + """Event history data.""" + + __tablename__ = TABLE_EVENTS + event_id = Column(Integer, primary_key=True) + event_type = Column(String(32)) + 
event_data = Column(Text) + origin = Column(String(32)) + time_fired = Column(DateTime(timezone=True), index=True) + created = Column(DateTime(timezone=True), default=dt_util.utcnow) + context_id = Column(String(36), index=True) + context_user_id = Column(String(36), index=True) + context_parent_id = Column(String(36), index=True) + + __table_args__ = ( + # Used for fetching events at a specific time + # see logbook + Index("ix_events_event_type_time_fired", "event_type", "time_fired"), + ) + + @staticmethod + def from_event(event, event_data=None): + """Create an event database object from a native event.""" + return Events( + event_type=event.event_type, + event_data=event_data or json.dumps(event.data, cls=JSONEncoder), + origin=str(event.origin.value), + time_fired=event.time_fired, + context_id=event.context.id, + context_user_id=event.context.user_id, + context_parent_id=event.context.parent_id, + ) + + def to_native(self, validate_entity_id=True): + """Convert to a natve HA Event.""" + context = Context( + id=self.context_id, + user_id=self.context_user_id, + parent_id=self.context_parent_id, + ) + try: + return Event( + self.event_type, + json.loads(self.event_data), + EventOrigin(self.origin), + process_timestamp(self.time_fired), + context=context, + ) + except ValueError: + # When json.loads fails + _LOGGER.exception("Error converting to event: %s", self) + return None + + +class States(Base): # type: ignore[valid-type,misc] + """State change history.""" + + __tablename__ = TABLE_STATES + state_id = Column(Integer, primary_key=True) + domain = Column(String(64)) + entity_id = Column(String(255)) + state = Column(String(255)) + attributes = Column(Text) + event_id = Column(Integer, ForeignKey("events.event_id"), index=True) + last_changed = Column(DateTime(timezone=True), default=dt_util.utcnow) + last_updated = Column(DateTime(timezone=True), default=dt_util.utcnow, index=True) + created = Column(DateTime(timezone=True), default=dt_util.utcnow) + 
old_state_id = Column(Integer, ForeignKey("states.state_id")) + event = relationship("Events", uselist=False) + old_state = relationship("States", remote_side=[state_id]) + + __table_args__ = ( + # Used for fetching the state of entities at a specific time + # (get_states in history.py) + Index("ix_states_entity_id_last_updated", "entity_id", "last_updated"), + ) + + @staticmethod + def from_event(event): + """Create object from a state_changed event.""" + entity_id = event.data["entity_id"] + state = event.data.get("new_state") + + dbstate = States(entity_id=entity_id) + + # State got deleted + if state is None: + dbstate.state = "" + dbstate.domain = split_entity_id(entity_id)[0] + dbstate.attributes = "{}" + dbstate.last_changed = event.time_fired + dbstate.last_updated = event.time_fired + else: + dbstate.domain = state.domain + dbstate.state = state.state + dbstate.attributes = json.dumps(dict(state.attributes), cls=JSONEncoder) + dbstate.last_changed = state.last_changed + dbstate.last_updated = state.last_updated + + return dbstate + + def to_native(self, validate_entity_id=True): + """Convert to an HA state object.""" + try: + return State( + self.entity_id, + self.state, + json.loads(self.attributes), + process_timestamp(self.last_changed), + process_timestamp(self.last_updated), + # Join the events table on event_id to get the context instead + # as it will always be there for state_changed events + context=Context(id=None), + validate_entity_id=validate_entity_id, + ) + except ValueError: + # When json.loads fails + _LOGGER.exception("Error converting row to state: %s", self) + return None + + +class RecorderRuns(Base): # type: ignore[valid-type,misc] + """Representation of recorder run.""" + + __tablename__ = TABLE_RECORDER_RUNS + run_id = Column(Integer, primary_key=True) + start = Column(DateTime(timezone=True), default=dt_util.utcnow) + end = Column(DateTime(timezone=True)) + closed_incorrect = Column(Boolean, default=False) + created = 
Column(DateTime(timezone=True), default=dt_util.utcnow) + + __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),) + + def entity_ids(self, point_in_time=None): + """Return the entity ids that existed in this run. + + Specify point_in_time if you want to know which existed at that point + in time inside the run. + """ + session = Session.object_session(self) + + assert session is not None, "RecorderRuns need to be persisted" + + query = session.query(distinct(States.entity_id)).filter( + States.last_updated >= self.start + ) + + if point_in_time is not None: + query = query.filter(States.last_updated < point_in_time) + elif self.end is not None: + query = query.filter(States.last_updated < self.end) + + return [row[0] for row in query] + + def to_native(self, validate_entity_id=True): + """Return self, native format is this model.""" + return self + + +class SchemaChanges(Base): # type: ignore[valid-type,misc] + """Representation of schema version changes.""" + + __tablename__ = TABLE_SCHEMA_CHANGES + change_id = Column(Integer, primary_key=True) + schema_version = Column(Integer) + changed = Column(DateTime(timezone=True), default=dt_util.utcnow) + + +def process_timestamp(ts): + """Process a timestamp into datetime object.""" + if ts is None: + return None + if ts.tzinfo is None: + return ts.replace(tzinfo=dt_util.UTC) + + return dt_util.as_utc(ts) + + +def process_timestamp_to_utc_isoformat(ts): + """Process a timestamp into UTC isotime.""" + if ts is None: + return None + if ts.tzinfo == dt_util.UTC: + return ts.isoformat() + if ts.tzinfo is None: + return f"{ts.isoformat()}{DB_TIMEZONE}" + return ts.astimezone(dt_util.UTC).isoformat() diff --git a/tests/components/recorder/test_entity_registry.py b/tests/components/recorder/test_entity_registry.py index a74992525b1..ad438dcc525 100644 --- a/tests/components/recorder/test_entity_registry.py +++ b/tests/components/recorder/test_entity_registry.py @@ -40,7 +40,7 @@ def 
_count_entity_id_in_states_meta( @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_filters.py b/tests/components/recorder/test_filters.py index 13a2a325f1e..2841cabda1b 100644 --- a/tests/components/recorder/test_filters.py +++ b/tests/components/recorder/test_filters.py @@ -7,13 +7,8 @@ from homeassistant.components.recorder.filters import ( extract_include_exclude_filter_conf, merge_include_exclude_filters, ) -from homeassistant.helpers.entityfilter import ( - CONF_DOMAINS, - CONF_ENTITIES, - CONF_ENTITY_GLOBS, - CONF_EXCLUDE, - CONF_INCLUDE, -) +from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE +from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS EMPTY_INCLUDE_FILTER = { CONF_INCLUDE: { diff --git a/tests/components/recorder/test_filters_with_entityfilter.py b/tests/components/recorder/test_filters_with_entityfilter.py index 1ee127a9989..97839803619 100644 --- a/tests/components/recorder/test_filters_with_entityfilter.py +++ b/tests/components/recorder/test_filters_with_entityfilter.py @@ -13,14 +13,17 @@ from homeassistant.components.recorder.filters import ( sqlalchemy_filter_from_include_exclude_conf, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.const import ATTR_ENTITY_ID, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entityfilter import ( +from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_DOMAINS, CONF_ENTITIES, - CONF_ENTITY_GLOBS, CONF_EXCLUDE, CONF_INCLUDE, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entityfilter import ( + CONF_ENTITY_GLOBS, convert_include_exclude_filter, ) diff --git a/tests/components/recorder/test_filters_with_entityfilter_schema_37.py 
b/tests/components/recorder/test_filters_with_entityfilter_schema_37.py index 9c66d2ee169..d3024df4ed6 100644 --- a/tests/components/recorder/test_filters_with_entityfilter_schema_37.py +++ b/tests/components/recorder/test_filters_with_entityfilter_schema_37.py @@ -1,12 +1,12 @@ """The tests for the recorder filter matching the EntityFilter component.""" +from collections.abc import AsyncGenerator import json from unittest.mock import patch import pytest from sqlalchemy import select from sqlalchemy.engine.row import Row -from typing_extensions import AsyncGenerator from homeassistant.components.recorder import Recorder, get_instance from homeassistant.components.recorder.db_schema import EventData, Events, States @@ -16,14 +16,17 @@ from homeassistant.components.recorder.filters import ( sqlalchemy_filter_from_include_exclude_conf, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.const import ATTR_ENTITY_ID, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entityfilter import ( +from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_DOMAINS, CONF_ENTITIES, - CONF_ENTITY_GLOBS, CONF_EXCLUDE, CONF_INCLUDE, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entityfilter import ( + CONF_ENTITY_GLOBS, convert_include_exclude_filter, ) diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index af846353467..3923c72107a 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -47,7 +47,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -891,14 +891,17 @@ def record_states( return zero, four, states +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) 
+@pytest.mark.usefixtures("skip_by_db_engine") async def test_state_changes_during_period_query_during_migration_to_schema_25( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. + """ instance = recorder.get_instance(hass) @@ -957,14 +960,17 @@ async def test_state_changes_during_period_query_during_migration_to_schema_25( assert state.attributes == {"name": "the light"} +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_get_states_query_during_migration_to_schema_25( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. 
+ """ instance = recorder.get_instance(hass) @@ -1007,14 +1013,17 @@ async def test_get_states_query_during_migration_to_schema_25( assert state.attributes == {"name": "the light"} +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_get_states_query_during_migration_to_schema_25_multiple_entities( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. + """ instance = recorder.get_instance(hass) diff --git a/tests/components/recorder/test_history_db_schema_30.py b/tests/components/recorder/test_history_db_schema_30.py index e5e80b0cdb9..0e5f6cf7f79 100644 --- a/tests/components/recorder/test_history_db_schema_30.py +++ b/tests/components/recorder/test_history_db_schema_30.py @@ -33,7 +33,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_history_db_schema_32.py b/tests/components/recorder/test_history_db_schema_32.py index 8a3e6a58ab3..3ee6edd8e1e 100644 --- a/tests/components/recorder/test_history_db_schema_32.py +++ b/tests/components/recorder/test_history_db_schema_32.py @@ -33,7 +33,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up 
recorder.""" diff --git a/tests/components/recorder/test_history_db_schema_42.py b/tests/components/recorder/test_history_db_schema_42.py index 083d4c0930e..5d9444e9cfe 100644 --- a/tests/components/recorder/test_history_db_schema_42.py +++ b/tests/components/recorder/test_history_db_schema_42.py @@ -42,7 +42,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -893,14 +893,17 @@ def record_states( return zero, four, states +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_state_changes_during_period_query_during_migration_to_schema_25( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. 
+ """ instance = recorder.get_instance(hass) @@ -959,14 +962,17 @@ async def test_state_changes_during_period_query_during_migration_to_schema_25( assert state.attributes == {"name": "the light"} +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_get_states_query_during_migration_to_schema_25( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. + """ instance = recorder.get_instance(hass) @@ -1009,14 +1015,17 @@ async def test_get_states_query_during_migration_to_schema_25( assert state.attributes == {"name": "the light"} +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_get_states_query_during_migration_to_schema_25_multiple_entities( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. 
+ """ instance = recorder.get_instance(hass) diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 52947ce0c19..c8e58d58105 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -3,9 +3,10 @@ from __future__ import annotations import asyncio +from collections.abc import Generator from datetime import datetime, timedelta -from pathlib import Path import sqlite3 +import sys import threading from typing import Any, cast from unittest.mock import MagicMock, Mock, patch @@ -14,7 +15,6 @@ from freezegun.api import FrozenDateTimeFactory import pytest from sqlalchemy.exc import DatabaseError, OperationalError, SQLAlchemyError from sqlalchemy.pool import QueuePool -from typing_extensions import Generator from homeassistant.components import recorder from homeassistant.components.recorder import ( @@ -26,7 +26,6 @@ from homeassistant.components.recorder import ( CONF_DB_URL, CONFIG_SCHEMA, DOMAIN, - SQLITE_URL_PREFIX, Recorder, db_schema, get_instance, @@ -104,7 +103,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -140,19 +139,16 @@ def _default_recorder(hass): ) +@pytest.mark.parametrize("persistent_database", [True]) async def test_shutdown_before_startup_finishes( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, - recorder_db_url: str, - tmp_path: Path, ) -> None: - """Test shutdown before recorder starts is clean.""" - if recorder_db_url == "sqlite://": - # On-disk database because this test does not play nice with the - # MutexPool - recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") + """Test shutdown before recorder starts is clean. + + On-disk database because this test does not play nice with the MutexPool. 
+ """ config = { - recorder.CONF_DB_URL: recorder_db_url, recorder.CONF_COMMIT_INTERVAL: 1, } hass.set_state(CoreState.not_running) @@ -905,16 +901,19 @@ async def test_saving_event_with_oversized_data( hass.bus.async_fire("test_event", event_data) hass.bus.async_fire("test_event_too_big", massive_dict) await async_wait_recording_done(hass) - events = {} with session_scope(hass=hass, read_only=True) as session: - for _, data, event_type in ( - session.query(Events.event_id, EventData.shared_data, EventTypes.event_type) - .outerjoin(EventData, Events.data_id == EventData.data_id) - .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) - .where(EventTypes.event_type.in_(["test_event", "test_event_too_big"])) - ): - events[event_type] = data + events = { + event_type: data + for _, data, event_type in ( + session.query( + Events.event_id, EventData.shared_data, EventTypes.event_type + ) + .outerjoin(EventData, Events.data_id == EventData.data_id) + .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) + .where(EventTypes.event_type.in_(["test_event", "test_event_too_big"])) + ) + } assert "test_event_too_big" in caplog.text @@ -932,18 +931,19 @@ async def test_saving_event_invalid_context_ulid( event_data = {"test_attr": 5, "test_attr_10": "nice"} hass.bus.async_fire("test_event", event_data, context=Context(id="invalid")) await async_wait_recording_done(hass) - events = {} with session_scope(hass=hass, read_only=True) as session: - for _, data, event_type in ( - session.query(Events.event_id, EventData.shared_data, EventTypes.event_type) - .outerjoin(EventData, Events.data_id == EventData.data_id) - .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) - .where(EventTypes.event_type.in_(["test_event"])) - ): - events[event_type] = data - - assert "invalid" in caplog.text + events = { + event_type: data + for _, data, event_type in ( + session.query( + Events.event_id, EventData.shared_data, 
EventTypes.event_type + ) + .outerjoin(EventData, Events.data_id == EventData.data_id) + .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) + .where(EventTypes.event_type.in_(["test_event"])) + ) + } assert len(events) == 1 assert json_loads(events["test_event"]) == event_data @@ -1365,28 +1365,27 @@ async def test_statistics_runs_initiated( @pytest.mark.freeze_time("2022-09-13 09:00:00+02:00") +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("enable_statistics", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_compile_missing_statistics( - tmp_path: Path, freezer: FrozenDateTimeFactory + async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory ) -> None: """Test missing statistics are compiled on startup.""" now = dt_util.utcnow().replace(minute=0, second=0, microsecond=0) - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" def get_statistic_runs(hass: HomeAssistant) -> list: with session_scope(hass=hass, read_only=True) as session: return list(session.query(StatisticsRuns)) - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass, wait_recorder=False) as instance, + ): await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) - instance = recorder.get_instance(hass) statistics_runs = await instance.async_add_executor_job( get_statistic_runs, hass ) @@ -1412,7 +1411,10 @@ async def test_compile_missing_statistics( stats_hourly.append(event) freezer.tick(timedelta(hours=1)) - async with async_test_home_assistant() as hass: + async with ( + async_test_home_assistant() as hass, + 
async_test_recorder(hass, wait_recorder=False) as instance, + ): hass.bus.async_listen( EVENT_RECORDER_5MIN_STATISTICS_GENERATED, async_5min_stats_updated_listener ) @@ -1421,13 +1423,9 @@ async def test_compile_missing_statistics( async_hourly_stats_updated_listener, ) - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) - await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) - instance = recorder.get_instance(hass) statistics_runs = await instance.async_add_executor_job( get_statistic_runs, hass ) @@ -1627,24 +1625,24 @@ async def test_service_disable_states_not_recording( ) -async def test_service_disable_run_information_recorded(tmp_path: Path) -> None: +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_service_disable_run_information_recorded( + async_test_recorder: RecorderInstanceGenerator, +) -> None: """Test that runs are still recorded when recorder is disabled.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" def get_recorder_runs(hass: HomeAssistant) -> list: with session_scope(hass=hass, read_only=True) as session: return list(session.query(RecorderRuns)) - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_start() await async_wait_recording_done(hass) - instance = recorder.get_instance(hass) db_run_info = await instance.async_add_executor_job(get_recorder_runs, hass) assert len(db_run_info) == 1 assert db_run_info[0].start is not None @@ -1660,13 +1658,13 @@ async def 
test_service_disable_run_information_recorded(tmp_path: Path) -> None: await async_wait_recording_done(hass) await hass.async_stop() - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_start() await async_wait_recording_done(hass) - instance = recorder.get_instance(hass) db_run_info = await instance.async_add_executor_job(get_recorder_runs, hass) assert len(db_run_info) == 2 assert db_run_info[0].start is not None @@ -1681,23 +1679,17 @@ class CannotSerializeMe: """A class that the JSONEncoder cannot serialize.""" +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("recorder_config", [{CONF_COMMIT_INTERVAL: 0}]) async def test_database_corruption_while_running( - hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + recorder_mock: Recorder, + recorder_db_url: str, + caplog: pytest.LogCaptureFixture, ) -> None: """Test we can recover from sqlite3 db corruption.""" - - def _create_tmpdir_for_test_db() -> Path: - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - return test_dir.joinpath("test.db") - - test_db_file = await hass.async_add_executor_job(_create_tmpdir_for_test_db) - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl, CONF_COMMIT_INTERVAL: 0}} - ) await hass.async_block_till_done() caplog.clear() @@ -1707,7 +1699,9 @@ async def test_database_corruption_while_running( hass.states.async_set("test.lost", "on", {}) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = 
sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError( + "database disk image is malformed" + ) await async_wait_recording_done(hass) with patch.object( @@ -1716,6 +1710,7 @@ async def test_database_corruption_while_running( side_effect=OperationalError("statement", {}, []), ): await async_wait_recording_done(hass) + test_db_file = recorder_db_url.removeprefix("sqlite:///") await hass.async_add_executor_job(corrupt_db_file, test_db_file) await async_wait_recording_done(hass) @@ -1809,23 +1804,21 @@ async def test_entity_id_filter( assert len(db_events) == idx + 1, data +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_unlock( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, - recorder_db_url: str, - tmp_path: Path, ) -> None: - """Test writing events during lock getting written after unlocking.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # Database locking is only used for SQLite - return + """Test writing events during lock getting written after unlocking. - if recorder_db_url == "sqlite://": - # Use file DB, in memory DB cannot do write locks. - recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") + This test is specific for SQLite: Locking is not implemented for other engines. + + Use file DB, in memory DB cannot do write locks. 
+ """ config = { recorder.CONF_COMMIT_INTERVAL: 0, - recorder.CONF_DB_URL: recorder_db_url, } await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -1863,26 +1856,23 @@ async def test_database_lock_and_unlock( assert len(db_events) == 1 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_overflow( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, - recorder_db_url: str, - tmp_path: Path, caplog: pytest.LogCaptureFixture, issue_registry: ir.IssueRegistry, ) -> None: - """Test writing events during lock leading to overflow the queue causes the database to unlock.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # Database locking is only used for SQLite - return pytest.skip("Database locking is only used for SQLite") + """Test writing events during lock leading to overflow the queue causes the database to unlock. - # Use file DB, in memory DB cannot do write locks. - if recorder_db_url == "sqlite://": - # Use file DB, in memory DB cannot do write locks. - recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") + This test is specific for SQLite: Locking is not implemented for other engines. + + Use file DB, in memory DB cannot do write locks. 
+ """ config = { recorder.CONF_COMMIT_INTERVAL: 0, - recorder.CONF_DB_URL: recorder_db_url, } def _get_db_events(): @@ -1896,7 +1886,9 @@ async def test_database_lock_and_overflow( with ( patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize + ), ): await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -1929,25 +1921,23 @@ async def test_database_lock_and_overflow( assert start_time.count(":") == 2 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_overflow_checks_available_memory( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, - recorder_db_url: str, - tmp_path: Path, caplog: pytest.LogCaptureFixture, issue_registry: ir.IssueRegistry, ) -> None: - """Test writing events during lock leading to overflow the queue causes the database to unlock.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - return pytest.skip("Database locking is only used for SQLite") + """Test writing events during lock leading to overflow the queue causes the database to unlock. - # Use file DB, in memory DB cannot do write locks. - if recorder_db_url == "sqlite://": - # Use file DB, in memory DB cannot do write locks. - recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") + This test is specific for SQLite: Locking is not implemented for other engines. + + Use file DB, in memory DB cannot do write locks. 
+ """ config = { recorder.CONF_COMMIT_INTERVAL: 0, - recorder.CONF_DB_URL: recorder_db_url, } def _get_db_events(): @@ -1958,26 +1948,43 @@ async def test_database_lock_and_overflow_checks_available_memory( ) ) - await async_setup_recorder_instance(hass, config) - await hass.async_block_till_done() + with patch( + "homeassistant.components.recorder.core.QUEUE_CHECK_INTERVAL", + timedelta(seconds=1), + ): + await async_setup_recorder_instance(hass, config) + await hass.async_block_till_done() event_type = "EVENT_TEST" event_types = (event_type,) await async_wait_recording_done(hass) + min_available_memory = 256 * 1024**2 + + out_of_ram = False + + def _get_available_memory(*args: Any, **kwargs: Any) -> int: + nonlocal out_of_ram + return min_available_memory / 2 if out_of_ram else min_available_memory with ( patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 1), + patch.object( + recorder.core, + "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", + min_available_memory, + ), patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01), patch.object( recorder.core.Recorder, "_available_memory", - return_value=recorder.core.ESTIMATED_QUEUE_ITEM_SIZE * 4, + side_effect=_get_available_memory, ), ): instance = get_instance(hass) - await instance.lock_database() + assert await instance.lock_database() + db_events = await instance.async_add_executor_job(_get_db_events) + assert len(db_events) == 0 # Record up to the extended limit (which takes into account the available memory) for _ in range(2): event_data = {"test_attr": 5, "test_attr_10": "nice"} @@ -1994,6 +2001,7 @@ async def test_database_lock_and_overflow_checks_available_memory( assert "Database queue backlog reached more than" not in caplog.text + out_of_ram = True # Record beyond the extended limit (which takes into account the available memory) for _ in range(20): event_data = {"test_attr": 5, "test_attr_10": "nice"} @@ -2019,13 
+2027,15 @@ async def test_database_lock_and_overflow_checks_available_memory( assert start_time.count(":") == 2 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_database_lock_timeout( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test locking database timeout when recorder stopped.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite: Locking is not implemented for other engines - return + """Test locking database timeout when recorder stopped. + + This test is specific for SQLite: Locking is not implemented for other engines. + """ hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) @@ -2093,16 +2103,18 @@ async def test_database_connection_keep_alive( assert "Sending keepalive" in caplog.text +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_database_connection_keep_alive_disabled_on_sqlite( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: - """Test we do not do keep alive for sqlite.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite, keepalive runs on other engines - return + """Test we do not do keep alive for sqlite. + + This test is specific for SQLite, keepalive runs on other engines. + """ instance = await async_setup_recorder_instance(hass) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -2310,7 +2322,7 @@ async def test_connect_args_priority(hass: HomeAssistant, config_url) -> None: __bases__ = [] _has_events = False - def __init__(*args, **kwargs): ... + def __init__(self, *args: Any, **kwargs: Any) -> None: ... 
@property def is_async(self): @@ -2557,7 +2569,13 @@ async def test_clean_shutdown_when_recorder_thread_raises_during_validate_db_sch assert instance.engine is None -async def test_clean_shutdown_when_schema_migration_fails(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("func_to_patch", "expected_setup_result"), + [("migrate_schema_non_live", False), ("migrate_schema_live", False)], +) +async def test_clean_shutdown_when_schema_migration_fails( + hass: HomeAssistant, func_to_patch: str, expected_setup_result: bool +) -> None: """Test we still shutdown cleanly when schema migration fails.""" with ( patch.object( @@ -2568,13 +2586,13 @@ async def test_clean_shutdown_when_schema_migration_fails(hass: HomeAssistant) - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch.object( migration, - "migrate_schema", + func_to_patch, side_effect=Exception, ), ): if recorder.DOMAIN not in hass.data: recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( + setup_result = await async_setup_component( hass, recorder.DOMAIN, { @@ -2585,6 +2603,7 @@ async def test_clean_shutdown_when_schema_migration_fails(hass: HomeAssistant) - } }, ) + assert setup_result == expected_setup_result await hass.async_block_till_done() instance = recorder.get_instance(hass) @@ -2642,7 +2661,6 @@ async def test_commit_before_commits_pending_writes( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, recorder_db_url: str, - tmp_path: Path, ) -> None: """Test commit_before with a non-zero commit interval. 
@@ -2712,3 +2730,20 @@ async def test_all_tables_use_default_table_args(hass: HomeAssistant) -> None: """Test that all tables use the default table args.""" for table in db_schema.Base.metadata.tables.values(): assert table.kwargs.items() >= db_schema._DEFAULT_TABLE_ARGS.items() + + +async def test_empty_entity_id( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the recorder can handle an empty entity_id.""" + await async_setup_recorder_instance( + hass, + { + "exclude": {"domains": "hidden_domain"}, + }, + ) + hass.bus.async_fire("hello", {"entity_id": ""}) + await async_wait_recording_done(hass) + assert "Invalid entity ID" not in caplog.text diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index a21f4771616..b56dfe3e189 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -4,11 +4,12 @@ import datetime import importlib import sqlite3 import sys -import threading -from unittest.mock import Mock, PropertyMock, call, patch +from unittest.mock import ANY, Mock, PropertyMock, call, patch import pytest -from sqlalchemy import create_engine, text +from sqlalchemy import create_engine, inspect, text +from sqlalchemy.engine import Engine +from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint from sqlalchemy.exc import ( DatabaseError, InternalError, @@ -16,14 +17,14 @@ from sqlalchemy.exc import ( ProgrammingError, SQLAlchemyError, ) -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, scoped_session, sessionmaker from sqlalchemy.pool import StaticPool -from homeassistant.bootstrap import async_setup_component from homeassistant.components import persistent_notification as pn, recorder from homeassistant.components.recorder import db_schema, migration from homeassistant.components.recorder.db_schema import ( SCHEMA_VERSION, + Events, 
RecorderRuns, States, ) @@ -33,8 +34,17 @@ from homeassistant.helpers import recorder as recorder_helper import homeassistant.util.dt as dt_util from .common import async_wait_recording_done, create_engine_test +from .conftest import InstrumentedMigration from tests.common import async_fire_time_changed +from tests.typing import RecorderInstanceGenerator + + +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" def _get_native_states(hass, entity_id): @@ -48,12 +58,13 @@ def _get_native_states(hass, entity_id): return states -async def test_schema_update_calls(recorder_db_url: str, hass: HomeAssistant) -> None: +async def test_schema_update_calls( + hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator +) -> None: """Test that schema migrations occur in correct order.""" assert recorder.util.async_migration_in_progress(hass) is False with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, @@ -62,26 +73,48 @@ async def test_schema_update_calls(recorder_db_url: str, hass: HomeAssistant) -> "homeassistant.components.recorder.migration._apply_update", wraps=migration._apply_update, ) as update, + patch( + "homeassistant.components.recorder.migration._migrate_schema", + wraps=migration._migrate_schema, + ) as migrate_schema, ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} - ) + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False instance = recorder.get_instance(hass) engine = instance.engine session_maker = instance.get_session - update.assert_has_calls( - [ - call(instance, hass, engine, session_maker, version + 1, 0) - for version in range(db_schema.SCHEMA_VERSION) 
- ] - ) + assert update.mock_calls == [ + call(instance, hass, engine, session_maker, version + 1, 0) + for version in range(db_schema.SCHEMA_VERSION) + ] + assert migrate_schema.mock_calls == [ + call( + instance, + hass, + engine, + session_maker, + migration.SchemaValidationStatus(0, True, set(), 0), + 42, + ), + call( + instance, + hass, + engine, + session_maker, + migration.SchemaValidationStatus(42, True, set(), 0), + db_schema.SCHEMA_VERSION, + ), + ] -async def test_migration_in_progress(recorder_db_url: str, hass: HomeAssistant) -> None: +async def test_migration_in_progress( + hass: HomeAssistant, + recorder_db_url: str, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, +) -> None: """Test that we can check for migration in progress.""" if recorder_db_url.startswith("mysql://"): # The database drop at the end of this test currently hangs on MySQL @@ -94,38 +127,55 @@ async def test_migration_in_progress(recorder_db_url: str, hass: HomeAssistant) assert recorder.util.async_migration_in_progress(hass) is False with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + await async_setup_recorder_instance( + hass, wait_recorder=False, wait_recorder_setup=False ) - await recorder.get_instance(hass).async_migration_event.wait() + await hass.async_add_executor_job(instrument_migration.migration_started.wait) assert recorder.util.async_migration_in_progress(hass) is True + + # Let migration finish + instrument_migration.migration_stall.set() await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION +@pytest.mark.parametrize( + ( + 
"func_to_patch", + "expected_setup_result", + "expected_pn_create", + "expected_pn_dismiss", + ), + [ + ("migrate_schema_non_live", False, 1, 0), + ("migrate_schema_live", True, 2, 1), + ], +) async def test_database_migration_failed( - recorder_db_url: str, hass: HomeAssistant + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + func_to_patch: str, + expected_setup_result: bool, + expected_pn_create: int, + expected_pn_dismiss: int, ) -> None: """Test we notify if the migration fails.""" assert recorder.util.async_migration_in_progress(hass) is False with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch( - "homeassistant.components.recorder.migration._apply_update", + f"homeassistant.components.recorder.migration.{func_to_patch}", side_effect=ValueError, ), patch( @@ -137,9 +187,8 @@ async def test_database_migration_failed( side_effect=pn.dismiss, ) as mock_dismiss, ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + await async_setup_recorder_instance( + hass, wait_recorder=False, expected_setup_result=expected_setup_result ) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) @@ -148,67 +197,220 @@ async def test_database_migration_failed( await hass.async_block_till_done() assert recorder.util.async_migration_in_progress(hass) is False - assert len(mock_create.mock_calls) == 2 - assert len(mock_dismiss.mock_calls) == 1 + assert len(mock_create.mock_calls) == expected_pn_create + assert len(mock_dismiss.mock_calls) == expected_pn_dismiss -async def test_database_migration_encounters_corruption( - recorder_db_url: str, hass: HomeAssistant +@pytest.mark.parametrize( + ( + "patch_version", + "func_to_patch", + "expected_setup_result", + "expected_pn_create", + "expected_pn_dismiss", 
+ ), + [ + # Test error handling in _update_states_table_with_foreign_key_options + (11, "homeassistant.components.recorder.migration.DropConstraint", False, 1, 0), + # Test error handling in _modify_columns + (12, "sqlalchemy.engine.base.Connection.execute", False, 1, 0), + # Test error handling in _drop_foreign_key_constraints + (44, "homeassistant.components.recorder.migration.DropConstraint", False, 2, 1), + ], +) +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +async def test_database_migration_failed_non_sqlite( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, + patch_version: int, + func_to_patch: str, + expected_setup_result: bool, + expected_pn_create: int, + expected_pn_dismiss: int, ) -> None: - """Test we move away the database if its corrupt.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite, wiping the database on error only happens - # with SQLite. 
- return + """Test we notify if the migration fails.""" + assert recorder.util.async_migration_in_progress(hass) is False + instrument_migration.stall_on_schema_version = patch_version + + with ( + patch( + "homeassistant.components.recorder.core.create_engine", + new=create_engine_test, + ), + patch( + "homeassistant.components.persistent_notification.create", + side_effect=pn.create, + ) as mock_create, + patch( + "homeassistant.components.persistent_notification.dismiss", + side_effect=pn.dismiss, + ) as mock_dismiss, + ): + await async_setup_recorder_instance( + hass, + wait_recorder=False, + wait_recorder_setup=False, + expected_setup_result=expected_setup_result, + ) + # Wait for migration to reach the schema version we want to break + await hass.async_add_executor_job( + instrument_migration.apply_update_stalled.wait + ) + + # Make it fail + with patch( + func_to_patch, + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ): + instrument_migration.migration_stall.set() + hass.states.async_set("my.entity", "on", {}) + hass.states.async_set("my.entity", "off", {}) + await hass.async_block_till_done() + await hass.async_add_executor_job(recorder.get_instance(hass).join) + await hass.async_block_till_done() + + assert instrument_migration.apply_update_version == patch_version + assert recorder.util.async_migration_in_progress(hass) is False + assert len(mock_create.mock_calls) == expected_pn_create + assert len(mock_dismiss.mock_calls) == expected_pn_dismiss + + +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +async def test_live_database_migration_encounters_corruption( + hass: HomeAssistant, + recorder_db_url: str, + async_setup_recorder_instance: RecorderInstanceGenerator, +) -> None: + """Test we move away the database if its corrupt. + + This test is specific for SQLite, wiping the database on error only happens + with SQLite. 
+ """ assert recorder.util.async_migration_in_progress(hass) is False sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError( + "database disk image is malformed" + ) with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.migration._schema_is_current", side_effect=[False], ), patch( - "homeassistant.components.recorder.migration.migrate_schema", + "homeassistant.components.recorder.migration.migrate_schema_live", side_effect=sqlite3_exception, ), patch( "homeassistant.components.recorder.core.move_away_broken_database" ) as move_away, patch( - "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", - ), + "homeassistant.components.recorder.core.Recorder._setup_run", + autospec=True, + wraps=recorder.Recorder._setup_run, + ) as setup_run, ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} - ) + await async_setup_recorder_instance(hass) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False - assert move_away.called + move_away.assert_called_once() + setup_run.assert_called_once() -async def test_database_migration_encounters_corruption_not_sqlite( - recorder_db_url: str, hass: HomeAssistant +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +async def test_non_live_database_migration_encounters_corruption( + hass: HomeAssistant, + recorder_db_url: str, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: - """Test we fail on database error when we cannot recover.""" + """Test we move away the database if its corrupt. 
+ + This test is specific for SQLite, wiping the database on error only happens + with SQLite. + """ + assert recorder.util.async_migration_in_progress(hass) is False + sqlite3_exception = DatabaseError("statement", {}, []) + sqlite3_exception.__cause__ = sqlite3.DatabaseError( + "database disk image is malformed" + ) + with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.migration._schema_is_current", side_effect=[False], ), patch( - "homeassistant.components.recorder.migration.migrate_schema", + "homeassistant.components.recorder.migration.migrate_schema_live", + ) as migrate_schema_live, + patch( + "homeassistant.components.recorder.migration.migrate_schema_non_live", + side_effect=sqlite3_exception, + ), + patch( + "homeassistant.components.recorder.core.move_away_broken_database" + ) as move_away, + patch( + "homeassistant.components.recorder.core.Recorder._setup_run", + autospec=True, + wraps=recorder.Recorder._setup_run, + ) as setup_run, + ): + await async_setup_recorder_instance(hass) + hass.states.async_set("my.entity", "on", {}) + hass.states.async_set("my.entity", "off", {}) + await async_wait_recording_done(hass) + + assert recorder.util.async_migration_in_progress(hass) is False + move_away.assert_called_once() + migrate_schema_live.assert_not_called() + setup_run.assert_called_once() + + +@pytest.mark.parametrize( + ( + "live_migration", + "func_to_patch", + "expected_setup_result", + "expected_pn_create", + "expected_pn_dismiss", + ), + [ + (True, "migrate_schema_live", True, 2, 1), + (False, "migrate_schema_non_live", False, 1, 0), + ], +) +async def test_database_migration_encounters_corruption_not_sqlite( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + live_migration: bool, + func_to_patch: str, + expected_setup_result: bool, + expected_pn_create: int, + expected_pn_dismiss: int, +) -> None: + """Test we fail on database error when we cannot 
recover.""" + assert recorder.util.async_migration_in_progress(hass) is False + + with ( + patch( + "homeassistant.components.recorder.migration._schema_is_current", + side_effect=[False], + ), + patch( + f"homeassistant.components.recorder.migration.{func_to_patch}", side_effect=DatabaseError("statement", {}, []), ), patch( @@ -222,10 +424,13 @@ async def test_database_migration_encounters_corruption_not_sqlite( "homeassistant.components.persistent_notification.dismiss", side_effect=pn.dismiss, ) as mock_dismiss, + patch( + "homeassistant.components.recorder.core.migration.live_migration", + return_value=live_migration, + ), ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + await async_setup_recorder_instance( + hass, wait_recorder=False, expected_setup_result=expected_setup_result ) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) @@ -235,39 +440,39 @@ async def test_database_migration_encounters_corruption_not_sqlite( assert recorder.util.async_migration_in_progress(hass) is False assert not move_away.called - assert len(mock_create.mock_calls) == 2 - assert len(mock_dismiss.mock_calls) == 1 + assert len(mock_create.mock_calls) == expected_pn_create + assert len(mock_dismiss.mock_calls) == expected_pn_dismiss async def test_events_during_migration_are_queued( - recorder_db_url: str, hass: HomeAssistant + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, ) -> None: """Test that events during migration are queued.""" assert recorder.util.async_migration_in_progress(hass) is False with ( - patch( - "homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", - True, - ), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, - 
"recorder", - {"recorder": {"db_url": recorder_db_url, "commit_interval": 0}}, + await async_setup_recorder_instance( + hass, {"commit_interval": 0}, wait_recorder=False, wait_recorder_setup=False ) + await hass.async_add_executor_job(instrument_migration.migration_started.wait) + assert recorder.util.async_migration_in_progress(hass) is True hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) + + # Let migration finish + instrument_migration.migration_stall.set() await recorder.get_instance(hass).async_recorder_ready.wait() await async_wait_recording_done(hass) @@ -279,27 +484,29 @@ async def test_events_during_migration_are_queued( async def test_events_during_migration_queue_exhausted( - recorder_db_url: str, hass: HomeAssistant + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, ) -> None: """Test that events during migration takes so long the queue is exhausted.""" assert recorder.util.async_migration_in_progress(hass) is False with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize + ), ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, - "recorder", - {"recorder": {"db_url": recorder_db_url, "commit_interval": 0}}, + await async_setup_recorder_instance( + hass, {"commit_interval": 0}, wait_recorder=False, wait_recorder_setup=False ) + await 
hass.async_add_executor_job(instrument_migration.migration_started.wait) + assert recorder.util.async_migration_in_progress(hass) is True hass.states.async_set("my.entity", "on", {}) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) @@ -307,6 +514,9 @@ async def test_events_during_migration_queue_exhausted( async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() hass.states.async_set("my.entity", "off", {}) + + # Let migration finish + instrument_migration.migration_stall.set() await recorder.get_instance(hass).async_recorder_ready.wait() await async_wait_recording_done(hass) @@ -325,10 +535,23 @@ async def test_events_during_migration_queue_exhausted( @pytest.mark.parametrize( ("start_version", "live"), - [(0, True), (16, True), (18, True), (22, True), (25, True)], + [ + (0, False), + (9, False), + (16, False), + (18, False), + (22, False), + (25, False), + (43, True), + ], ) async def test_schema_migrate( - recorder_db_url: str, hass: HomeAssistant, start_version, live + hass: HomeAssistant, + recorder_db_url: str, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, + start_version, + live, ) -> None: """Test the full schema migration logic. @@ -337,11 +560,6 @@ async def test_schema_migrate( inspection could quickly become quite cumbersome. 
""" - migration_done = threading.Event() - migration_stall = threading.Event() - migration_version = None - real_migrate_schema = recorder.migration.migrate_schema - real_apply_update = recorder.migration._apply_update real_create_index = recorder.migration._create_index create_calls = 0 @@ -368,33 +586,6 @@ async def test_schema_migrate( start=self.recorder_runs_manager.recording_start, created=dt_util.utcnow() ) - def _instrument_migrate_schema(*args): - """Control migration progress and check results.""" - nonlocal migration_done - nonlocal migration_version - try: - real_migrate_schema(*args) - except Exception: - migration_done.set() - raise - - # Check and report the outcome of the migration; if migration fails - # the recorder will silently create a new database. - with session_scope(hass=hass, read_only=True) as session: - res = ( - session.query(db_schema.SchemaChanges) - .order_by(db_schema.SchemaChanges.change_id.desc()) - .first() - ) - migration_version = res.schema_version - migration_done.set() - - def _instrument_apply_update(*args): - """Control migration progress.""" - nonlocal migration_stall - migration_stall.wait() - real_apply_update(*args) - def _sometimes_failing_create_index(*args): """Make the first index create raise a retryable error to ensure we retry.""" if recorder_db_url.startswith("mysql://"): @@ -407,7 +598,6 @@ async def test_schema_migrate( real_create_index(*args) with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_test, @@ -417,22 +607,11 @@ async def test_schema_migrate( side_effect=_mock_setup_run, autospec=True, ) as setup_run, - patch( - "homeassistant.components.recorder.migration.migrate_schema", - wraps=_instrument_migrate_schema, - ), - patch( - "homeassistant.components.recorder.migration._apply_update", - wraps=_instrument_apply_update, - ) as apply_update_mock, 
patch("homeassistant.components.recorder.util.time.sleep"), patch( "homeassistant.components.recorder.migration._create_index", wraps=_sometimes_failing_create_index, ), - patch( - "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", - ), patch( "homeassistant.components.recorder.Recorder._process_state_changed_event_into_session", ), @@ -443,24 +622,23 @@ async def test_schema_migrate( "homeassistant.components.recorder.Recorder._pre_process_startup_events", ), ): - recorder_helper.async_initialize_recorder(hass) - hass.async_create_task( - async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} - ) + await async_setup_recorder_instance( + hass, wait_recorder=False, wait_recorder_setup=live ) + await hass.async_add_executor_job(instrument_migration.migration_started.wait) + assert recorder.util.async_migration_in_progress(hass) is True await recorder_helper.async_wait_recorder(hass) assert recorder.util.async_migration_in_progress(hass) is True assert recorder.util.async_migration_is_live(hass) == live - migration_stall.set() + instrument_migration.migration_stall.set() await hass.async_block_till_done() - await hass.async_add_executor_job(migration_done.wait) + await hass.async_add_executor_job(instrument_migration.live_migration_done.wait) await async_wait_recording_done(hass) - assert migration_version == db_schema.SCHEMA_VERSION + assert instrument_migration.migration_version == db_schema.SCHEMA_VERSION assert setup_run.called assert recorder.util.async_migration_in_progress(hass) is not True - assert apply_update_mock.called + assert instrument_migration.apply_update_mock.called def test_invalid_update(hass: HomeAssistant) -> None: @@ -633,3 +811,378 @@ def test_raise_if_exception_missing_empty_cause_str() -> None: with pytest.raises(ProgrammingError): migration.raise_if_exception_missing_str(programming_exc, ["not present"]) + + +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) 
+@pytest.mark.usefixtures("skip_by_db_engine") +def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None: + """Test that we can rebuild the states table in SQLite. + + This test is specific for SQLite. + """ + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + with session_scope(session=session_maker()) as session: + session.add(States(state="on")) + session.commit() + + assert migration.rebuild_sqlite_table(session_maker, engine, States) is True + + with session_scope(session=session_maker()) as session: + assert session.query(States).count() == 1 + assert session.query(States).first().state == "on" + + engine.dispose() + + +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_rebuild_sqlite_states_table_missing_fails( + recorder_db_url: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test handling missing states table when attempting rebuild. + + This test is specific for SQLite. 
+ """ + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + + with session_scope(session=session_maker()) as session: + session.add(Events(event_type="state_changed", event_data="{}")) + session.connection().execute(text("DROP TABLE states")) + session.commit() + + assert migration.rebuild_sqlite_table(session_maker, engine, States) is False + assert "Error recreating SQLite table states" in caplog.text + caplog.clear() + + # Now rebuild the events table to make sure the database did not + # get corrupted + assert migration.rebuild_sqlite_table(session_maker, engine, Events) is True + + with session_scope(session=session_maker()) as session: + assert session.query(Events).count() == 1 + assert session.query(Events).first().event_type == "state_changed" + assert session.query(Events).first().event_data == "{}" + + engine.dispose() + + +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_rebuild_sqlite_states_table_extra_columns( + recorder_db_url: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test handling extra columns when rebuilding the states table. + + This test is specific for SQLite. 
+ """ + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + with session_scope(session=session_maker()) as session: + session.add(States(state="on")) + session.commit() + session.connection().execute( + text("ALTER TABLE states ADD COLUMN extra_column TEXT") + ) + + assert migration.rebuild_sqlite_table(session_maker, engine, States) is True + assert "Error recreating SQLite table states" not in caplog.text + + with session_scope(session=session_maker()) as session: + assert session.query(States).count() == 1 + assert session.query(States).first().state == "on" + + engine.dispose() + + +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: + """Test we can drop and then restore foreign keys. + + This is not supported on SQLite + """ + + constraints_to_recreate = ( + ("events", "data_id", "event_data", "data_id"), + ("states", "event_id", None, None), # This won't be found + ("states", "old_state_id", "states", "state_id"), + ) + + db_engine = recorder_db_url.partition("://")[0] + + expected_dropped_constraints = { + "mysql": [ + ( + "events", + "data_id", + { + "constrained_columns": ["data_id"], + "name": ANY, + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "constrained_columns": ["old_state_id"], + "name": ANY, + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + "postgresql": [ + ( + "events", + "data_id", + { + "comment": None, + "constrained_columns": ["data_id"], + "name": "events_data_id_fkey", + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": 
"event_data", + }, + ), + ( + "states", + "old_state_id", + { + "comment": None, + "constrained_columns": ["old_state_id"], + "name": "states_old_state_id_fkey", + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + } + + def find_constraints( + engine: Engine, table: str, column: str + ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: + inspector = inspect(engine) + return [ + (table, column, foreign_key) + for foreign_key in inspector.get_foreign_keys(table) + if foreign_key["name"] and foreign_key["constrained_columns"] == [column] + ] + + engine = create_engine(recorder_db_url) + db_schema.Base.metadata.create_all(engine) + + matching_constraints_1 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_1 == expected_dropped_constraints[db_engine] + + with Session(engine) as session: + session_maker = Mock(return_value=session) + for table, column, _, _ in constraints_to_recreate: + migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + + # Check we don't find the constrained columns again (they are removed) + matching_constraints_2 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_2 == [] + + # Restore the constraints + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_recreate + ) + + # Check we do find the constrained columns again (they are restored) + matching_constraints_3 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_3 == 
expected_dropped_constraints[db_engine] + + engine.dispose() + + +def test_restore_foreign_key_constraints_with_error( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test we can drop and then restore foreign keys. + + This is not supported on SQLite + """ + + constraints_to_restore = [("events", "data_id", "event_data", "data_id")] + + connection = Mock() + connection.execute = Mock(side_effect=InternalError(None, None, None)) + session = Mock() + session.connection = Mock(return_value=connection) + instance = Mock() + instance.get_session = Mock(return_value=session) + engine = Mock() + + session_maker = Mock(return_value=session) + with pytest.raises(InternalError): + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_restore + ) + + assert "Could not update foreign options in events table" in caplog.text + + +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_restore_foreign_key_constraints_with_integrity_error( + recorder_db_url: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test we can drop and then restore foreign keys. 
+ + This is not supported on SQLite + """ + + constraints = ( + ("events", "data_id", "event_data", "data_id", Events), + ("states", "old_state_id", "states", "state_id", States), + ) + + engine = create_engine(recorder_db_url) + db_schema.Base.metadata.create_all(engine) + + # Drop constraints + with Session(engine) as session: + session_maker = Mock(return_value=session) + for table, column, _, _, _ in constraints: + migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + + # Add rows violating the constraints + with Session(engine) as session: + for _, column, _, _, table_class in constraints: + session.add(table_class(**{column: 123})) + session.add(table_class()) + # Insert a States row referencing the row with an invalid foreign reference + session.add(States(old_state_id=1)) + session.commit() + + # Check we could insert the rows + with Session(engine) as session: + assert session.query(Events).count() == 2 + assert session.query(States).count() == 3 + + # Restore constraints + to_restore = [ + (table, column, foreign_table, foreign_column) + for table, column, foreign_table, foreign_column, _ in constraints + ] + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints(session_maker, engine, to_restore) + + # Check the violating row has been deleted from the Events table + with Session(engine) as session: + assert session.query(Events).count() == 1 + assert session.query(States).count() == 3 + + engine.dispose() + + assert ( + "Could not update foreign options in events table, " + "will delete violations and try again" + ) in caplog.text + + +def test_delete_foreign_key_violations_unsupported_engine( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test calling _delete_foreign_key_violations with an unsupported engine.""" + + connection = Mock() + connection.execute = Mock(side_effect=InternalError(None, None, None)) + session = Mock() + session.connection = 
Mock(return_value=connection) + instance = Mock() + instance.get_session = Mock(return_value=session) + engine = Mock() + engine.dialect = Mock() + engine.dialect.name = "sqlite" + + session_maker = Mock(return_value=session) + with pytest.raises( + RuntimeError, match="_delete_foreign_key_violations not supported for sqlite" + ): + migration._delete_foreign_key_violations(session_maker, engine, "", "", "", "") + + +def test_drop_foreign_key_constraints_unsupported_engine( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test calling _drop_foreign_key_constraints with an unsupported engine.""" + + connection = Mock() + connection.execute = Mock(side_effect=InternalError(None, None, None)) + session = Mock() + session.connection = Mock(return_value=connection) + instance = Mock() + instance.get_session = Mock(return_value=session) + engine = Mock() + engine.dialect = Mock() + engine.dialect.name = "sqlite" + + session_maker = Mock(return_value=session) + with pytest.raises( + RuntimeError, match="_drop_foreign_key_constraints not supported for sqlite" + ): + migration._drop_foreign_key_constraints(session_maker, engine, "", "") + + +def test_update_states_table_with_foreign_key_options_unsupported_engine( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test calling function with an unsupported engine. + + This tests _update_states_table_with_foreign_key_options. 
+ """ + + connection = Mock() + connection.execute = Mock(side_effect=InternalError(None, None, None)) + session = Mock() + session.connection = Mock(return_value=connection) + instance = Mock() + instance.get_session = Mock(return_value=session) + engine = Mock() + engine.dialect = Mock() + engine.dialect.name = "sqlite" + + session_maker = Mock(return_value=session) + with pytest.raises( + RuntimeError, + match="_update_states_table_with_foreign_key_options not supported for sqlite", + ): + migration._update_states_table_with_foreign_key_options(session_maker, engine) diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 8fda495cf60..b2a83ae8313 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -12,7 +12,6 @@ import pytest from sqlalchemy import create_engine, inspect from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session -from typing_extensions import AsyncGenerator from homeassistant.components import recorder from homeassistant.components.recorder import ( @@ -33,13 +32,7 @@ from homeassistant.components.recorder.queries import ( get_migration_changes, select_event_type_ids, ) -from homeassistant.components.recorder.tasks import ( - EntityIDMigrationTask, - EntityIDPostMigrationTask, - EventsContextIDMigrationTask, - EventTypeIDMigrationTask, - StatesContextIDMigrationTask, -) +from homeassistant.components.recorder.tasks import EntityIDPostMigrationTask from homeassistant.components.recorder.util import ( execute_stmt_lambda_element, session_scope, @@ -49,6 +42,7 @@ import homeassistant.util.dt as dt_util from homeassistant.util.ulid import bytes_to_ulid, ulid_at_time, ulid_to_bytes from .common import ( + MockMigrationTask, async_attach_db_engine, async_recorder_block_till_done, async_wait_recording_done, @@ -60,6 +54,13 @@ CREATE_ENGINE_TARGET = 
"homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + async def _async_wait_migration_done(hass: HomeAssistant) -> None: """Wait for the migration to be done.""" await recorder.get_instance(hass).async_block_till_done() @@ -110,27 +111,17 @@ def db_schema_32(): patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", core.RecorderTask), + patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): yield -@pytest.fixture(name="legacy_recorder_mock") -async def legacy_recorder_mock_fixture( - recorder_mock: Recorder, -) -> AsyncGenerator[Recorder]: - """Fixture for legacy recorder mock.""" - with patch.object(recorder_mock.states_meta_manager, "active", False): - yield recorder_mock - - @pytest.mark.parametrize("enable_migrate_context_ids", [True]) async def test_migrate_events_context_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -224,7 +215,7 @@ async def test_migrate_events_context_ids( ) ) - await instance.async_add_executor_job(_insert_events) + await recorder_mock.async_add_executor_job(_insert_events) await async_wait_recording_done(hass) now = dt_util.utcnow() @@ -233,7 +224,8 @@ async def test_migrate_events_context_ids( with freeze_time(now): # This is a threadsafe way to 
add a task to the recorder - instance.queue_task(EventsContextIDMigrationTask()) + migrator = migration.EventsContextIDMigration(None, None) + recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) def _object_as_dict(obj): @@ -260,7 +252,7 @@ async def test_migrate_events_context_ids( assert len(events) == 6 return {event.event_type: _object_as_dict(event) for event in events} - events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"] assert old_uuid_context_id_event["context_id"] is None @@ -331,7 +323,9 @@ async def test_migrate_events_context_ids( event_with_garbage_context_id_no_time_fired_ts["context_parent_id_bin"] is None ) - migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) + migration_changes = await recorder_mock.async_add_executor_job( + _get_migration_id, hass + ) assert ( migration_changes[migration.EventsContextIDMigration.migration_id] == migration.EventsContextIDMigration.migration_version @@ -340,10 +334,9 @@ async def test_migrate_events_context_ids( @pytest.mark.parametrize("enable_migrate_context_ids", [True]) async def test_migrate_states_context_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -419,10 +412,11 @@ async def test_migrate_states_context_ids( ) ) - await instance.async_add_executor_job(_insert_states) + await recorder_mock.async_add_executor_job(_insert_states) await async_wait_recording_done(hass) - 
instance.queue_task(StatesContextIDMigrationTask()) + migrator = migration.StatesContextIDMigration(None, None) + recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) def _object_as_dict(obj): @@ -449,7 +443,9 @@ async def test_migrate_states_context_ids( assert len(events) == 6 return {state.entity_id: _object_as_dict(state) for state in events} - states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) + states_by_entity_id = await recorder_mock.async_add_executor_job( + _fetch_migrated_states + ) old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"] assert old_uuid_context_id["context_id"] is None @@ -524,7 +520,9 @@ async def test_migrate_states_context_ids( == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee" ) - migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) + migration_changes = await recorder_mock.async_add_executor_job( + _get_migration_id, hass + ) assert ( migration_changes[migration.StatesContextIDMigration.migration_id] == migration.StatesContextIDMigration.migration_version @@ -533,10 +531,9 @@ async def test_migrate_states_context_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) async def test_migrate_event_type_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate event_types to the EventTypes table.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -563,11 +560,12 @@ async def test_migrate_event_type_ids( ) ) - await instance.async_add_executor_job(_insert_events) + await recorder_mock.async_add_executor_job(_insert_events) await async_wait_recording_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EventTypeIDMigrationTask()) + 
migrator = migration.EventTypeIDMigration(None, None) + recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) def _fetch_migrated_events(): @@ -599,21 +597,23 @@ async def test_migrate_event_type_ids( ) return result - events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) assert len(events_by_type["event_type_one"]) == 2 assert len(events_by_type["event_type_two"]) == 1 def _get_many(): with session_scope(hass=hass, read_only=True) as session: - return instance.event_type_manager.get_many( + return recorder_mock.event_type_manager.get_many( ("event_type_one", "event_type_two"), session ) - mapped = await instance.async_add_executor_job(_get_many) + mapped = await recorder_mock.async_add_executor_job(_get_many) assert mapped["event_type_one"] is not None assert mapped["event_type_two"] is not None - migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) + migration_changes = await recorder_mock.async_add_executor_job( + _get_migration_id, hass + ) assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version @@ -621,11 +621,8 @@ async def test_migrate_event_type_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -async def test_migrate_entity_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -652,11 +649,12 @@ async def test_migrate_entity_ids( ) ) - await instance.async_add_executor_job(_insert_states) + await 
recorder_mock.async_add_executor_job(_insert_states) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EntityIDMigrationTask()) + migrator = migration.EntityIDMigration(None, None) + recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) await _async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -683,11 +681,15 @@ async def test_migrate_entity_ids( ) return result - states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) + states_by_entity_id = await recorder_mock.async_add_executor_job( + _fetch_migrated_states + ) assert len(states_by_entity_id["sensor.two"]) == 2 assert len(states_by_entity_id["sensor.one"]) == 1 - migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) + migration_changes = await recorder_mock.async_add_executor_job( + _get_migration_id, hass + ) assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version @@ -696,10 +698,9 @@ async def test_migrate_entity_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) async def test_post_migrate_entity_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -726,11 +727,11 @@ async def test_post_migrate_entity_ids( ) ) - await instance.async_add_executor_job(_insert_events) + await recorder_mock.async_add_executor_job(_insert_events) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EntityIDPostMigrationTask()) + recorder_mock.queue_task(EntityIDPostMigrationTask()) await 
_async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -742,7 +743,7 @@ async def test_post_migrate_entity_ids( assert len(states) == 3 return {state.state: state.entity_id for state in states} - states_by_state = await instance.async_add_executor_job(_fetch_migrated_states) + states_by_state = await recorder_mock.async_add_executor_job(_fetch_migrated_states) assert states_by_state["one_1"] is None assert states_by_state["two_2"] is None assert states_by_state["two_1"] is None @@ -750,10 +751,9 @@ async def test_post_migrate_entity_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) async def test_migrate_null_entity_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -783,11 +783,12 @@ async def test_migrate_null_entity_ids( ), ) - await instance.async_add_executor_job(_insert_states) + await recorder_mock.async_add_executor_job(_insert_states) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EntityIDMigrationTask()) + migrator = migration.EntityIDMigration(None, None) + recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) await _async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -814,7 +815,9 @@ async def test_migrate_null_entity_ids( ) return result - states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) + states_by_entity_id = await recorder_mock.async_add_executor_job( + _fetch_migrated_states + ) assert len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000 assert len(states_by_entity_id["sensor.one"]) == 2 @@ -822,7 +825,7 @@ async def test_migrate_null_entity_ids( with 
session_scope(hass=hass, read_only=True) as session: return dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await instance.async_add_executor_job(_get_migration_id) + migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version @@ -831,10 +834,9 @@ async def test_migrate_null_entity_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) async def test_migrate_null_event_type_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate event_types to the EventTypes table when the event_type is NULL.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -864,11 +866,12 @@ async def test_migrate_null_event_type_ids( ), ) - await instance.async_add_executor_job(_insert_events) + await recorder_mock.async_add_executor_job(_insert_events) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EventTypeIDMigrationTask()) + migrator = migration.EventTypeIDMigration(None, None) + recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) def _fetch_migrated_events(): @@ -900,7 +903,7 @@ async def test_migrate_null_event_type_ids( ) return result - events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) assert len(events_by_type["event_type_one"]) == 2 assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000 @@ -908,7 +911,7 @@ async def test_migrate_null_event_type_ids( with session_scope(hass=hass, read_only=True) as session: return 
dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await instance.async_add_executor_job(_get_migration_id) + migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version @@ -916,11 +919,9 @@ async def test_migrate_null_event_type_ids( async def test_stats_timestamp_conversion_is_reentrant( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test stats migration is reentrant.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) importlib.import_module(SCHEMA_MODULE) @@ -932,7 +933,7 @@ async def test_stats_timestamp_conversion_is_reentrant( def _do_migration(): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, instance, instance.get_session, instance.engine + hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine ) def _insert_fake_metadata(): @@ -949,7 +950,7 @@ async def test_stats_timestamp_conversion_is_reentrant( ) ) - def _insert_pre_timestamp_stat(date_time: datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add( old_db_schema.StatisticsShortTerm( @@ -964,7 +965,7 @@ async def test_stats_timestamp_conversion_is_reentrant( ) ) - def _insert_post_timestamp_stat(date_time: datetime) -> None: + def _insert_post_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add( db_schema.StatisticsShortTerm( @@ -1070,11 +1071,9 @@ async def test_stats_timestamp_conversion_is_reentrant( async def test_stats_timestamp_with_one_by_one( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, 
recorder_mock: Recorder ) -> None: """Test stats migration with one by one.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) importlib.import_module(SCHEMA_MODULE) @@ -1091,7 +1090,7 @@ async def test_stats_timestamp_with_one_by_one( side_effect=IntegrityError("test", "test", "test"), ): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, instance, instance.get_session, instance.engine + hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine ) def _insert_fake_metadata(): @@ -1108,7 +1107,7 @@ async def test_stats_timestamp_with_one_by_one( ) ) - def _insert_pre_timestamp_stat(date_time: datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1135,7 +1134,7 @@ async def test_stats_timestamp_with_one_by_one( ) ) - def _insert_post_timestamp_stat(date_time: datetime) -> None: + def _insert_post_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1291,11 +1290,9 @@ async def test_stats_timestamp_with_one_by_one( async def test_stats_timestamp_with_one_by_one_removes_duplicates( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test stats migration with one by one removes duplicates.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) importlib.import_module(SCHEMA_MODULE) @@ -1319,7 +1316,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( ), ): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, instance, instance.get_session, instance.engine + hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine ) def _insert_fake_metadata(): @@ -1336,7 +1333,7 @@ 
async def test_stats_timestamp_with_one_by_one_removes_duplicates( ) ) - def _insert_pre_timestamp_stat(date_time: datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1363,7 +1360,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( ) ) - def _insert_post_timestamp_stat(date_time: datetime) -> None: + def _insert_post_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index 4f59edb097f..bdd881a3a7b 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -1,7 +1,6 @@ """Test run time migrations are remembered in the migration_changes table.""" import importlib -from pathlib import Path import sys from unittest.mock import patch @@ -11,8 +10,8 @@ from sqlalchemy.orm import Session from homeassistant.components import recorder from homeassistant.components.recorder import core, migration, statistics +from homeassistant.components.recorder.migration import MigrationTask from homeassistant.components.recorder.queries import get_migration_changes -from homeassistant.components.recorder.tasks import StatesContextIDMigrationTask from homeassistant.components.recorder.util import ( execute_stmt_lambda_element, session_scope, @@ -20,7 +19,11 @@ from homeassistant.components.recorder.util import ( from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant -from .common import async_recorder_block_till_done, async_wait_recording_done +from .common import ( + MockMigrationTask, + async_recorder_block_till_done, + async_wait_recording_done, +) from tests.common import 
async_test_home_assistant from tests.typing import RecorderInstanceGenerator @@ -29,6 +32,13 @@ CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + async def _async_wait_migration_done(hass: HomeAssistant) -> None: """Wait for the migration to be done.""" await recorder.get_instance(hass).async_block_till_done() @@ -63,10 +73,10 @@ def _create_engine_test(*args, **kwargs): @pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migration_changes_prevent_trying_to_migrate_again( - async_setup_recorder_instance: RecorderInstanceGenerator, - tmp_path: Path, - recorder_db_url: str, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test that we do not try to migrate when migration_changes indicate its already migrated. @@ -76,15 +86,8 @@ async def test_migration_changes_prevent_trying_to_migrate_again( 2. With current schema so the migration happens 3. 
With current schema to verify we do not have to query to see if the migration is done """ - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test uses a test database between runs so its - # SQLite specific - return - config = { - recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"), - recorder.CONF_COMMIT_INTERVAL: 1, - } + config = {recorder.CONF_COMMIT_INTERVAL: 1} importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -100,11 +103,13 @@ async def test_migration_changes_prevent_trying_to_migrate_again( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", core.RecorderTask), + patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): - async with async_test_home_assistant() as hass: - await async_setup_recorder_instance(hass, config) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass, config), + ): await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -113,8 +118,7 @@ async def test_migration_changes_prevent_trying_to_migrate_again( await hass.async_stop() # Now start again with current db schema - async with async_test_home_assistant() as hass: - await async_setup_recorder_instance(hass, config) + async with async_test_home_assistant() as hass, async_test_recorder(hass, config): await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -138,19 +142,21 @@ async def test_migration_changes_prevent_trying_to_migrate_again( original_queue_task(self, task) # Finally verify we did not call needs_migrate_query on StatesContextIDMigration - async with async_test_home_assistant() as hass: - with ( - patch( - 
"homeassistant.components.recorder.core.Recorder.queue_task", - _queue_task, - ), - patch.object( - migration.StatesContextIDMigration, - "needs_migrate_query", - side_effect=RuntimeError("Should not be called"), - ), + with ( + patch( + "homeassistant.components.recorder.core.Recorder.queue_task", + _queue_task, + ), + patch.object( + migration.StatesContextIDMigration, + "needs_migrate_query", + side_effect=RuntimeError("Should not be called"), + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass, config), ): - await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -167,4 +173,4 @@ async def test_migration_changes_prevent_trying_to_migrate_again( await hass.async_stop() for task in tasks: - assert not isinstance(task, StatesContextIDMigrationTask) + assert not isinstance(task, MigrationTask) diff --git a/tests/components/recorder/test_models.py b/tests/components/recorder/test_models.py index d06c4a629d7..975d67a8e99 100644 --- a/tests/components/recorder/test_models.py +++ b/tests/components/recorder/test_models.py @@ -15,11 +15,9 @@ from homeassistant.components.recorder.db_schema import ( ) from homeassistant.components.recorder.models import ( LazyState, - bytes_to_ulid_or_none, process_datetime_to_timestamp, process_timestamp, process_timestamp_to_utc_isoformat, - ulid_to_bytes_or_none, ) from homeassistant.const import EVENT_STATE_CHANGED import homeassistant.core as ha @@ -428,27 +426,3 @@ async def test_process_datetime_to_timestamp_mirrors_utc_isoformat_behavior( process_datetime_to_timestamp(datetime_hst_timezone) == dt_util.parse_datetime("2016-07-09T21:00:00+00:00").timestamp() ) - - -def test_ulid_to_bytes_or_none(caplog: pytest.LogCaptureFixture) -> None: - """Test ulid_to_bytes_or_none.""" - - assert ( - ulid_to_bytes_or_none("01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1") - == 
b"\x01w\xaf\xf9w\xe5\xf8~\x1f\x87\xe1\xf8~\x1f\x87\xe1" - ) - assert ulid_to_bytes_or_none("invalid") is None - assert "invalid" in caplog.text - assert ulid_to_bytes_or_none(None) is None - - -def test_bytes_to_ulid_or_none(caplog: pytest.LogCaptureFixture) -> None: - """Test bytes_to_ulid_or_none.""" - - assert ( - bytes_to_ulid_or_none(b"\x01w\xaf\xf9w\xe5\xf8~\x1f\x87\xe1\xf8~\x1f\x87\xe1") - == "01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1" - ) - assert bytes_to_ulid_or_none(b"invalid") is None - assert "invalid" in caplog.text - assert bytes_to_ulid_or_none(None) is None diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 1ccbaada265..60ee913cb66 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -1,5 +1,6 @@ """Test data purging.""" +from collections.abc import Generator from datetime import datetime, timedelta import json import sqlite3 @@ -9,10 +10,9 @@ from freezegun import freeze_time import pytest from sqlalchemy.exc import DatabaseError, OperationalError from sqlalchemy.orm.session import Session -from typing_extensions import Generator from voluptuous.error import MultipleInvalid -from homeassistant.components import recorder +from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, Recorder from homeassistant.components.recorder.const import SupportedDialect from homeassistant.components.recorder.db_schema import ( Events, @@ -35,7 +35,6 @@ from homeassistant.components.recorder.tasks import PurgeTask from homeassistant.components.recorder.util import session_scope from homeassistant.const import EVENT_STATE_CHANGED, EVENT_THEMES_UPDATED, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util from .common import ( @@ -58,6 +57,13 @@ TEST_EVENT_TYPES = ( ) +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, 
+) -> None: + """Set up recorder.""" + + @pytest.fixture(name="use_sqlite") def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: """Pytest fixture to switch purge method.""" @@ -70,47 +76,42 @@ def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: yield -async def test_purge_big_database( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting 2/3 old states from a big database.""" - - instance = await async_setup_recorder_instance(hass) - for _ in range(12): await _add_test_states(hass, wait_recording_done=False) await async_wait_recording_done(hass) with ( - patch.object(instance, "max_bind_vars", 72), - patch.object(instance.database_engine, "max_bind_vars", 72), - session_scope(hass=hass) as session, + patch.object(recorder_mock, "max_bind_vars", 72), + patch.object(recorder_mock.database_engine, "max_bind_vars", 72), ): - states = session.query(States) - state_attributes = session.query(StateAttributes) - assert states.count() == 72 - assert state_attributes.count() == 3 + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) + assert states.count() == 72 + assert state_attributes.count() == 3 purge_before = dt_util.utcnow() - timedelta(days=4) finished = purge_old_data( - instance, + recorder_mock, purge_before, states_batch_size=1, events_batch_size=1, repack=False, ) assert not finished - assert states.count() == 24 - assert state_attributes.count() == 1 + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) + assert states.count() == 24 + assert state_attributes.count() == 1 -async def test_purge_old_states( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def 
test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_states(hass) # make sure we start with 6 states @@ -125,24 +126,30 @@ async def test_purge_old_states( events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in instance.states_manager._last_committed_id - purge_before = dt_util.utcnow() - timedelta(days=4) + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished + purge_before = dt_util.utcnow() - timedelta(days=4) + + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + with session_scope(hass=hass) as session: states_after_purge = list(session.query(States)) # Since these states are deleted in batches, we can't guarantee the order # but we can look them up by state @@ -153,27 +160,33 @@ async def test_purge_old_states( assert dontpurgeme_5.old_state_id == dontpurgeme_4.state_id assert dontpurgeme_4.old_state_id is None - finished = purge_old_data(instance, purge_before, repack=False) - assert finished + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) 
assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id - # run purge_old_data again - purge_before = dt_util.utcnow() - finished = purge_old_data( - instance, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished + # run purge_old_data again + purge_before = dt_util.utcnow() + finished = purge_old_data( + recorder_mock, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: assert states.count() == 0 assert state_attributes.count() == 0 - assert "test.recorder2" not in instance.states_manager._last_committed_id + assert "test.recorder2" not in recorder_mock.states_manager._last_committed_id # Add some more states await _add_test_states(hass) @@ -187,30 +200,27 @@ async def test_purge_old_states( events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id state_attributes = session.query(StateAttributes) assert state_attributes.count() == 3 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("recorder_mock", "skip_by_db_engine") async def test_purge_old_states_encouters_database_corruption( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, ) -> None: - """Test database image image is malformed while deleting old states.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite, wiping the database on error only happens - # with SQLite. 
- return - - await async_setup_recorder_instance(hass) + """Test database image image is malformed while deleting old states. + This test is specific for SQLite, wiping the database on error only happens + with SQLite. + """ await _add_test_states(hass) await async_wait_recording_done(hass) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database") with ( patch( @@ -221,7 +231,7 @@ async def test_purge_old_states_encouters_database_corruption( side_effect=sqlite3_exception, ), ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) @@ -234,13 +244,11 @@ async def test_purge_old_states_encouters_database_corruption( async def test_purge_old_states_encounters_temporary_mysql_error( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test retry on specific mysql operational errors.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_states(hass) await async_wait_recording_done(hass) @@ -253,9 +261,9 @@ async def test_purge_old_states_encounters_temporary_mysql_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=[mysql_exception, None], ), - patch.object(instance.engine.dialect, "name", "mysql"), + patch.object(recorder_mock.engine.dialect, "name", "mysql"), ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -264,14 +272,12 @@ async def 
test_purge_old_states_encounters_temporary_mysql_error( assert sleep_mock.called +@pytest.mark.usefixtures("recorder_mock") async def test_purge_old_states_encounters_operational_error( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: """Test error on operational errors that are not mysql does not retry.""" - await async_setup_recorder_instance(hass) - await _add_test_states(hass) await async_wait_recording_done(hass) @@ -281,7 +287,7 @@ async def test_purge_old_states_encounters_operational_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=exception, ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -290,12 +296,8 @@ async def test_purge_old_states_encounters_operational_error( assert "Error executing purge" in caplog.text -async def test_purge_old_events( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old events.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_events(hass) with session_scope(hass=hass) as session: @@ -304,38 +306,46 @@ async def test_purge_old_events( ) assert events.count() == 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished + + with session_scope(hass=hass) 
as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert not finished all_events = events.all() assert events.count() == 2, f"Should have 2 events left: {all_events}" - # we should only have 2 events left - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, + # we should only have 2 events left + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert finished assert events.count() == 2 async def test_purge_old_recorder_runs( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old recorder runs keeps current run.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_recorder_runs(hass) # make sure we start with 7 recorder runs @@ -343,35 +353,36 @@ async def test_purge_old_recorder_runs( recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished + + 
with session_scope(hass=hass) as session: + recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 1 async def test_purge_old_statistics_runs( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old statistics runs keeps the latest run.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_statistics_runs(hass) # make sure we start with 7 statistics runs @@ -379,20 +390,23 @@ async def test_purge_old_statistics_runs( statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data(instance, purge_before, repack=False) - assert not finished + # run purge_old_data() + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert not finished - finished = purge_old_data(instance, purge_before, repack=False) - assert finished + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert finished + + with session_scope(hass=hass) as session: + statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 1 @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.usefixtures("recorder_mock") async def test_purge_method( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, use_sqlite: bool, @@ -410,8 +424,6 @@ async def test_purge_method( assert run1.run_id == run2.run_id assert run1.start == run2.start - await async_setup_recorder_instance(hass) - service_data = {"keep_days": 4} await _add_test_events(hass) await _add_test_states(hass) @@ -517,8 +529,8 @@ async def test_purge_method( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) async def test_purge_edge_case( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: 
HomeAssistant, + recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test states and events are purged even if they occurred shortly before purge_before.""" @@ -552,11 +564,9 @@ async def test_purge_edge_case( attributes_id=1002, ) ) - instance = recorder.get_instance(hass) - convert_pending_events_to_event_types(instance, session) - convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(recorder_mock, session) - await async_setup_recorder_instance(hass, None) await async_wait_purge_done(hass) service_data = {"keep_days": 2} @@ -575,7 +585,7 @@ async def test_purge_edge_case( ) assert events.count() == 1 - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -590,10 +600,7 @@ async def test_purge_edge_case( assert events.count() == 0 -async def test_purge_cutoff_date( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, -) -> None: +async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test states and events are purged only if they occurred before "now() - keep_days".""" async def _add_db_entries(hass: HomeAssistant, cutoff: datetime, rows: int) -> None: @@ -656,10 +663,9 @@ async def test_purge_cutoff_date( attributes_id=1000 + row, ) ) - convert_pending_events_to_event_types(instance, session) - convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(recorder_mock, session) - instance = await async_setup_recorder_instance(hass, None) await async_wait_purge_done(hass) service_data = {"keep_days": 2} @@ -695,7 +701,7 @@ async def test_purge_cutoff_date( == 1 ) - instance.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) 
+ recorder_mock.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) await hass.async_block_till_done() await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -736,7 +742,9 @@ async def test_purge_cutoff_date( ) # Make sure we can purge everything - instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) + recorder_mock.queue_task( + PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) + ) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -747,7 +755,9 @@ async def test_purge_cutoff_date( assert state_attributes.count() == 0 # Make sure we can purge everything when the db is already empty - instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) + recorder_mock.queue_task( + PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) + ) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -759,15 +769,16 @@ async def test_purge_cutoff_date( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.parametrize( + "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}] +) async def test_purge_filtered_states( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test filtered states are purged.""" - config: ConfigType = {"exclude": {"entities": ["sensor.excluded"]}} - instance = await async_setup_recorder_instance(hass, config) - assert instance.entity_filter("sensor.excluded") is False + assert recorder_mock.entity_filter("sensor.excluded") is False def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -850,8 +861,8 @@ async def test_purge_filtered_states( time_fired_ts=dt_util.utc_to_timestamp(timestamp), ) ) - convert_pending_states_to_meta(instance, session) - convert_pending_events_to_event_types(instance, session) + 
convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(recorder_mock, session) service_data = {"keep_days": 10} _add_db_entries(hass) @@ -865,7 +876,7 @@ async def test_purge_filtered_states( assert events_keep.count() == 1 # Normal purge doesn't remove excluded entities - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -881,7 +892,7 @@ async def test_purge_filtered_states( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -929,7 +940,7 @@ async def test_purge_filtered_states( assert session.query(StateAttributes).count() == 11 # Do it again to make sure nothing changes - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -941,7 +952,7 @@ async def test_purge_filtered_states( assert session.query(StateAttributes).count() == 11 service_data = {"keep_days": 0} - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -954,15 +965,16 @@ async def test_purge_filtered_states( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.parametrize( + "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}] +) async def test_purge_filtered_states_to_empty( - async_setup_recorder_instance: 
RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test filtered states are purged all the way to an empty db.""" - config: ConfigType = {"exclude": {"entities": ["sensor.excluded"]}} - instance = await async_setup_recorder_instance(hass, config) - assert instance.entity_filter("sensor.excluded") is False + assert recorder_mock.entity_filter("sensor.excluded") is False def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -977,7 +989,7 @@ async def test_purge_filtered_states_to_empty( timestamp, event_id * days, ) - convert_pending_states_to_meta(instance, session) + convert_pending_states_to_meta(recorder_mock, session) service_data = {"keep_days": 10} _add_db_entries(hass) @@ -990,7 +1002,7 @@ async def test_purge_filtered_states_to_empty( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -1002,21 +1014,22 @@ async def test_purge_filtered_states_to_empty( # Do it again to make sure nothing changes # Why do we do this? Should we check the end result? 
- await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.parametrize( + "recorder_config", [{"exclude": {"entities": ["sensor.old_format"]}}] +) async def test_purge_without_state_attributes_filtered_states_to_empty( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test filtered legacy states without state attributes are purged all the way to an empty db.""" - config: ConfigType = {"exclude": {"entities": ["sensor.old_format"]}} - instance = await async_setup_recorder_instance(hass, config) - assert instance.entity_filter("sensor.old_format") is False + assert recorder_mock.entity_filter("sensor.old_format") is False def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -1053,8 +1066,8 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( time_fired_ts=dt_util.utc_to_timestamp(timestamp), ) ) - convert_pending_states_to_meta(instance, session) - convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(recorder_mock, session) service_data = {"keep_days": 10} _add_db_entries(hass) @@ -1067,7 +1080,7 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -1079,18 +1092,18 @@ async def test_purge_without_state_attributes_filtered_states_to_empty( 
# Do it again to make sure nothing changes # Why do we do this? Should we check the end result? - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) +@pytest.mark.parametrize( + "recorder_config", [{"exclude": {"event_types": ["EVENT_PURGE"]}}] +) async def test_purge_filtered_events( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test filtered events are purged.""" - config: ConfigType = {"exclude": {"event_types": ["EVENT_PURGE"]}} - instance = await async_setup_recorder_instance(hass, config) await async_wait_recording_done(hass) def _add_db_entries(hass: HomeAssistant) -> None: @@ -1119,11 +1132,11 @@ async def test_purge_filtered_events( timestamp, event_id, ) - convert_pending_events_to_event_types(instance, session) - convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(recorder_mock, session) service_data = {"keep_days": 10} - await instance.async_add_executor_job(_add_db_entries, hass) + await recorder_mock.async_add_executor_job(_add_db_entries, hass) await async_wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: @@ -1135,7 +1148,7 @@ async def test_purge_filtered_events( assert states.count() == 10 # Normal purge doesn't remove excluded events - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -1151,7 +1164,7 @@ async def test_purge_filtered_events( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(recorder.DOMAIN, 
SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -1169,23 +1182,26 @@ async def test_purge_filtered_events( assert states.count() == 10 +@pytest.mark.parametrize( + "recorder_config", + [ + { + "exclude": { + "event_types": ["excluded_event"], + "entities": ["sensor.excluded", "sensor.old_format"], + } + } + ], +) async def test_purge_filtered_events_state_changed( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test filtered state_changed events are purged. This should also remove all states.""" - config: ConfigType = { - "exclude": { - "event_types": ["excluded_event"], - "entities": ["sensor.excluded", "sensor.old_format"], - } - } - instance = await async_setup_recorder_instance(hass, config) # Assert entity_id is NOT excluded - assert instance.entity_filter("sensor.excluded") is False - assert instance.entity_filter("sensor.old_format") is False - assert instance.entity_filter("sensor.keep") is True - assert "excluded_event" in instance.exclude_event_types + assert recorder_mock.entity_filter("sensor.excluded") is False + assert recorder_mock.entity_filter("sensor.old_format") is False + assert recorder_mock.entity_filter("sensor.keep") is True + assert "excluded_event" in recorder_mock.exclude_event_types def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -1258,8 +1274,8 @@ async def test_purge_filtered_events_state_changed( last_updated_ts=dt_util.utc_to_timestamp(timestamp), ) ) - convert_pending_events_to_event_types(instance, session) - convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(recorder_mock, session) service_data = {"keep_days": 10, "apply_filter": True} _add_db_entries(hass) 
@@ -1277,7 +1293,7 @@ async def test_purge_filtered_events_state_changed( assert events_purge.count() == 1 assert states.count() == 64 - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() for _ in range(4): @@ -1311,11 +1327,8 @@ async def test_purge_filtered_events_state_changed( ) # should have been kept -async def test_purge_entities( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test purging of specific entities.""" - instance = await async_setup_recorder_instance(hass) async def _purge_entities(hass, entity_ids, domains, entity_globs): service_data = { @@ -1325,7 +1338,7 @@ async def test_purge_entities( } await hass.services.async_call( - recorder.DOMAIN, SERVICE_PURGE_ENTITIES, service_data + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, service_data ) await hass.async_block_till_done() @@ -1363,8 +1376,8 @@ async def test_purge_entities( timestamp, event_id * days, ) - convert_pending_states_to_meta(instance, session) - convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(recorder_mock, session) def _add_keep_records(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -1378,8 +1391,8 @@ async def test_purge_entities( timestamp, event_id, ) - convert_pending_states_to_meta(instance, session) - convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(recorder_mock, session) _add_purge_records(hass) _add_keep_records(hass) @@ -1657,15 +1670,14 @@ def _add_state_with_state_attributes( @pytest.mark.timeout(30) async def test_purge_many_old_events( - async_setup_recorder_instance: 
RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old events.""" old_events_count = 5 - instance = await async_setup_recorder_instance(hass) with ( - patch.object(instance, "max_bind_vars", old_events_count), - patch.object(instance.database_engine, "max_bind_vars", old_events_count), + patch.object(recorder_mock, "max_bind_vars", old_events_count), + patch.object(recorder_mock.database_engine, "max_bind_vars", old_events_count), ): await _add_test_events(hass, old_events_count) @@ -1675,48 +1687,62 @@ async def test_purge_many_old_events( ) assert events.count() == old_events_count * 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert not finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert not finished assert events.count() == old_events_count * 3 - # we should only have 2 groups of events left - finished = purge_old_data( - instance, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, + # we should only have 2 groups of events left + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert finished assert events.count() == old_events_count * 2 - # we should now purge everything - finished = purge_old_data( - instance, - dt_util.utcnow(), - 
repack=False, - states_batch_size=20, - events_batch_size=20, + # we should now purge everything + finished = purge_old_data( + recorder_mock, + dt_util.utcnow(), + repack=False, + states_batch_size=20, + events_batch_size=20, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert finished assert events.count() == 0 async def test_purge_old_events_purges_the_event_type_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old events purges event type ids.""" - instance = await async_setup_recorder_instance(hass) - assert instance.event_type_manager.active is True + assert recorder_mock.event_type_manager.active is True utcnow = dt_util.utcnow() five_days_ago = utcnow - timedelta(days=5) @@ -1760,7 +1786,7 @@ async def test_purge_old_events_purges_the_event_type_ids( time_fired_ts=dt_util.utc_to_timestamp(timestamp), ) ) - return instance.event_type_manager.get_many( + return recorder_mock.event_type_manager.get_many( [ "EVENT_TEST_AUTOPURGE", "EVENT_TEST_PURGE", @@ -1770,7 +1796,7 @@ async def test_purge_old_events_purges_the_event_type_ids( session, ) - event_type_to_id = await instance.async_add_executor_job(_insert_events) + event_type_to_id = await recorder_mock.async_add_executor_job(_insert_events) test_event_type_ids = event_type_to_id.values() with session_scope(hass=hass) as session: events = session.query(Events).where( @@ -1783,47 +1809,70 @@ async def test_purge_old_events_purges_the_event_type_ids( assert events.count() == 30 assert event_types.count() == 4 - # run purge_old_data() - finished = purge_old_data( - instance, - far_past, - repack=False, + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + far_past, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + 
events = session.query(Events).where( + Events.event_type_id.in_(test_event_type_ids) + ) + event_types = session.query(EventTypes).where( + EventTypes.event_type_id.in_(test_event_type_ids) ) - assert finished assert events.count() == 30 # We should remove the unused event type assert event_types.count() == 3 - assert "EVENT_TEST_UNUSED" not in instance.event_type_manager._id_map + assert "EVENT_TEST_UNUSED" not in recorder_mock.event_type_manager._id_map - # we should only have 10 events left since - # only one event type was recorded now - finished = purge_old_data( - instance, - utcnow, - repack=False, + # we should only have 10 events left since + # only one event type was recorded now + finished = purge_old_data( + recorder_mock, + utcnow, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).where( + Events.event_type_id.in_(test_event_type_ids) + ) + event_types = session.query(EventTypes).where( + EventTypes.event_type_id.in_(test_event_type_ids) ) - assert finished assert events.count() == 10 assert event_types.count() == 1 - # Purge everything - finished = purge_old_data( - instance, - utcnow + timedelta(seconds=1), - repack=False, + # Purge everything + finished = purge_old_data( + recorder_mock, + utcnow + timedelta(seconds=1), + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).where( + Events.event_type_id.in_(test_event_type_ids) + ) + event_types = session.query(EventTypes).where( + EventTypes.event_type_id.in_(test_event_type_ids) ) - assert finished assert events.count() == 0 assert event_types.count() == 0 async def test_purge_old_states_purges_the_state_metadata_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old states purges state metadata_ids.""" - instance = await async_setup_recorder_instance(hass) - 
assert instance.states_meta_manager.active is True + assert recorder_mock.states_meta_manager.active is True utcnow = dt_util.utcnow() five_days_ago = utcnow - timedelta(days=5) @@ -1867,13 +1916,15 @@ async def test_purge_old_states_purges_the_state_metadata_ids( last_updated_ts=dt_util.utc_to_timestamp(timestamp), ) ) - return instance.states_meta_manager.get_many( + return recorder_mock.states_meta_manager.get_many( ["sensor.one", "sensor.two", "sensor.three", "sensor.unused"], session, True, ) - entity_id_to_metadata_id = await instance.async_add_executor_job(_insert_states) + entity_id_to_metadata_id = await recorder_mock.async_add_executor_job( + _insert_states + ) test_metadata_ids = entity_id_to_metadata_id.values() with session_scope(hass=hass) as session: states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) @@ -1884,47 +1935,63 @@ async def test_purge_old_states_purges_the_state_metadata_ids( assert states.count() == 30 assert states_meta.count() == 4 - # run purge_old_data() - finished = purge_old_data( - instance, - far_past, - repack=False, + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + far_past, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) + states_meta = session.query(StatesMeta).where( + StatesMeta.metadata_id.in_(test_metadata_ids) ) - assert finished assert states.count() == 30 # We should remove the unused entity_id assert states_meta.count() == 3 - assert "sensor.unused" not in instance.event_type_manager._id_map + assert "sensor.unused" not in recorder_mock.event_type_manager._id_map - # we should only have 10 states left since - # only one event type was recorded now - finished = purge_old_data( - instance, - utcnow, - repack=False, + # we should only have 10 states left since + # only one event type was recorded now + finished = purge_old_data( + recorder_mock, + utcnow, 
+ repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) + states_meta = session.query(StatesMeta).where( + StatesMeta.metadata_id.in_(test_metadata_ids) ) - assert finished assert states.count() == 10 assert states_meta.count() == 1 - # Purge everything - finished = purge_old_data( - instance, - utcnow + timedelta(seconds=1), - repack=False, + # Purge everything + finished = purge_old_data( + recorder_mock, + utcnow + timedelta(seconds=1), + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) + states_meta = session.query(StatesMeta).where( + StatesMeta.metadata_id.in_(test_metadata_ids) ) - assert finished assert states.count() == 0 assert states_meta.count() == 0 async def test_purge_entities_keep_days( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test purging states with an entity filter and keep_days.""" - instance = await async_setup_recorder_instance(hass, {}) await hass.async_block_till_done() await async_wait_recording_done(hass) start = dt_util.utcnow() @@ -1946,7 +2013,7 @@ async def test_purge_entities_keep_days( hass.states.async_set("sensor.keep", "now") await async_recorder_block_till_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1957,7 +2024,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 3 await hass.services.async_call( - recorder.DOMAIN, + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1967,7 +2034,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await 
instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1978,7 +2045,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 1 await hass.services.async_call( - recorder.DOMAIN, + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1987,7 +2054,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index fb636cfa9dc..0754b2e911c 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -1,5 +1,6 @@ """Test data purging.""" +from collections.abc import Generator from datetime import datetime, timedelta import json import sqlite3 @@ -10,10 +11,12 @@ import pytest from sqlalchemy import text, update from sqlalchemy.exc import DatabaseError, OperationalError from sqlalchemy.orm.session import Session -from typing_extensions import Generator -from homeassistant.components import recorder -from homeassistant.components.recorder import migration +from homeassistant.components.recorder import ( + DOMAIN as RECORDER_DOMAIN, + Recorder, + migration, +) from homeassistant.components.recorder.const import SupportedDialect from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.purge import purge_old_data @@ -47,6 +50,13 @@ from .db_schema_32 import ( from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.fixture(autouse=True) def db_schema_32(): 
"""Fixture to initialize the db with the old schema 32.""" @@ -66,11 +76,8 @@ def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: yield -async def test_purge_old_states( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -87,23 +94,27 @@ async def test_purge_old_states( events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id states_after_purge = list(session.query(States)) # Since these states are deleted in batches, we can't guarantee the order @@ -115,27 +126,35 @@ async def test_purge_old_states( assert dontpurgeme_5.old_state_id == dontpurgeme_4.state_id assert dontpurgeme_4.old_state_id is None - finished = purge_old_data(instance, purge_before, repack=False) - assert finished + finished = 
purge_old_data(recorder_mock, purge_before, repack=False) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id - # run purge_old_data again - purge_before = dt_util.utcnow() - finished = purge_old_data( - instance, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished + # run purge_old_data again + purge_before = dt_util.utcnow() + finished = purge_old_data( + recorder_mock, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 0 assert state_attributes.count() == 0 - assert "test.recorder2" not in instance.states_manager._last_committed_id + assert "test.recorder2" not in recorder_mock.states_manager._last_committed_id # Add some more states await _add_test_states(hass) @@ -149,31 +168,29 @@ async def test_purge_old_states( events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id state_attributes = session.query(StateAttributes) assert state_attributes.count() == 3 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("recorder_mock", "skip_by_db_engine") async def test_purge_old_states_encouters_database_corruption( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, ) -> None: - """Test database image image is malformed while deleting 
old states.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite, wiping the database on error only happens - # with SQLite. - return + """Test database image image is malformed while deleting old states. - await async_setup_recorder_instance(hass) + This test is specific for SQLite, wiping the database on error only happens + with SQLite. + """ await async_attach_db_engine(hass) await _add_test_states(hass) await async_wait_recording_done(hass) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database") with ( patch( @@ -184,7 +201,7 @@ async def test_purge_old_states_encouters_database_corruption( side_effect=sqlite3_exception, ), ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) @@ -197,12 +214,11 @@ async def test_purge_old_states_encouters_database_corruption( async def test_purge_old_states_encounters_temporary_mysql_error( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test retry on specific mysql operational errors.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -217,9 +233,9 @@ async def test_purge_old_states_encounters_temporary_mysql_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=[mysql_exception, None], ), - patch.object(instance.engine.dialect, "name", "mysql"), + patch.object(recorder_mock.engine.dialect, "name", "mysql"), ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, 
SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -228,13 +244,12 @@ async def test_purge_old_states_encounters_temporary_mysql_error( assert sleep_mock.called +@pytest.mark.usefixtures("recorder_mock") async def test_purge_old_states_encounters_operational_error( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: """Test error on operational errors that are not mysql does not retry.""" - await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -246,7 +261,7 @@ async def test_purge_old_states_encounters_operational_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=exception, ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -255,11 +270,8 @@ async def test_purge_old_states_encounters_operational_error( assert "Error executing purge" in caplog.text -async def test_purge_old_events( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old events.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_events(hass) @@ -270,34 +282,39 @@ async def test_purge_old_events( purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + 
events_batch_size=1, + states_batch_size=1, + ) + assert not finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == 2 - # we should only have 2 events left - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished + # we should only have 2 events left + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == 2 async def test_purge_old_recorder_runs( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old recorder runs keeps current run.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_recorder_runs(hass) @@ -307,34 +324,36 @@ async def test_purge_old_recorder_runs( recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished + + with session_scope(hass=hass) as session: + recorder_runs 
= session.query(RecorderRuns) assert recorder_runs.count() == 1 async def test_purge_old_statistics_runs( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old statistics runs keeps the latest run.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_statistics_runs(hass) @@ -344,20 +363,23 @@ async def test_purge_old_statistics_runs( statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data(instance, purge_before, repack=False) - assert not finished + # run purge_old_data() + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert not finished - finished = purge_old_data(instance, purge_before, repack=False) - assert finished + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert finished + + with session_scope(hass=hass) as session: + statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 1 @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.usefixtures("recorder_mock") async def test_purge_method( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, use_sqlite: bool, @@ -375,7 +397,6 @@ async def test_purge_method( assert run1.run_id == run2.run_id assert run1.start == run2.start - await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) service_data = {"keep_days": 4} @@ -476,11 +497,8 @@ async def test_purge_method( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) -async def test_purge_edge_case( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, - use_sqlite: bool, -) -> None: +@pytest.mark.usefixtures("recorder_mock") 
+async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: """Test states and events are purged even if they occurred shortly before purge_before.""" async def _add_db_entries(hass: HomeAssistant, timestamp: datetime) -> None: @@ -513,7 +531,6 @@ async def test_purge_edge_case( ) ) - await async_setup_recorder_instance(hass, None) await async_attach_db_engine(hass) await async_wait_purge_done(hass) @@ -532,7 +549,7 @@ async def test_purge_edge_case( events = session.query(Events).filter(Events.event_type == "EVENT_TEST_PURGE") assert events.count() == 1 - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -545,10 +562,7 @@ async def test_purge_edge_case( assert events.count() == 0 -async def test_purge_cutoff_date( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, -) -> None: +async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test states and events are purged only if they occurred before "now() - keep_days".""" async def _add_db_entries(hass: HomeAssistant, cutoff: datetime, rows: int) -> None: @@ -612,7 +626,6 @@ async def test_purge_cutoff_date( ) ) - instance = await async_setup_recorder_instance(hass, None) await async_attach_db_engine(hass) await async_wait_purge_done(hass) @@ -641,7 +654,7 @@ async def test_purge_cutoff_date( assert events.filter(Events.event_type == "PURGE").count() == rows - 1 assert events.filter(Events.event_type == "KEEP").count() == 1 - instance.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) + recorder_mock.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) await hass.async_block_till_done() await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -672,7 +685,9 @@ async def test_purge_cutoff_date( assert 
events.filter(Events.event_type == "KEEP").count() == 1 # Make sure we can purge everything - instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) + recorder_mock.queue_task( + PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) + ) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -683,7 +698,9 @@ async def test_purge_cutoff_date( assert state_attributes.count() == 0 # Make sure we can purge everything when the db is already empty - instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) + recorder_mock.queue_task( + PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) + ) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -936,16 +953,15 @@ def _add_state_and_state_changed_event( async def test_purge_many_old_events( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old events.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) old_events_count = 5 with ( - patch.object(instance, "max_bind_vars", old_events_count), - patch.object(instance.database_engine, "max_bind_vars", old_events_count), + patch.object(recorder_mock, "max_bind_vars", old_events_count), + patch.object(recorder_mock.database_engine, "max_bind_vars", old_events_count), ): await _add_test_events(hass, old_events_count) @@ -953,60 +969,70 @@ async def test_purge_many_old_events( events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == old_events_count * 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, - ) - assert not finished + # run purge_old_data() + finished = 
purge_old_data( + recorder_mock, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert not finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == old_events_count * 3 - # we should only have 2 groups of events left - finished = purge_old_data( - instance, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, - ) - assert finished + # we should only have 2 groups of events left + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == old_events_count * 2 - # we should now purge everything - finished = purge_old_data( - instance, - dt_util.utcnow(), - repack=False, - states_batch_size=20, - events_batch_size=20, - ) - assert finished + # we should now purge everything + finished = purge_old_data( + recorder_mock, + dt_util.utcnow(), + repack=False, + states_batch_size=20, + events_batch_size=20, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == 0 async def test_purge_can_mix_legacy_and_new_format( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test purging with legacy and new events.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await async_wait_recording_done(hass) # New databases are no longer created with the legacy events index - assert instance.use_legacy_events_index is False + assert recorder_mock.use_legacy_events_index is False def _recreate_legacy_events_index(): """Recreate the legacy events index 
since its no longer created on new instances.""" - migration._create_index(instance.get_session, "states", "ix_states_event_id") - instance.use_legacy_events_index = True + migration._create_index( + recorder_mock.get_session, "states", "ix_states_event_id" + ) + recorder_mock.use_legacy_events_index = True - await instance.async_add_executor_job(_recreate_legacy_events_index) - assert instance.use_legacy_events_index is True + await recorder_mock.async_add_executor_job(_recreate_legacy_events_index) + assert recorder_mock.use_legacy_events_index is True utcnow = dt_util.utcnow() eleven_days_ago = utcnow - timedelta(days=11) @@ -1045,39 +1071,65 @@ async def test_purge_can_mix_legacy_and_new_format( assert states_with_event_id.count() == 50 assert states_without_event_id.count() == 51 - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - instance, - purge_before, - repack=False, + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 51 - # At this point all the legacy states are gone - # and we switch methods - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, + + # At this point all the legacy states are gone + # and we switch methods + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + # Since we only allow one iteration, we won't + # check if we are finished 
this loop similar + # to the legacy method + assert not finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - # Since we only allow one iteration, we won't - # check if we are finished this loop similar - # to the legacy method - assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=100, - states_batch_size=100, + + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=100, + states_batch_size=100, + ) + assert finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 _add_state_without_event_linkage( @@ -1085,41 +1137,53 @@ async def test_purge_can_mix_legacy_and_new_format( ) assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 2 - finished = purge_old_data( - instance, - purge_before, - repack=False, + + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert finished # The broken state without a timestamp # does not prevent future purges. Its ignored. 
assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_purge_can_mix_legacy_and_new_format_with_detached_state( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, recorder_db_url: str, ) -> None: - """Test purging with legacy and new events with a detached state.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - return pytest.skip("This tests disables foreign key checks on SQLite") + """Test purging with legacy and new events with a detached state. - instance = await async_setup_recorder_instance(hass) + This tests disables foreign key checks on SQLite. + """ await async_attach_db_engine(hass) await async_wait_recording_done(hass) # New databases are no longer created with the legacy events index - assert instance.use_legacy_events_index is False + assert recorder_mock.use_legacy_events_index is False def _recreate_legacy_events_index(): """Recreate the legacy events index since its no longer created on new instances.""" - migration._create_index(instance.get_session, "states", "ix_states_event_id") - instance.use_legacy_events_index = True + migration._create_index( + recorder_mock.get_session, "states", "ix_states_event_id" + ) + recorder_mock.use_legacy_events_index = True - await instance.async_add_executor_job(_recreate_legacy_events_index) - assert instance.use_legacy_events_index is True + await recorder_mock.async_add_executor_job(_recreate_legacy_events_index) + assert recorder_mock.use_legacy_events_index is True with session_scope(hass=hass) as session: session.execute(text("PRAGMA foreign_keys = OFF")) @@ -1189,39 +1253,65 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( assert states_with_event_id.count() == 52 assert states_without_event_id.count() == 51 - purge_before = dt_util.utcnow() - 
timedelta(days=4) - finished = purge_old_data( - instance, - purge_before, - repack=False, + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 51 - # At this point all the legacy states are gone - # and we switch methods - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, + + # At this point all the legacy states are gone + # and we switch methods + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + # Since we only allow one iteration, we won't + # check if we are finished this loop similar + # to the legacy method + assert not finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - # Since we only allow one iteration, we won't - # check if we are finished this loop similar - # to the legacy method - assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=100, - states_batch_size=100, + + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=100, + states_batch_size=100, + ) + assert finished + + with session_scope(hass=hass) as 
session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 _add_state_without_event_linkage( @@ -1229,12 +1319,21 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( ) assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 2 - finished = purge_old_data( - instance, - purge_before, - repack=False, + + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert finished # The broken state without a timestamp # does not prevent future purges. Its ignored. 
assert states_with_event_id.count() == 0 @@ -1242,11 +1341,9 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( async def test_purge_entities_keep_days( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test purging states with an entity filter and keep_days.""" - instance = await async_setup_recorder_instance(hass, {}) await async_attach_db_engine(hass) await hass.async_block_till_done() @@ -1270,7 +1367,7 @@ async def test_purge_entities_keep_days( hass.states.async_set("sensor.keep", "now") await async_recorder_block_till_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1281,7 +1378,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 3 await hass.services.async_call( - recorder.DOMAIN, + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1291,7 +1388,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1302,7 +1399,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 1 await hass.services.async_call( - recorder.DOMAIN, + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1311,7 +1408,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 7d8bc6e3415..5cbb29afc91 100644 --- 
a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -1,7 +1,8 @@ """The tests for sensor recorder platform.""" from datetime import timedelta -from unittest.mock import patch +from typing import Any +from unittest.mock import ANY, Mock, patch import pytest from sqlalchemy import select @@ -15,17 +16,21 @@ from homeassistant.components.recorder.models import ( ) from homeassistant.components.recorder.statistics import ( STATISTIC_UNIT_TO_UNIT_CONVERTER, + PlatformCompiledStatistics, _generate_max_mean_min_statistic_in_sub_period_stmt, _generate_statistics_at_time_stmt, _generate_statistics_during_period_stmt, async_add_external_statistics, async_import_statistics, + async_list_statistic_ids, get_last_short_term_statistics, get_last_statistics, get_latest_short_term_statistics_with_session, get_metadata, + get_metadata_with_session, get_short_term_statistics_run_cache, list_statistic_ids, + validate_statistics, ) from homeassistant.components.recorder.table_managers.statistics_meta import ( _generate_get_metadata_stmt, @@ -41,17 +46,20 @@ import homeassistant.util.dt as dt_util from .common import ( assert_dict_of_states_equal_without_context_and_last_changed, async_record_states, + async_recorder_block_till_done, async_wait_recording_done, do_adhoc_statistics, + get_start_time, statistics_during_period, ) +from tests.common import MockPlatform, mock_platform from tests.typing import RecorderInstanceGenerator, WebSocketGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -61,6 +69,15 @@ def setup_recorder(recorder_mock: Recorder) -> None: """Set up recorder.""" +async def _setup_mock_domain( + hass: HomeAssistant, + platform: Any | None = None, # There's no RecorderPlatform class yet +) -> None: + """Set up a mock domain.""" + mock_platform(hass, 
"some_domain.recorder", platform or MockPlatform()) + assert await async_setup_component(hass, "some_domain", {}) + + def test_converters_align_with_sensor() -> None: """Ensure STATISTIC_UNIT_TO_UNIT_CONVERTER is aligned with UNIT_CONVERTERS.""" for converter in UNIT_CONVERTERS.values(): @@ -293,14 +310,17 @@ def mock_sensor_statistics(): } def get_fake_stats(_hass, session, start, _end): + instance = recorder.get_instance(_hass) return statistics.PlatformCompiledStatistics( [ sensor_stats("sensor.test1", start), sensor_stats("sensor.test2", start), sensor_stats("sensor.test3", start), ], - get_metadata( - _hass, statistic_ids={"sensor.test1", "sensor.test2", "sensor.test3"} + get_metadata_with_session( + instance, + session, + statistic_ids={"sensor.test1", "sensor.test2", "sensor.test3"}, ), ) @@ -338,7 +358,7 @@ async def test_compile_periodic_statistics_exception( """Test exception handling when compiling periodic statistics.""" await async_setup_component(hass, "sensor", {}) - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) do_adhoc_statistics(hass, start=now) do_adhoc_statistics(hass, start=now + timedelta(minutes=5)) await async_wait_recording_done(hass) @@ -2468,3 +2488,151 @@ async def test_change_with_none( types={"change"}, ) assert stats == {} + + +async def test_recorder_platform_with_statistics( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test recorder platform.""" + instance = recorder.get_instance(hass) + recorder_data = hass.data["recorder"] + assert not recorder_data.recorder_platforms + + def _mock_compile_statistics(*args: Any) -> PlatformCompiledStatistics: + return PlatformCompiledStatistics([], {}) + + def _mock_list_statistic_ids(*args: Any, **kwargs: Any) -> dict: + return {} + + def _mock_validate_statistics(*args: Any) -> dict: + return {} + + recorder_platform = Mock( + compile_statistics=Mock(wraps=_mock_compile_statistics), + 
list_statistic_ids=Mock(wraps=_mock_list_statistic_ids), + validate_statistics=Mock(wraps=_mock_validate_statistics), + ) + + await _setup_mock_domain(hass, recorder_platform) + + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + assert recorder_data.recorder_platforms == {"some_domain": recorder_platform} + + recorder_platform.compile_statistics.assert_not_called() + recorder_platform.list_statistic_ids.assert_not_called() + recorder_platform.validate_statistics.assert_not_called() + + # Test compile statistics + zero = get_start_time(dt_util.utcnow()) + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + + recorder_platform.compile_statistics.assert_called_once_with( + hass, ANY, zero, zero + timedelta(minutes=5) + ) + recorder_platform.list_statistic_ids.assert_not_called() + recorder_platform.validate_statistics.assert_not_called() + + # Test list statistic IDs + await async_list_statistic_ids(hass) + recorder_platform.compile_statistics.assert_called_once() + recorder_platform.list_statistic_ids.assert_called_once_with( + hass, statistic_ids=None, statistic_type=None + ) + recorder_platform.validate_statistics.assert_not_called() + + # Test validate statistics + await instance.async_add_executor_job( + validate_statistics, + hass, + ) + recorder_platform.compile_statistics.assert_called_once() + recorder_platform.list_statistic_ids.assert_called_once() + recorder_platform.validate_statistics.assert_called_once_with(hass) + + +async def test_recorder_platform_without_statistics( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test recorder platform.""" + recorder_data = hass.data["recorder"] + assert recorder_data.recorder_platforms == {} + + await _setup_mock_domain(hass) + + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + assert recorder_data.recorder_platforms == {} + + 
+@pytest.mark.parametrize( + "supported_methods", + [ + ("compile_statistics",), + ("list_statistic_ids",), + ("validate_statistics",), + ], +) +async def test_recorder_platform_with_partial_statistics_support( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, + supported_methods: tuple[str, ...], +) -> None: + """Test recorder platform.""" + instance = recorder.get_instance(hass) + recorder_data = hass.data["recorder"] + assert not recorder_data.recorder_platforms + + def _mock_compile_statistics(*args: Any) -> PlatformCompiledStatistics: + return PlatformCompiledStatistics([], {}) + + def _mock_list_statistic_ids(*args: Any, **kwargs: Any) -> dict: + return {} + + def _mock_validate_statistics(*args: Any) -> dict: + return {} + + mock_impl = { + "compile_statistics": _mock_compile_statistics, + "list_statistic_ids": _mock_list_statistic_ids, + "validate_statistics": _mock_validate_statistics, + } + + kwargs = {meth: Mock(wraps=mock_impl[meth]) for meth in supported_methods} + + recorder_platform = Mock( + spec=supported_methods, + **kwargs, + ) + + await _setup_mock_domain(hass, recorder_platform) + + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + assert recorder_data.recorder_platforms == {"some_domain": recorder_platform} + + for meth in supported_methods: + getattr(recorder_platform, meth).assert_not_called() + + # Test compile statistics + zero = get_start_time(dt_util.utcnow()) + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + + # Test list statistic IDs + await async_list_statistic_ids(hass) + + # Test validate statistics + await instance.async_add_executor_job( + validate_statistics, + hass, + ) + + for meth in supported_methods: + getattr(recorder_platform, meth).assert_called_once() diff --git a/tests/components/recorder/test_statistics_v23_migration.py b/tests/components/recorder/test_statistics_v23_migration.py index 
af784692612..dfa87fc9391 100644 --- a/tests/components/recorder/test_statistics_v23_migration.py +++ b/tests/components/recorder/test_statistics_v23_migration.py @@ -15,7 +15,7 @@ from unittest.mock import patch import pytest from homeassistant.components import recorder -from homeassistant.components.recorder import SQLITE_URL_PREFIX, get_instance +from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.util import session_scope from homeassistant.helpers import recorder as recorder_helper from homeassistant.setup import setup_component @@ -34,13 +34,16 @@ SCHEMA_VERSION_POSTFIX = "23_with_newer_columns" SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) -def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None: - """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) +def test_delete_duplicates( + recorder_db_url: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test removal of duplicated statistics. + The test only works with SQLite. 
+ """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -176,7 +179,7 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> get_test_home_assistant() as hass, ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) wait_recording_done(hass) wait_recording_done(hass) @@ -204,7 +207,7 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> # Test that the duplicates are removed during migration from schema 23 with get_test_home_assistant() as hass: recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) hass.start() wait_recording_done(hass) wait_recording_done(hass) @@ -215,15 +218,16 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> assert "Found duplicated" not in caplog.text +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) def test_delete_duplicates_many( - caplog: pytest.LogCaptureFixture, tmp_path: Path + recorder_db_url: str, caplog: pytest.LogCaptureFixture ) -> None: - """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + """Test removal of duplicated statistics. + The test only works with SQLite. 
+ """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -359,7 +363,7 @@ def test_delete_duplicates_many( get_test_home_assistant() as hass, ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) wait_recording_done(hass) wait_recording_done(hass) @@ -393,7 +397,7 @@ def test_delete_duplicates_many( # Test that the duplicates are removed during migration from schema 23 with get_test_home_assistant() as hass: recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) hass.start() wait_recording_done(hass) wait_recording_done(hass) @@ -405,15 +409,16 @@ def test_delete_duplicates_many( @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00") +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) def test_delete_duplicates_non_identical( - caplog: pytest.LogCaptureFixture, tmp_path: Path + recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: - """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + """Test removal of duplicated statistics. + The test only works with SQLite. 
+ """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -519,7 +524,7 @@ def test_delete_duplicates_non_identical( get_test_home_assistant() as hass, ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) wait_recording_done(hass) wait_recording_done(hass) @@ -543,7 +548,7 @@ def test_delete_duplicates_non_identical( with get_test_home_assistant() as hass: hass.config.config_dir = tmp_path recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) hass.start() wait_recording_done(hass) wait_recording_done(hass) @@ -589,15 +594,16 @@ def test_delete_duplicates_non_identical( ] +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") def test_delete_duplicates_short_term( - caplog: pytest.LogCaptureFixture, tmp_path: Path + recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: - """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + """Test removal of duplicated statistics. + The test only works with SQLite. 
+ """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -634,7 +640,7 @@ def test_delete_duplicates_short_term( get_test_home_assistant() as hass, ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) wait_recording_done(hass) wait_recording_done(hass) @@ -657,7 +663,7 @@ def test_delete_duplicates_short_term( with get_test_home_assistant() as hass: hass.config.config_dir = tmp_path recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) hass.start() wait_recording_done(hass) wait_recording_done(hass) diff --git a/tests/components/recorder/test_system_health.py b/tests/components/recorder/test_system_health.py index fbcefa0b13e..0efaa82e5e5 100644 --- a/tests/components/recorder/test_system_health.py +++ b/tests/components/recorder/test_system_health.py @@ -15,13 +15,15 @@ from tests.common import get_system_health_info from tests.typing import RecorderInstanceGenerator +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_recorder_system_health( recorder_mock: Recorder, hass: HomeAssistant, recorder_db_url: str ) -> None: - """Test recorder system health.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Test recorder system health. + + This test is specific for SQLite. 
+ """ assert await async_setup_component(hass, "system_health", {}) await async_wait_recording_done(hass) @@ -100,15 +102,17 @@ async def test_recorder_system_health_db_url_missing_host( } +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_recorder_system_health_crashed_recorder_runs_table( async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test recorder system health with crashed recorder runs table.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Test recorder system health with crashed recorder runs table. + + This test is specific for SQLite. + """ with patch( "homeassistant.components.recorder.table_managers.recorder_runs.RecorderRunsManager.load_from_db" diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index d72978c57bb..d850778d214 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -26,6 +26,8 @@ from homeassistant.components.recorder.models import ( process_timestamp, ) from homeassistant.components.recorder.util import ( + MIN_VERSION_SQLITE, + UPCOMING_MIN_VERSION_SQLITE, end_incomplete_runs, is_second_sunday, resolve_period, @@ -48,7 +50,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -116,12 +118,18 @@ def test_validate_or_move_away_sqlite_database( assert util.validate_or_move_away_sqlite_database(dburl) is True +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage 
async def test_last_run_was_recently_clean( - async_setup_recorder_instance: RecorderInstanceGenerator, tmp_path: Path + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: - """Test we can check if the last recorder run was recently clean.""" + """Test we can check if the last recorder run was recently clean. + + This is only implemented for SQLite. + """ config = { - recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"), recorder.CONF_COMMIT_INTERVAL: 1, } async with async_test_home_assistant() as hass: @@ -217,9 +225,9 @@ def test_setup_connection_for_dialect_mysql(mysql_version) -> None: @pytest.mark.parametrize( "sqlite_version", - ["3.31.0"], + [str(UPCOMING_MIN_VERSION_SQLITE)], ) -def test_setup_connection_for_dialect_sqlite(sqlite_version) -> None: +def test_setup_connection_for_dialect_sqlite(sqlite_version: str) -> None: """Test setting up the connection for a sqlite dialect.""" instance_mock = MagicMock() execute_args = [] @@ -270,10 +278,10 @@ def test_setup_connection_for_dialect_sqlite(sqlite_version) -> None: @pytest.mark.parametrize( "sqlite_version", - ["3.31.0"], + [str(UPCOMING_MIN_VERSION_SQLITE)], ) def test_setup_connection_for_dialect_sqlite_zero_commit_interval( - sqlite_version, + sqlite_version: str, ) -> None: """Test setting up the connection for a sqlite dialect with a zero commit interval.""" instance_mock = MagicMock(commit_interval=0) @@ -497,10 +505,6 @@ def test_supported_pgsql(caplog: pytest.LogCaptureFixture, pgsql_version) -> Non "2.0.0", "Version 2.0.0 of SQLite is not supported; minimum supported version is 3.31.0.", ), - ( - "dogs", - "Version dogs of SQLite is not supported; minimum supported version is 3.31.0.", - ), ], ) def test_fail_outdated_sqlite( @@ -719,14 +723,72 @@ async def test_no_issue_for_mariadb_with_MDEV_25020( assert database_engine.optimizer.slow_range_in_select is False +async def test_issue_for_old_sqlite( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: 
+ """Test we create and delete an issue for old sqlite versions.""" + instance_mock = MagicMock() + instance_mock.hass = hass + execute_args = [] + close_mock = MagicMock() + min_version = str(MIN_VERSION_SQLITE) + + def execute_mock(statement): + nonlocal execute_args + execute_args.append(statement) + + def fetchall_mock(): + nonlocal execute_args + if execute_args[-1] == "SELECT sqlite_version()": + return [[min_version]] + return None + + def _make_cursor_mock(*_): + return MagicMock(execute=execute_mock, close=close_mock, fetchall=fetchall_mock) + + dbapi_connection = MagicMock(cursor=_make_cursor_mock) + + database_engine = await hass.async_add_executor_job( + util.setup_connection_for_dialect, + instance_mock, + "sqlite", + dbapi_connection, + True, + ) + await hass.async_block_till_done() + + issue = issue_registry.async_get_issue(DOMAIN, "sqlite_too_old") + assert issue is not None + assert issue.translation_placeholders == { + "min_version": str(UPCOMING_MIN_VERSION_SQLITE), + "server_version": min_version, + } + + min_version = str(UPCOMING_MIN_VERSION_SQLITE) + database_engine = await hass.async_add_executor_job( + util.setup_connection_for_dialect, + instance_mock, + "sqlite", + dbapi_connection, + True, + ) + await hass.async_block_till_done() + + issue = issue_registry.async_get_issue(DOMAIN, "sqlite_too_old") + assert issue is None + assert database_engine is not None + + +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_basic_sanity_check( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test the basic sanity checks with a missing table.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Test the basic sanity checks with a missing table. + This test is specific for SQLite. 
+ """ cursor = util.get_instance(hass).engine.raw_connection().cursor() assert util.basic_sanity_check(cursor) is True @@ -737,17 +799,18 @@ async def test_basic_sanity_check( util.basic_sanity_check(cursor) +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_combined_checks( hass: HomeAssistant, setup_recorder: None, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: - """Run Checks on the open database.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Run Checks on the open database. + This test is specific for SQLite. + """ instance = util.get_instance(hass) instance.db_retry_wait = 0 @@ -829,14 +892,15 @@ async def test_end_incomplete_runs( assert "Ended unfinished session" in caplog.text +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_periodic_db_cleanups( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test periodic db cleanups.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Test periodic db cleanups. + This test is specific for SQLite. + """ with patch.object(util.get_instance(hass).engine, "connect") as connect_mock: util.periodic_db_cleanups(util.get_instance(hass)) @@ -847,17 +911,22 @@ async def test_periodic_db_cleanups( assert str(text_obj) == "PRAGMA wal_checkpoint(TRUNCATE);" +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) async def test_write_lock_db( async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - tmp_path: Path, + recorder_db_url: str, ) -> None: - """Test database write lock.""" + """Test database write lock. - # Use file DB, in memory DB cannot do write locks. 
- config = { - recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db?timeout=0.1") - } + This is only supported for SQLite. + + Use file DB, in memory DB cannot do write locks. + """ + + config = {recorder.CONF_DB_URL: recorder_db_url + "?timeout=0.1"} instance = await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -921,7 +990,7 @@ async def test_execute_stmt_lambda_element( all_calls = 0 class MockExecutor: - def __init__(self, stmt): + def __init__(self, stmt) -> None: assert isinstance(stmt, StatementLambdaElement) def all(self): diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index a07c63b3376..c9ba330b758 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -2,26 +2,27 @@ from datetime import timedelta import importlib -from pathlib import Path import sys from unittest.mock import patch import pytest from sqlalchemy import create_engine, inspect +from sqlalchemy.exc import OperationalError, SQLAlchemyError from sqlalchemy.orm import Session from homeassistant.components import recorder -from homeassistant.components.recorder import SQLITE_URL_PREFIX, core, statistics +from homeassistant.components.recorder import core, migration, statistics from homeassistant.components.recorder.queries import select_event_type_ids from homeassistant.components.recorder.util import session_scope -from homeassistant.core import EVENT_STATE_CHANGED, Event, EventOrigin, State -from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import async_setup_component +from homeassistant.const import EVENT_STATE_CHANGED +from homeassistant.core import Event, EventOrigin, State import homeassistant.util.dt as dt_util from .common import async_wait_recording_done +from .conftest import instrument_migration from tests.common import async_test_home_assistant +from tests.typing import 
RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE = "tests.components.recorder.db_schema_32" @@ -49,13 +50,16 @@ def _create_engine_test(*args, **kwargs): return engine -async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None: +@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_migrate_times( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: """Test we can migrate times.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] now = dt_util.utcnow() @@ -94,37 +98,26 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventsContextIDMigration, "migrate_data"), + patch.object(migration.StatesContextIDMigration, "migrate_data"), + patch.object(migration.EventTypeIDMigration, "migrate_data"), + patch.object(migration.EntityIDMigration, "migrate_data"), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, 
new=_create_engine_test), - patch( - "homeassistant.components.recorder.Recorder._migrate_events_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", - ), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( - "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids" + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" ), ): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -134,15 +127,15 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - session.add(old_db_schema.Events.from_event(custom_event)) session.add(old_db_schema.States.from_event(state_changed_event)) - await recorder.get_instance(hass).async_add_executor_job(_add_data) + await instance.async_add_executor_job(_add_data) await hass.async_block_till_done() - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() - states_indexes = await recorder.get_instance(hass).async_add_executor_job( + states_indexes = await instance.async_add_executor_job( _get_states_index_names ) states_index_names = {index["name"] for index in states_indexes} - assert recorder.get_instance(hass).use_legacy_events_index is True + assert instance.use_legacy_events_index is True await hass.async_stop() await hass.async_block_till_done() @@ -150,17 +143,16 @@ async def test_migrate_times(caplog: 
pytest.LogCaptureFixture, tmp_path: Path) - assert "ix_states_event_id" in states_index_names # Test that the duplicates are removed during migration from schema 23 - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() # We need to wait for all the migration tasks to complete # before we can check the database. for _ in range(number_of_migrations): - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() await async_wait_recording_done(hass) def _get_test_data_from_db(): @@ -184,9 +176,9 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - session.expunge_all() return events_result, states_result - events_result, states_result = await recorder.get_instance( - hass - ).async_add_executor_job(_get_test_data_from_db) + events_result, states_result = await instance.async_add_executor_job( + _get_test_data_from_db + ) assert len(events_result) == 1 assert events_result[0].time_fired_ts == now_timestamp @@ -198,38 +190,32 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - with session_scope(hass=hass) as session: return inspect(session.connection()).get_indexes("events") - events_indexes = await recorder.get_instance(hass).async_add_executor_job( - _get_events_index_names - ) + events_indexes = await instance.async_add_executor_job(_get_events_index_names) events_index_names = {index["name"] for index in events_indexes} assert "ix_events_context_id_bin" in events_index_names assert "ix_events_context_id" not in events_index_names - states_indexes = await recorder.get_instance(hass).async_add_executor_job( - _get_states_index_names - ) + states_indexes = await 
instance.async_add_executor_job(_get_states_index_names) states_index_names = {index["name"] for index in states_indexes} - # sqlite does not support dropping foreign keys so the - # ix_states_event_id index is not dropped in this case - # but use_legacy_events_index is still False - assert "ix_states_event_id" in states_index_names + # sqlite does not support dropping foreign keys so we had to + # create a new table and copy the data over + assert "ix_states_event_id" not in states_index_names - assert recorder.get_instance(hass).use_legacy_events_index is False + assert instance.use_legacy_events_index is False await hass.async_stop() +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_can_resume_entity_id_post_migration( - caplog: pytest.LogCaptureFixture, tmp_path: Path + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + recorder_db_url: str, ) -> None: """Test we resume the entity id post migration after a restart.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] now = dt_util.utcnow() @@ -274,28 +260,15 @@ async def test_migrate_can_resume_entity_id_post_migration( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch( - "homeassistant.components.recorder.Recorder._migrate_events_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", - ), 
patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( - "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids" + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" ), ): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -305,15 +278,15 @@ async def test_migrate_can_resume_entity_id_post_migration( session.add(old_db_schema.Events.from_event(custom_event)) session.add(old_db_schema.States.from_event(state_changed_event)) - await recorder.get_instance(hass).async_add_executor_job(_add_data) + await instance.async_add_executor_job(_add_data) await hass.async_block_till_done() - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() - states_indexes = await recorder.get_instance(hass).async_add_executor_job( + states_indexes = await instance.async_add_executor_job( _get_states_index_names ) states_index_names = {index["name"] for index in states_indexes} - assert recorder.get_instance(hass).use_legacy_events_index is True + assert instance.use_legacy_events_index is True await hass.async_stop() await hass.async_block_till_done() @@ -321,46 +294,520 @@ async def test_migrate_can_resume_entity_id_post_migration( assert "ix_states_event_id" in states_index_names assert "ix_states_entity_id_last_updated_ts" in states_index_names - with patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} 
+ async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. + for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_index_names = {index["name"] for index in states_indexes} + assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + + await hass.async_stop() + + +@pytest.mark.parametrize("enable_migrate_event_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_migrate_can_resume_ix_states_event_id_removed( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + recorder_db_url: str, +) -> None: + """Test we resume the entity id post migration after a restart. + + This case tests the migration still happens if + ix_states_event_id is removed from the states table. 
+ """ + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + now = dt_util.utcnow() + one_second_past = now - timedelta(seconds=1) + mock_state = State( + "sensor.test", + "old", + {"last_reset": now.isoformat()}, + last_changed=one_second_past, + last_updated=now, + ) + state_changed_event = Event( + EVENT_STATE_CHANGED, + { + "entity_id": "sensor.test", + "old_state": None, + "new_state": mock_state, + }, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + custom_event = Event( + "custom_event", + {"entity_id": "sensor.custom"}, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + number_of_migrations = 5 + + def _get_event_id_foreign_keys(): + assert instance.engine is not None + return next( + ( + fk # type: ignore[misc] + for fk in inspect(instance.engine).get_foreign_keys("states") + if fk["constrained_columns"] == ["event_id"] + ), + None, + ) + + def _get_states_index_names(): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes("states") + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), + patch.object(core, "StatesMeta", old_db_schema.StatesMeta), + patch.object(core, "EventTypes", old_db_schema.EventTypes), + patch.object(core, "EventData", old_db_schema.EventData), + patch.object(core, "States", old_db_schema.States), + patch.object(core, "Events", old_db_schema.Events), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), + patch( + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with 
session_scope(hass=hass) as session: + session.add(old_db_schema.Events.from_event(custom_event)) + session.add(old_db_schema.States.from_event(state_changed_event)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + await instance.async_add_executor_job( + migration._drop_index, + instance.get_session, + "states", + "ix_states_event_id", ) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert ( + await instance.async_add_executor_job(_get_event_id_foreign_keys) + is not None + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert "ix_states_entity_id_last_updated_ts" in states_index_names + + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. 
+ for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is False + assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None + + await hass.async_stop() + + +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.parametrize("enable_migrate_event_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_out_of_disk_space_while_rebuild_states_table( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + recorder_db_url: str, +) -> None: + """Test that we can recover from out of disk space while rebuilding the states table. + + This case tests the migration still happens if + ix_states_event_id is removed from the states table. 
+ """ + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + now = dt_util.utcnow() + one_second_past = now - timedelta(seconds=1) + mock_state = State( + "sensor.test", + "old", + {"last_reset": now.isoformat()}, + last_changed=one_second_past, + last_updated=now, + ) + state_changed_event = Event( + EVENT_STATE_CHANGED, + { + "entity_id": "sensor.test", + "old_state": None, + "new_state": mock_state, + }, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + custom_event = Event( + "custom_event", + {"entity_id": "sensor.custom"}, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + number_of_migrations = 5 + + def _get_event_id_foreign_keys(): + assert instance.engine is not None + return next( + ( + fk # type: ignore[misc] + for fk in inspect(instance.engine).get_foreign_keys("states") + if fk["constrained_columns"] == ["event_id"] + ), + None, + ) + + def _get_states_index_names(): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes("states") + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), + patch.object(core, "StatesMeta", old_db_schema.StatesMeta), + patch.object(core, "EventTypes", old_db_schema.EventTypes), + patch.object(core, "EventData", old_db_schema.EventData), + patch.object(core, "States", old_db_schema.States), + patch.object(core, "Events", old_db_schema.Events), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), + patch( + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with 
session_scope(hass=hass) as session: + session.add(old_db_schema.Events.from_event(custom_event)) + session.add(old_db_schema.States.from_event(state_changed_event)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + await instance.async_add_executor_job( + migration._drop_index, + instance.get_session, + "states", + "ix_states_event_id", + ) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert ( + await instance.async_add_executor_job(_get_event_id_foreign_keys) + is not None + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert "ix_states_entity_id_last_updated_ts" in states_index_names + + # Simulate out of disk space while rebuilding the states table by + # - patching CreateTable to raise SQLAlchemyError for SQLite + # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL + with ( + patch( + "homeassistant.components.recorder.migration.CreateTable", + side_effect=SQLAlchemyError, + ), + patch( + "homeassistant.components.recorder.migration.DropConstraint", + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() # We need to wait for all the migration tasks to complete # before we can check the database. 
for _ in range(number_of_migrations): - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() await async_wait_recording_done(hass) - states_indexes = await recorder.get_instance(hass).async_add_executor_job( + states_indexes = await instance.async_add_executor_job( _get_states_index_names ) states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert "Error recreating SQLite table states" in caplog.text + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) + + await hass.async_stop() + + # Now run it again to verify the table rebuild tries again + caplog.clear() + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. + for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is False + assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + assert "Rebuilding SQLite table states finished" in caplog.text + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None + + await hass.async_stop() + + +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.parametrize("enable_migrate_event_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_out_of_disk_space_while_removing_foreign_key( + async_test_recorder: RecorderInstanceGenerator, + caplog: 
pytest.LogCaptureFixture, + recorder_db_url: str, +) -> None: + """Test that we can recover from out of disk space while removing the foreign key. + + This case tests the migration still happens if + ix_states_event_id is removed from the states table. + + Note that the test is somewhat forced; the states.event_id foreign key constraint is + removed when migrating to schema version 44, inspecting the schema in + cleanup_legacy_states_event_ids is not likely to fail. + """ + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + now = dt_util.utcnow() + one_second_past = now - timedelta(seconds=1) + mock_state = State( + "sensor.test", + "old", + {"last_reset": now.isoformat()}, + last_changed=one_second_past, + last_updated=now, + ) + state_changed_event = Event( + EVENT_STATE_CHANGED, + { + "entity_id": "sensor.test", + "old_state": None, + "new_state": mock_state, + }, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + custom_event = Event( + "custom_event", + {"entity_id": "sensor.custom"}, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + number_of_migrations = 5 + + def _get_event_id_foreign_keys(): + assert instance.engine is not None + return next( + ( + fk # type: ignore[misc] + for fk in inspect(instance.engine).get_foreign_keys("states") + if fk["constrained_columns"] == ["event_id"] + ), + None, + ) + + def _get_states_index_names(): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes("states") + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), + patch.object(core, "StatesMeta", old_db_schema.StatesMeta), + patch.object(core, "EventTypes", old_db_schema.EventTypes), + patch.object(core, "EventData", old_db_schema.EventData), + patch.object(core, "States", old_db_schema.States), + patch.object(core, "Events", old_db_schema.Events), + 
patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), + patch( + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with session_scope(hass=hass) as session: + session.add(old_db_schema.Events.from_event(custom_event)) + session.add(old_db_schema.States.from_event(state_changed_event)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + await instance.async_add_executor_job( + migration._drop_index, + instance.get_session, + "states", + "ix_states_event_id", + ) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert ( + await instance.async_add_executor_job(_get_event_id_foreign_keys) + is not None + ) + await hass.async_stop() await hass.async_block_till_done() assert "ix_states_entity_id_last_updated_ts" in states_index_names async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + with instrument_migration(hass) as instrumented_migration: + # Allow migration to start, but stall when live migration is completed + instrumented_migration.migration_stall.set() + instrumented_migration.live_migration_done_stall.clear() + + async with async_test_recorder(hass, wait_recorder=False) as instance: + await hass.async_block_till_done() + + # Wait for live migration to complete + await hass.async_add_executor_job( + 
instrumented_migration.live_migration_done.wait + ) + + # Simulate out of disk space while removing the foreign key from the states table by + # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL + with ( + patch( + "homeassistant.components.recorder.migration.sqlalchemy.inspect", + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ), + ): + instrumented_migration.live_migration_done_stall.set() + # We need to wait for all the migration tasks to complete + # before we can check the database. + for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + # The states.event_id foreign key constraint was removed when + # migration to schema version 44 + assert ( + await instance.async_add_executor_job( + _get_event_id_foreign_keys + ) + is None + ) + + await hass.async_stop() + + # Now run it again to verify the table rebuild tries again + caplog.clear() + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() # We need to wait for all the migration tasks to complete # before we can check the database. 
for _ in range(number_of_migrations): - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() await async_wait_recording_done(hass) - states_indexes = await recorder.get_instance(hass).async_add_executor_job( - _get_states_index_names - ) + states_indexes = await instance.async_add_executor_job(_get_states_index_names) states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is False assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None await hass.async_stop() diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index cc187a1e6ad..8efbf226bc1 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -3,7 +3,7 @@ import datetime from datetime import timedelta from statistics import fmean -import threading +import sys from unittest.mock import ANY, patch from freezegun import freeze_time @@ -35,11 +35,21 @@ from .common import ( async_wait_recording_done, create_engine_test, do_adhoc_statistics, + get_start_time, statistics_during_period, ) +from .conftest import InstrumentedMigration from tests.common import async_fire_time_changed -from tests.typing import WebSocketGenerator +from tests.typing import RecorderInstanceGenerator, WebSocketGenerator + + +@pytest.fixture +async def mock_recorder_before_hass( + async_setup_recorder_instance: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + DISTANCE_SENSOR_FT_ATTRIBUTES = { "device_class": "distance", @@ -146,12 +156,17 @@ async def test_statistics_during_period( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistics_during_period.""" - now = dt_util.utcnow() + now = 
get_start_time(dt_util.utcnow()) hass.config.units = US_CUSTOMARY_SYSTEM await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", 10, attributes=POWER_SENSOR_KW_ATTRIBUTES) + hass.states.async_set( + "sensor.test", + 10, + attributes=POWER_SENSOR_KW_ATTRIBUTES, + timestamp=now.timestamp(), + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -599,7 +614,12 @@ async def test_statistic_during_period( } # Test we can automatically convert units - hass.states.async_set("sensor.test", None, attributes=ENERGY_SENSOR_WH_ATTRIBUTES) + hass.states.async_set( + "sensor.test", + None, + attributes=ENERGY_SENSOR_WH_ATTRIBUTES, + timestamp=now.timestamp(), + ) await client.send_json_auto_id( { "type": "recorder/statistic_during_period", @@ -810,7 +830,7 @@ async def test_statistic_during_period_partial_overlap( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, - frozen_time: datetime, + frozen_time: datetime.datetime, ) -> None: """Test statistic_during_period.""" client = await hass_ws_client() @@ -1256,11 +1276,13 @@ async def test_statistics_during_period_unit_conversion( converted_value, ) -> None: """Test statistics_during_period.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -1341,12 +1363,16 @@ async def test_sum_statistics_during_period_unit_conversion( converted_value, ) -> None: """Test statistics_during_period.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - 
hass.states.async_set("sensor.test", 0, attributes=attributes) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", 0, attributes=attributes, timestamp=now.timestamp() + ) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -1462,7 +1488,7 @@ async def test_statistics_during_period_in_the_past( ) -> None: """Test statistics_during_period in the past.""" await hass.config.async_set_time_zone("UTC") - now = dt_util.utcnow().replace() + now = get_start_time(dt_util.utcnow()) hass.config.units = US_CUSTOMARY_SYSTEM await async_setup_component(hass, "sensor", {}) @@ -1717,7 +1743,7 @@ async def test_list_statistic_ids( unit_class, ) -> None: """Test list_statistic_ids.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -1731,7 +1757,9 @@ async def test_list_statistic_ids( assert response["success"] assert response["result"] == [] - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) await client.send_json_auto_id({"type": "recorder/list_statistic_ids"}) @@ -1881,7 +1909,7 @@ async def test_list_statistic_ids_unit_change( unit_class, ) -> None: """Test list_statistic_ids.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -1894,7 +1922,9 @@ async def test_list_statistic_ids_unit_change( assert response["success"] assert response["result"] == [] - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) 
do_adhoc_statistics(hass, start=now) @@ -1917,7 +1947,9 @@ async def test_list_statistic_ids_unit_change( ] # Change the state unit - hass.states.async_set("sensor.test", 10, attributes=attributes2) + hass.states.async_set( + "sensor.test", 10, attributes=attributes2, timestamp=now.timestamp() + ) await client.send_json_auto_id({"type": "recorder/list_statistic_ids"}) response = await client.receive_json() @@ -1956,7 +1988,7 @@ async def test_clear_statistics( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test removing statistics.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES @@ -1966,9 +1998,15 @@ async def test_clear_statistics( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test1", state, attributes=attributes) - hass.states.async_set("sensor.test2", state * 2, attributes=attributes) - hass.states.async_set("sensor.test3", state * 3, attributes=attributes) + hass.states.async_set( + "sensor.test1", state, attributes=attributes, timestamp=now.timestamp() + ) + hass.states.async_set( + "sensor.test2", state * 2, attributes=attributes, timestamp=now.timestamp() + ) + hass.states.async_set( + "sensor.test3", state * 3, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -2079,7 +2117,7 @@ async def test_update_statistics_metadata( new_display_unit, ) -> None: """Test removing statistics.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2088,7 +2126,9 @@ async def test_update_statistics_metadata( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", 
state, attributes=attributes) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2168,7 +2208,7 @@ async def test_change_statistics_unit( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test change unit of recorded statistics.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2177,7 +2217,9 @@ async def test_change_statistics_unit( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2313,7 +2355,7 @@ async def test_change_statistics_unit_errors( caplog: pytest.LogCaptureFixture, ) -> None: """Test change unit of recorded statistics.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2367,7 +2409,9 @@ async def test_change_statistics_unit_errors( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2457,7 +2501,7 @@ async def test_recorder_info_bad_recorder_config( client = await hass_ws_client() - with patch("homeassistant.components.recorder.migration.migrate_schema"): + with 
patch("homeassistant.components.recorder.migration._migrate_schema"): recorder_helper.async_initialize_recorder(hass) assert not await async_setup_component( hass, recorder.DOMAIN, {recorder.DOMAIN: config} @@ -2482,7 +2526,7 @@ async def test_recorder_info_no_instance( client = await hass_ws_client() with patch( - "homeassistant.components.recorder.websocket_api.get_instance", + "homeassistant.components.recorder.basic_websocket_api.get_instance", return_value=None, ): await client.send_json_auto_id({"type": "recorder/info"}) @@ -2493,70 +2537,60 @@ async def test_recorder_info_no_instance( async def test_recorder_info_migration_queue_exhausted( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + async_test_recorder: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, ) -> None: """Test getting recorder status when recorder queue is exhausted.""" assert recorder.util.async_migration_in_progress(hass) is False - migration_done = threading.Event() - - real_migration = recorder.migration._apply_update - - def stalled_migration(*args): - """Make migration stall.""" - nonlocal migration_done - migration_done.wait() - return real_migration(*args) - with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), - patch("homeassistant.components.recorder.Recorder.async_periodic_statistics"), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), - patch( - "homeassistant.components.recorder.migration._apply_update", - wraps=stalled_migration, + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize ), ): - recorder_helper.async_initialize_recorder(hass) - hass.create_task( - async_setup_component( - hass, "recorder", {"recorder": {"db_url": "sqlite://"}} + async 
with async_test_recorder( + hass, wait_recorder=False, wait_recorder_setup=False + ): + await hass.async_add_executor_job( + instrument_migration.migration_started.wait ) - ) - await recorder_helper.async_wait_recorder(hass) - hass.states.async_set("my.entity", "on", {}) - await hass.async_block_till_done() + assert recorder.util.async_migration_in_progress(hass) is True + await recorder_helper.async_wait_recorder(hass) + hass.states.async_set("my.entity", "on", {}) + await hass.async_block_till_done() - # Detect queue full - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=2)) - await hass.async_block_till_done() + # Detect queue full + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=2)) + await hass.async_block_till_done() - client = await hass_ws_client() + client = await hass_ws_client() - # Check the status - await client.send_json_auto_id({"type": "recorder/info"}) - response = await client.receive_json() - assert response["success"] - assert response["result"]["migration_in_progress"] is True - assert response["result"]["recording"] is False - assert response["result"]["thread_running"] is True + # Check the status + await client.send_json_auto_id({"type": "recorder/info"}) + response = await client.receive_json() + assert response["success"] + assert response["result"]["migration_in_progress"] is True + assert response["result"]["recording"] is False + assert response["result"]["thread_running"] is True - # Let migration finish - migration_done.set() - await async_wait_recording_done(hass) + # Let migration finish + instrument_migration.migration_stall.set() + await async_wait_recording_done(hass) - # Check the status after migration finished - await client.send_json_auto_id({"type": "recorder/info"}) - response = await client.receive_json() - assert response["success"] - assert response["result"]["migration_in_progress"] is False - assert response["result"]["recording"] is True - assert response["result"]["thread_running"] 
is True + # Check the status after migration finished + await client.send_json_auto_id({"type": "recorder/info"}) + response = await client.receive_json() + assert response["success"] + assert response["result"]["migration_in_progress"] is False + assert response["result"]["recording"] is True + assert response["result"]["thread_running"] is True async def test_backup_start_no_recorder( @@ -2602,7 +2636,7 @@ async def test_get_statistics_metadata( unit_class, ) -> None: """Test get_statistics_metadata.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -2681,10 +2715,14 @@ async def test_get_statistics_metadata( } ] - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) - hass.states.async_set("sensor.test2", 10, attributes=attributes) + hass.states.async_set( + "sensor.test2", 10, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) await client.send_json_auto_id( diff --git a/tests/components/reddit/test_sensor.py b/tests/components/reddit/test_sensor.py index 52dac07d621..98cf2b79db3 100644 --- a/tests/components/reddit/test_sensor.py +++ b/tests/components/reddit/test_sensor.py @@ -66,7 +66,7 @@ INVALID_SORT_BY_CONFIG = { class ObjectView: """Use dict properties as attributes.""" - def __init__(self, d): + def __init__(self, d) -> None: """Set dict as internal dict.""" self.__dict__ = d diff --git a/tests/components/refoss/conftest.py b/tests/components/refoss/conftest.py index 80b3f4d8b75..5ded3e9489d 100644 --- a/tests/components/refoss/conftest.py +++ b/tests/components/refoss/conftest.py @@ -1,9 +1,9 @@ """Pytest module configuration.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator 
@pytest.fixture diff --git a/tests/components/remote/test_device_action.py b/tests/components/remote/test_device_action.py index a6e890937b5..e224fcf4939 100644 --- a/tests/components/remote/test_device_action.py +++ b/tests/components/remote/test_device_action.py @@ -7,7 +7,7 @@ from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.remote import DOMAIN from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -24,12 +24,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -114,7 +108,6 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -189,7 +182,6 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/remote/test_device_condition.py b/tests/components/remote/test_device_condition.py index d13a0480355..6c9334aeac4 100644 --- a/tests/components/remote/test_device_condition.py +++ 
b/tests/components/remote/test_device_condition.py @@ -20,7 +20,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -183,7 +176,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -249,20 +242,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -270,7 +263,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: 
"""Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -315,13 +308,13 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -329,7 +322,7 @@ async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for firing if condition is on with delay.""" point1 = dt_util.utcnow() @@ -378,26 +371,26 @@ async def test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_off event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/remote/test_device_trigger.py 
b/tests/components/remote/test_device_trigger.py index 8a1a0c318d7..c647faba2c1 100644 --- a/tests/components/remote/test_device_trigger.py +++ b/tests/components/remote/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -181,7 +174,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -267,20 +260,20 @@ async def test_if_fires_on_state_change( ] }, ) - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - 
{entry.entity_id} - off - on - None", } @@ -291,7 +284,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -335,13 +328,13 @@ async def test_if_fires_on_state_change_legacy( ] }, ) - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) @@ -351,7 +344,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -397,16 +390,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/renault/conftest.py b/tests/components/renault/conftest.py index a5af01b504a..9be41eb7ba0 100644 --- a/tests/components/renault/conftest.py +++ b/tests/components/renault/conftest.py @@ -1,5 +1,6 @@ 
"""Provide common Renault fixtures.""" +from collections.abc import Generator, Iterator import contextlib from types import MappingProxyType from typing import Any @@ -8,7 +9,6 @@ from unittest.mock import AsyncMock, patch import pytest from renault_api.kamereon import exceptions, schemas from renault_api.renault_account import RenaultAccount -from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry @@ -200,7 +200,7 @@ def patch_fixtures_with_no_data(): @contextlib.contextmanager -def _patch_fixtures_with_side_effect(side_effect: Any): +def _patch_fixtures_with_side_effect(side_effect: Any) -> Iterator[None]: """Mock fixtures.""" with ( patch( diff --git a/tests/components/renault/fixtures/hvac_status.1.json b/tests/components/renault/fixtures/hvac_status.1.json index 7cbd7a9fe37..f48cbae68ae 100644 --- a/tests/components/renault/fixtures/hvac_status.1.json +++ b/tests/components/renault/fixtures/hvac_status.1.json @@ -2,6 +2,6 @@ "data": { "type": "Car", "id": "VF1AAAAA555777999", - "attributes": { "externalTemperature": 8.0, "hvacStatus": 1 } + "attributes": { "externalTemperature": 8.0, "hvacStatus": "off" } } } diff --git a/tests/components/renault/fixtures/hvac_status.2.json b/tests/components/renault/fixtures/hvac_status.2.json index 8bb4f941e06..a2ca08a71e9 100644 --- a/tests/components/renault/fixtures/hvac_status.2.json +++ b/tests/components/renault/fixtures/hvac_status.2.json @@ -4,7 +4,7 @@ "id": "VF1AAAAA555777999", "attributes": { "socThreshold": 30.0, - "hvacStatus": 1, + "hvacStatus": "off", "lastUpdateTime": "2020-12-03T00:00:00Z" } } diff --git a/tests/components/renault/snapshots/test_binary_sensor.ambr b/tests/components/renault/snapshots/test_binary_sensor.ambr index 7f30faac38e..9dac0c323ce 100644 --- a/tests/components/renault/snapshots/test_binary_sensor.ambr +++ b/tests/components/renault/snapshots/test_binary_sensor.ambr @@ -22,8 
+22,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -320,8 +322,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -704,8 +708,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -872,8 +878,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', @@ -1298,8 +1306,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -1596,8 +1606,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -1980,8 +1992,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -2148,8 +2162,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', diff --git a/tests/components/renault/snapshots/test_button.ambr b/tests/components/renault/snapshots/test_button.ambr index 
daef84b5c0a..c4732ad1458 100644 --- a/tests/components/renault/snapshots/test_button.ambr +++ b/tests/components/renault/snapshots/test_button.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -104,8 +106,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -270,8 +274,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -436,8 +442,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', @@ -602,8 +610,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -684,8 +694,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -850,8 +862,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -1016,8 +1030,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 
'sw_version': 'X102VE', diff --git a/tests/components/renault/snapshots/test_device_tracker.ambr b/tests/components/renault/snapshots/test_device_tracker.ambr index 8fe1713dc0b..5e7813316a2 100644 --- a/tests/components/renault/snapshots/test_device_tracker.ambr +++ b/tests/components/renault/snapshots/test_device_tracker.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -105,8 +107,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -188,8 +192,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -228,8 +234,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', @@ -311,8 +319,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -397,8 +407,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -483,8 +495,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -523,8 +537,10 @@ }), 
'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', diff --git a/tests/components/renault/snapshots/test_diagnostics.ambr b/tests/components/renault/snapshots/test_diagnostics.ambr index ae90115fcb6..a2921dff35e 100644 --- a/tests/components/renault/snapshots/test_diagnostics.ambr +++ b/tests/components/renault/snapshots/test_diagnostics.ambr @@ -22,7 +22,7 @@ }), 'hvac_status': dict({ 'externalTemperature': 8.0, - 'hvacStatus': 1, + 'hvacStatus': 'off', }), 'res_state': dict({ }), @@ -227,7 +227,7 @@ }), 'hvac_status': dict({ 'externalTemperature': 8.0, - 'hvacStatus': 1, + 'hvacStatus': 'off', }), 'res_state': dict({ }), diff --git a/tests/components/renault/snapshots/test_select.ambr b/tests/components/renault/snapshots/test_select.ambr index 0722cb5cab3..ccdc76f0130 100644 --- a/tests/components/renault/snapshots/test_select.ambr +++ b/tests/components/renault/snapshots/test_select.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -62,8 +64,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -157,8 +161,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -252,8 +258,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', @@ -347,8 
+355,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -387,8 +397,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -482,8 +494,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -577,8 +591,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', diff --git a/tests/components/renault/snapshots/test_sensor.ambr b/tests/components/renault/snapshots/test_sensor.ambr index 5909c66bc5c..e4bb2d74297 100644 --- a/tests/components/renault/snapshots/test_sensor.ambr +++ b/tests/components/renault/snapshots/test_sensor.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -330,8 +332,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -1083,8 +1087,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -1832,8 +1838,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 
'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', @@ -2624,8 +2632,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -2932,8 +2942,10 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'XJB1SU', @@ -3685,8 +3697,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X101VE', @@ -4434,8 +4448,10 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'X102VE', diff --git a/tests/components/renault/test_binary_sensor.py b/tests/components/renault/test_binary_sensor.py index a0264493544..52b6de33f14 100644 --- a/tests/components/renault/test_binary_sensor.py +++ b/tests/components/renault/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for Renault binary sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_button.py b/tests/components/renault/test_button.py index bed188d8881..32c5ce651ae 100644 --- a/tests/components/renault/test_button.py +++ b/tests/components/renault/test_button.py @@ -1,11 +1,11 @@ """Tests for Renault sensors.""" +from collections.abc import Generator from unittest.mock 
import patch import pytest from renault_api.kamereon import schemas from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/renault/test_device_tracker.py b/tests/components/renault/test_device_tracker.py index d8bee097eda..39f37d12a4d 100644 --- a/tests/components/renault/test_device_tracker.py +++ b/tests/components/renault/test_device_tracker.py @@ -1,10 +1,10 @@ """Tests for Renault sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_init.py b/tests/components/renault/test_init.py index 90963fd3521..0f9d9cbaf5b 100644 --- a/tests/components/renault/test_init.py +++ b/tests/components/renault/test_init.py @@ -1,12 +1,12 @@ """Tests for Renault setup process.""" +from collections.abc import Generator from typing import Any from unittest.mock import Mock, patch import aiohttp import pytest from renault_api.gigya.exceptions import GigyaException, InvalidCredentialsException -from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigEntryState diff --git a/tests/components/renault/test_select.py b/tests/components/renault/test_select.py index 0577966d514..7b589d86863 100644 --- a/tests/components/renault/test_select.py +++ b/tests/components/renault/test_select.py @@ -1,11 +1,11 @@ """Tests for Renault selects.""" +from collections.abc import Generator from unittest.mock import patch import pytest from renault_api.kamereon import schemas from syrupy.assertion import SnapshotAssertion -from typing_extensions 
import Generator from homeassistant.components.select import ( ATTR_OPTION, diff --git a/tests/components/renault/test_sensor.py b/tests/components/renault/test_sensor.py index 7e8e4f24c77..d69ab5c0b7f 100644 --- a/tests/components/renault/test_sensor.py +++ b/tests/components/renault/test_sensor.py @@ -1,10 +1,10 @@ """Tests for Renault sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_services.py b/tests/components/renault/test_services.py index d30626e4117..4e3460b9afa 100644 --- a/tests/components/renault/test_services.py +++ b/tests/components/renault/test_services.py @@ -1,5 +1,6 @@ """Tests for Renault sensors.""" +from collections.abc import Generator from datetime import datetime from unittest.mock import patch @@ -7,7 +8,6 @@ import pytest from renault_api.exceptions import RenaultException from renault_api.kamereon import schemas from renault_api.kamereon.models import ChargeSchedule -from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.components.renault.services import ( diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index 3541aa1f856..ddea36cb292 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -1,9 +1,10 @@ """Setup the Reolink tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator +from reolink_aio.api import Chime from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL @@ -35,6 +36,7 @@ TEST_NVR_NAME = "test_reolink_name" TEST_NVR_NAME2 = "test2_reolink_name" 
TEST_USE_HTTPS = True TEST_HOST_MODEL = "RLN8-410" +TEST_ITEM_NUMBER = "P000" TEST_CAM_MODEL = "RLC-123" @@ -51,10 +53,6 @@ def mock_setup_entry() -> Generator[AsyncMock]: def reolink_connect_class() -> Generator[MagicMock]: """Mock reolink connection and return both the host_mock and host_mock_class.""" with ( - patch( - "homeassistant.components.reolink.host.webhook.async_register", - return_value=True, - ), patch( "homeassistant.components.reolink.host.Host", autospec=True ) as host_mock_class, @@ -83,10 +81,12 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.sw_version = "v1.0.0.0.0.0000" host_mock.manufacturer = "Reolink" host_mock.model = TEST_HOST_MODEL + host_mock.item_number = TEST_ITEM_NUMBER host_mock.camera_model.return_value = TEST_CAM_MODEL host_mock.camera_name.return_value = TEST_NVR_NAME host_mock.camera_hardware_version.return_value = "IPC_00001" host_mock.camera_sw_version.return_value = "v1.1.0.0.0.0000" + host_mock.camera_sw_version_update_required.return_value = False host_mock.camera_uid.return_value = TEST_UID_CAM host_mock.channel_for_uid.return_value = 0 host_mock.get_encoding.return_value = "h264" @@ -104,6 +104,14 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.capabilities = {"Host": ["RTSP"], "0": ["motion_detection"]} host_mock.checked_api_versions = {"GetEvents": 1} host_mock.abilities = {"abilityChn": [{"aiTrack": {"permit": 0, "ver": 0}}]} + + # enums + host_mock.whiteled_mode.return_value = 1 + host_mock.whiteled_mode_list.return_value = ["off", "auto"] + host_mock.doorbell_led.return_value = "Off" + host_mock.doorbell_led_list.return_value = ["stayoff", "auto"] + host_mock.auto_track_method.return_value = 3 + host_mock.daynight_state.return_value = "Black&White" yield host_mock_class @@ -142,3 +150,26 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: ) config_entry.add_to_hass(hass) return config_entry + + +@pytest.fixture +def test_chime(reolink_connect: MagicMock) -> None: + """Mock 
a reolink chime.""" + TEST_CHIME = Chime( + host=reolink_connect, + dev_id=12345678, + channel=0, + ) + TEST_CHIME.name = "Test chime" + TEST_CHIME.volume = 3 + TEST_CHIME.connect_state = 2 + TEST_CHIME.led_state = True + TEST_CHIME.event_info = { + "md": {"switch": 0, "musicId": 0}, + "people": {"switch": 0, "musicId": 1}, + "visitor": {"switch": 1, "musicId": 2}, + } + + reolink_connect.chime_list = [TEST_CHIME] + reolink_connect.chime.return_value = TEST_CHIME + return TEST_CHIME diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py new file mode 100644 index 00000000000..e02742afe1d --- /dev/null +++ b/tests/components/reolink/test_binary_sensor.py @@ -0,0 +1,52 @@ +"""Test the Reolink binary sensor platform.""" + +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL, const +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import TEST_NVR_NAME, TEST_UID + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import ClientSessionGenerator + + +async def test_motion_sensor( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensor entity with motion sensor.""" + reolink_connect.model = "Reolink Duo PoE" + reolink_connect.motion_detected.return_value = True + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED 
+ + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion_lens_0" + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.motion_detected.return_value = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + # test webhook callback + reolink_connect.motion_detected.return_value = True + reolink_connect.ONVIF_event_callback.return_value = [0] + webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + + assert hass.states.get(entity_id).state == STATE_ON diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index de1e7a0bc83..55dd0d4fea9 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Reolink config flow.""" -from datetime import timedelta import json from typing import Any from unittest.mock import AsyncMock, MagicMock, call +from freezegun.api import FrozenDateTimeFactory import pytest from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkError @@ -25,7 +25,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.device_registry import format_mac -from homeassistant.util.dt import utcnow from .conftest import ( DHCP_FORMATTED_MAC, @@ -166,8 +165,23 @@ async def test_config_flow_errors( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert result["errors"] == {CONF_HOST: "invalid_auth"} + assert result["errors"] == {CONF_PASSWORD: "invalid_auth"} + reolink_connect.valid_password.return_value = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: 
TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_HOST: TEST_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {CONF_PASSWORD: "password_incompatible"} + + reolink_connect.valid_password.return_value = True reolink_connect.get_host_data.side_effect = ApiError("Test error") result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -397,7 +411,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No None, None, TEST_HOST2, - [TEST_HOST, TEST_HOST2, TEST_HOST2], + [TEST_HOST, TEST_HOST2], ), ( True, @@ -424,6 +438,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No ) async def test_dhcp_ip_update( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, reolink_connect_class: MagicMock, reolink_connect: MagicMock, last_update_success: bool, @@ -457,9 +472,8 @@ async def test_dhcp_ip_update( if not last_update_success: # ensure the last_update_succes is False for the device_coordinator. 
reolink_connect.get_states = AsyncMock(side_effect=ReolinkError("Test error")) - async_fire_time_changed( - hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(minutes=1) - ) + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() dhcp_data = dhcp.DhcpServiceInfo( @@ -475,8 +489,8 @@ async def test_dhcp_ip_update( const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data ) - expected_calls = [ - call( + for host in host_call_list: + expected_call = call( host, TEST_USERNAME, TEST_PASSWORD, @@ -485,10 +499,10 @@ async def test_dhcp_ip_update( protocol=DEFAULT_PROTOCOL, timeout=DEFAULT_TIMEOUT, ) - for host in host_call_list - ] + assert expected_call in reolink_connect_class.call_args_list - assert reolink_connect_class.call_args_list == expected_calls + for exc_call in reolink_connect_class.call_args_list: + assert exc_call[0][0] in host_call_list assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py new file mode 100644 index 00000000000..690bfd035f8 --- /dev/null +++ b/tests/components/reolink/test_host.py @@ -0,0 +1,85 @@ +"""Test the Reolink host.""" + +from asyncio import CancelledError +from unittest.mock import AsyncMock, MagicMock + +from aiohttp import ClientResponseError +import pytest + +from homeassistant.components.reolink import const +from homeassistant.components.webhook import async_handle_webhook +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.util.aiohttp import MockRequest + +from .conftest import TEST_UID + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_webhook_callback( + hass: 
HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test webhook callback with motion sensor.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + + signal_all = MagicMock() + signal_ch = MagicMock() + async_dispatcher_connect(hass, f"{webhook_id}_all", signal_all) + async_dispatcher_connect(hass, f"{webhook_id}_0", signal_ch) + + client = await hass_client_no_auth() + + # test webhook callback success all channels + reolink_connect.ONVIF_event_callback.return_value = None + await client.post(f"/api/webhook/{webhook_id}") + signal_all.assert_called_once() + + # test webhook callback all channels with failure to read motion_state + signal_all.reset_mock() + reolink_connect.get_motion_state_all_ch.return_value = False + await client.post(f"/api/webhook/{webhook_id}") + signal_all.assert_not_called() + + # test webhook callback success single channel + reolink_connect.ONVIF_event_callback.return_value = [0] + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + signal_ch.assert_called_once() + + # test webhook callback single channel with error in event callback + signal_ch.reset_mock() + reolink_connect.ONVIF_event_callback = AsyncMock( + side_effect=Exception("Test error") + ) + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + signal_ch.assert_not_called() + + # test failure to read date from webhook post + request = MockRequest( + method="POST", + content=bytes("test", "utf-8"), + mock_source="test", + ) + request.read = AsyncMock(side_effect=ConnectionResetError("Test error")) + await async_handle_webhook(hass, webhook_id, request) + signal_all.assert_not_called() + + request.read = 
AsyncMock(side_effect=ClientResponseError("Test error", "Test")) + await async_handle_webhook(hass, webhook_id, request) + signal_all.assert_not_called() + + request.read = AsyncMock(side_effect=CancelledError("Test error")) + with pytest.raises(CancelledError): + await async_handle_webhook(hass, webhook_id, request) + signal_all.assert_not_called() diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 466836e52ef..f5cd56a05d2 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -1,24 +1,30 @@ """Test the Reolink init.""" -from datetime import timedelta +import asyncio from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest +from reolink_aio.api import Chime from reolink_aio.exceptions import CredentialsInvalidError, ReolinkError -from homeassistant.components.reolink import FIRMWARE_UPDATE_INTERVAL, const +from homeassistant.components.reolink import ( + DEVICE_UPDATE_INTERVAL, + FIRMWARE_UPDATE_INTERVAL, + NUM_CRED_ERRORS, + const, +) from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.helpers import ( device_registry as dr, entity_registry as er, issue_registry as ir, ) from homeassistant.setup import async_setup_component -from homeassistant.util.dt import utcnow from .conftest import ( TEST_CAM_MODEL, @@ -30,9 +36,17 @@ from .conftest import ( ) from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import WebSocketGenerator pytestmark = pytest.mark.usefixtures("reolink_connect", "reolink_platforms") +CHIME_MODEL = "Reolink Chime" + + +async def test_wait(*args, **key_args) -> 
None: + """Ensure a mocked function takes a bit of time to be able to timeout in test.""" + await asyncio.sleep(0) + @pytest.mark.parametrize( ("attr", "value", "expected"), @@ -58,7 +72,7 @@ pytestmark = pytest.mark.usefixtures("reolink_connect", "reolink_platforms") ConfigEntryState.SETUP_RETRY, ), ( - "get_states", + "get_host_data", AsyncMock(side_effect=CredentialsInvalidError("Test error")), ConfigEntryState.SETUP_ERROR, ), @@ -89,6 +103,7 @@ async def test_failures_parametrized( async def test_firmware_error_twice( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, reolink_connect: MagicMock, config_entry: MockConfigEntry, ) -> None: @@ -97,20 +112,46 @@ async def test_firmware_error_twice( side_effect=ReolinkError("Test error") ) with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.UPDATE}.{TEST_NVR_NAME}_firmware" - assert hass.states.is_state(entity_id, STATE_OFF) + assert hass.states.get(entity_id).state == STATE_OFF - async_fire_time_changed( - hass, utcnow() + FIRMWARE_UPDATE_INTERVAL + timedelta(minutes=1) - ) + freezer.tick(FIRMWARE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.is_state(entity_id, STATE_UNAVAILABLE) + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + +async def test_credential_error_three( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test when the update gives credential error 3 times.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await 
hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + reolink_connect.get_states = AsyncMock( + side_effect=CredentialsInvalidError("Test error") + ) + + issue_id = f"config_entry_reauth_{const.DOMAIN}_{config_entry.entry_id}" + for _ in range(NUM_CRED_ERRORS): + assert (HOMEASSISTANT_DOMAIN, issue_id) not in issue_registry.issues + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (HOMEASSISTANT_DOMAIN, issue_id) in issue_registry.issues async def test_entry_reloading( @@ -141,16 +182,27 @@ async def test_entry_reloading( None, [TEST_HOST_MODEL, TEST_CAM_MODEL], ), + ( + "is_nvr", + False, + [TEST_HOST_MODEL, TEST_CAM_MODEL], + ), ("channels", [], [TEST_HOST_MODEL]), ( - "camera_model", - Mock(return_value="RLC-567"), - [TEST_HOST_MODEL, "RLC-567"], + "camera_online", + Mock(return_value=False), + [TEST_HOST_MODEL], + ), + ( + "channel_for_uid", + Mock(return_value=-1), + [TEST_HOST_MODEL], ), ], ) -async def test_cleanup_disconnected_cams( +async def test_removing_disconnected_cams( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, config_entry: MockConfigEntry, reolink_connect: MagicMock, device_registry: dr.DeviceRegistry, @@ -159,8 +211,10 @@ async def test_cleanup_disconnected_cams( value: Any, expected_models: list[str], ) -> None: - """Test device and entity registry are cleaned up when camera is disconnected from NVR.""" + """Test device and entity registry are cleaned up when camera is removed.""" reolink_connect.channels = [0] + assert await async_setup_component(hass, "config", {}) + client = await hass_ws_client(hass) # setup CH 0 and NVR switch entities/device with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -172,11 +226,87 @@ async def test_cleanup_disconnected_cams( device_models = [device.model for device in device_entries] assert 
sorted(device_models) == sorted([TEST_HOST_MODEL, TEST_CAM_MODEL]) - # reload integration after 'disconnecting' a camera. + # Try to remove the device after 'disconnecting' a camera. if attr is not None: setattr(reolink_connect, attr, value) + expected_success = TEST_CAM_MODEL not in expected_models + for device in device_entries: + if device.model == TEST_CAM_MODEL: + response = await client.remove_device(device.id, config_entry.entry_id) + assert response["success"] == expected_success + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + device_models = [device.model for device in device_entries] + assert sorted(device_models) == sorted(expected_models) + + +@pytest.mark.parametrize( + ("attr", "value", "expected_models"), + [ + ( + None, + None, + [TEST_HOST_MODEL, TEST_CAM_MODEL, CHIME_MODEL], + ), + ( + "connect_state", + -1, + [TEST_HOST_MODEL, TEST_CAM_MODEL], + ), + ( + "remove", + -1, + [TEST_HOST_MODEL, TEST_CAM_MODEL], + ), + ], +) +async def test_removing_chime( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + attr: str | None, + value: Any, + expected_models: list[str], +) -> None: + """Test removing a chime.""" + reolink_connect.channels = [0] + assert await async_setup_component(hass, "config", {}) + client = await hass_ws_client(hass) + # setup CH 0 and NVR switch entities/device with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_reload(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + device_models = [device.model for device in device_entries] + assert sorted(device_models) == 
sorted( + [TEST_HOST_MODEL, TEST_CAM_MODEL, CHIME_MODEL] + ) + + if attr == "remove": + + async def test_remove_chime(*args, **key_args): + """Remove chime.""" + test_chime.connect_state = -1 + + test_chime.remove = test_remove_chime + elif attr is not None: + setattr(test_chime, attr, value) + + # Try to remove the device after 'disconnecting' a chime. + expected_success = CHIME_MODEL not in expected_models + for device in device_entries: + if device.model == CHIME_MODEL: + response = await client.remove_device(device.id, config_entry.entry_id) + assert response["success"] == expected_success device_entries = dr.async_entries_for_config_entry( device_registry, config_entry.entry_id @@ -223,6 +353,15 @@ async def test_cleanup_disconnected_cams( True, False, ), + ( + f"{TEST_MAC}_chime123456789_play_ringtone", + f"{TEST_UID}_chime123456789_play_ringtone", + f"{TEST_MAC}_chime123456789", + f"{TEST_UID}_chime123456789", + Platform.SELECT, + True, + False, + ), ( f"{TEST_MAC}_0_record_audio", f"{TEST_MAC}_{TEST_UID_CAM}_record_audio", @@ -345,9 +484,13 @@ async def test_no_repair_issue( async def test_https_repair_issue( - hass: HomeAssistant, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + issue_registry: ir.IssueRegistry, ) -> None: """Test repairs issue is raised when https local url is used.""" + reolink_connect.get_states = test_wait await async_process_ha_core_config( hass, {"country": "GB", "internal_url": "https://test_homeassistant_address"} ) @@ -368,9 +511,13 @@ async def test_https_repair_issue( async def test_ssl_repair_issue( - hass: HomeAssistant, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + issue_registry: ir.IssueRegistry, ) -> None: """Test repairs issue is raised when global ssl certificate is used.""" + reolink_connect.get_states = test_wait 
assert await async_setup_component(hass, "webhook", {}) hass.config.api.use_ssl = True @@ -414,9 +561,13 @@ async def test_port_repair_issue( async def test_webhook_repair_issue( - hass: HomeAssistant, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + issue_registry: ir.IssueRegistry, ) -> None: """Test repairs issue is raised when the webhook url is unreachable.""" + reolink_connect.get_states = test_wait with ( patch("homeassistant.components.reolink.host.FIRST_ONVIF_TIMEOUT", new=0), patch( @@ -439,7 +590,7 @@ async def test_firmware_repair_issue( issue_registry: ir.IssueRegistry, ) -> None: """Test firmware issue is raised when too old firmware is used.""" - reolink_connect.sw_version_update_required = True + reolink_connect.camera_sw_version_update_required.return_value = True assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index 0d86106e8e5..b09c267fcfd 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -54,6 +54,7 @@ TEST_FILE_NAME = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00" TEST_FILE_NAME_MP4 = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00.mp4" TEST_STREAM = "main" TEST_CHANNEL = "0" +TEST_CAM_NAME = "Cam new name" TEST_MIME_TYPE = "application/x-mpegURL" TEST_MIME_TYPE_MP4 = "video/mp4" @@ -130,6 +131,7 @@ async def test_browsing( """Test browsing the Reolink three.""" entry_id = config_entry.entry_id reolink_connect.api_version.return_value = 1 + reolink_connect.model = "Reolink TrackMix PoE" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(entry_id) is True @@ -137,7 +139,7 @@ async def test_browsing( entries = 
dr.async_entries_for_config_entry(device_registry, entry_id) assert len(entries) > 0 - device_registry.async_update_device(entries[0].id, name_by_user="Cam new name") + device_registry.async_update_device(entries[0].id, name_by_user=TEST_CAM_NAME) caplog.set_level(logging.DEBUG) @@ -149,6 +151,7 @@ async def test_browsing( assert browse.title == "Reolink" assert browse.identifier is None assert browse.children[0].identifier == browse_root_id + assert browse.children[0].title == f"{TEST_CAM_NAME} lens 0" # browse resolution select browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_root_id}") @@ -272,7 +275,7 @@ async def test_browsing_rec_playback_unsupported( reolink_connect.api_version.return_value = 0 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # browse root @@ -293,7 +296,7 @@ async def test_browsing_errors( reolink_connect.api_version.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # browse root @@ -312,7 +315,7 @@ async def test_browsing_not_loaded( reolink_connect.api_version.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() reolink_connect.get_host_data = AsyncMock(side_effect=ReolinkError("Test error")) diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py new file mode 100644 index 00000000000..5536e85afb9 --- /dev/null +++ 
b/tests/components/reolink/test_select.py @@ -0,0 +1,157 @@ +"""Test the Reolink select platform.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.select import DOMAIN as SELECT_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_SELECT_OPTION, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_floodlight_mode_select( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test select entity with floodlight_mode.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_floodlight_mode" + assert hass.states.get(entity_id).state == "auto" + + reolink_connect.set_whiteled = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + reolink_connect.set_whiteled.assert_called_once() + + reolink_connect.set_whiteled = AsyncMock(side_effect=ReolinkError("Test error")) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SELECT_DOMAIN, + 
SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + reolink_connect.set_whiteled = AsyncMock( + side_effect=InvalidParameterError("Test error") + ) + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + reolink_connect.whiteled_mode.return_value = -99 # invalid value + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_UNKNOWN + + +async def test_play_quick_reply_message( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test select play_quick_reply_message entity.""" + reolink_connect.quick_reply_dict.return_value = {0: "off", 1: "test message"} + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_play_quick_reply_message" + assert hass.states.get(entity_id).state == STATE_UNKNOWN + + reolink_connect.play_quick_reply = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "test message"}, + blocking=True, + ) + reolink_connect.play_quick_reply.assert_called_once() + + +async def test_chime_select( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, + entity_registry: er.EntityRegistry, +) -> None: + """Test chime select entity.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await 
hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" + assert hass.states.get(entity_id).state == "pianokey" + + test_chime.set_tone = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + test_chime.set_tone.assert_called_once() + + test_chime.set_tone = AsyncMock(side_effect=ReolinkError("Test error")) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + test_chime.set_tone = AsyncMock(side_effect=InvalidParameterError("Test error")) + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + test_chime.event_info = {} + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_UNKNOWN diff --git a/tests/components/reolink/test_switch.py b/tests/components/reolink/test_switch.py new file mode 100644 index 00000000000..ebf805b593d --- /dev/null +++ b/tests/components/reolink/test_switch.py @@ -0,0 +1,81 @@ +"""Test the Reolink switch platform.""" + +from unittest.mock import MagicMock, patch + +from homeassistant.components.reolink import const +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir + +from .conftest import TEST_UID + +from tests.common import MockConfigEntry + + +async def test_cleanup_hdr_switch_( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test cleanup of the HDR switch 
entity.""" + original_id = f"{TEST_UID}_hdr" + domain = Platform.SWITCH + + reolink_connect.channels = [0] + reolink_connect.supported.return_value = True + + entity_registry.async_get_or_create( + domain=domain, + platform=const.DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + + # setup CH 0 and host entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [domain]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) is None + ) + + +async def test_hdr_switch_deprecated_repair_issue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test repairs issue is raised when hdr switch entity used.""" + original_id = f"{TEST_UID}_hdr" + domain = Platform.SWITCH + + reolink_connect.channels = [0] + reolink_connect.supported.return_value = True + + entity_registry.async_get_or_create( + domain=domain, + platform=const.DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=None, + ) + + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + + # setup CH 0 and host entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [domain]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + + assert (const.DOMAIN, "hdr_switch_deprecated") in issue_registry.issues diff --git a/tests/components/repairs/test_websocket_api.py b/tests/components/repairs/test_websocket_api.py index 
60d0364b985..dcc6932cf4a 100644 --- a/tests/components/repairs/test_websocket_api.py +++ b/tests/components/repairs/test_websocket_api.py @@ -115,7 +115,7 @@ class MockFixFlowAbort(RepairsFlow): @pytest.fixture(autouse=True) -async def mock_repairs_integration(hass): +async def mock_repairs_integration(hass: HomeAssistant) -> None: """Mock a repairs integration.""" hass.config.components.add("fake_integration") diff --git a/tests/components/rest/test_init.py b/tests/components/rest/test_init.py index 0fda89cc329..02dfe6364ff 100644 --- a/tests/components/rest/test_init.py +++ b/tests/components/rest/test_init.py @@ -16,7 +16,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, UnitOfInformation, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -468,7 +468,7 @@ async def test_config_schema_via_packages(hass: HomeAssistant) -> None: "pack_11": {"rest": {"resource": "http://url1"}}, "pack_list": {"rest": [{"resource": "http://url2"}]}, } - config = {hass_config.HA_DOMAIN: {hass_config.CONF_PACKAGES: packages}} + config = {HOMEASSISTANT_DOMAIN: {hass_config.CONF_PACKAGES: packages}} await hass_config.merge_packages_config(hass, config, packages) assert len(config) == 2 diff --git a/tests/components/rflink/test_binary_sensor.py b/tests/components/rflink/test_binary_sensor.py index c92eaa30fe8..9329edb3a00 100644 --- a/tests/components/rflink/test_binary_sensor.py +++ b/tests/components/rflink/test_binary_sensor.py @@ -7,6 +7,7 @@ automatic sensor creation. 
from datetime import timedelta from freezegun import freeze_time +import pytest from homeassistant.components.rflink import CONF_RECONNECT_INTERVAL from homeassistant.const import ( @@ -45,7 +46,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the rflink sensor component.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -84,7 +87,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get("binary_sensor.test").state == STATE_OFF -async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: +async def test_entity_availability( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """If Rflink device is disconnected, entities should become unavailable.""" # Make sure Rflink mock does not 'recover' to quickly from the # disconnect or else the unavailability cannot be measured @@ -125,7 +130,7 @@ async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get("binary_sensor.test").state == STATE_ON -async def test_off_delay(hass: HomeAssistant, monkeypatch) -> None: +async def test_off_delay(hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch) -> None: """Test off_delay option.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -188,7 +193,9 @@ async def test_off_delay(hass: HomeAssistant, monkeypatch) -> None: assert len(events) == 3 -async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: +async def test_restore_state( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" mock_restore_cache( hass, (State(f"{DOMAIN}.test", STATE_ON), State(f"{DOMAIN}.test2", STATE_ON)) diff --git 
a/tests/components/rflink/test_cover.py b/tests/components/rflink/test_cover.py index 0829fddef51..0f14e76620f 100644 --- a/tests/components/rflink/test_cover.py +++ b/tests/components/rflink/test_cover.py @@ -5,6 +5,8 @@ control of RFLink cover devices. """ +import pytest + from homeassistant.components.rflink import EVENT_BUTTON_PRESSED from homeassistant.const import ( ATTR_ENTITY_ID, @@ -37,7 +39,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the RFLink cover component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -107,7 +111,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_args_list[1][0][1] == "UP" -async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: +async def test_firing_bus_event( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Incoming RFLink command events should be put on the HA event bus.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -142,7 +148,9 @@ async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: assert calls[0].data == {"state": "down", "entity_id": f"{DOMAIN}.test"} -async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Command should be sent amount of configured repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -180,7 +188,9 @@ async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_count == 5 -async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions_alternation( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) 
-> None: """Simultaneously switching entities must alternate repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -211,7 +221,9 @@ async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) assert protocol.send_command_ack.call_args_list[3][0][0] == "protocol_0_1" -async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions_cancelling( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Cancel outstanding repetitions when state changed.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -240,7 +252,9 @@ async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) - assert protocol.send_command_ack.call_args_list[3][0][1] == "UP" -async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_group_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -270,7 +284,9 @@ async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -303,7 +319,9 @@ async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_device_id( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -331,7 +349,9 @@ async def test_nogroup_device_id(hass: 
HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: +async def test_restore_state( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -377,7 +397,9 @@ async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: # The code checks the ID, it will use the # 'inverted' class when the name starts with # 'newkaku' -async def test_inverted_cover(hass: HomeAssistant, monkeypatch) -> None: +async def test_inverted_cover( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, diff --git a/tests/components/rflink/test_init.py b/tests/components/rflink/test_init.py index f901e46aea1..1caae302748 100644 --- a/tests/components/rflink/test_init.py +++ b/tests/components/rflink/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import Mock import pytest from voluptuous.error import MultipleInvalid -from homeassistant.bootstrap import async_setup_component from homeassistant.components.rflink import ( CONF_KEEPALIVE_IDLE, CONF_RECONNECT_INTERVAL, @@ -28,10 +27,16 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component async def mock_rflink( - hass, config, domain, monkeypatch, failures=None, failcommand=False + hass: HomeAssistant, + config, + domain, + monkeypatch: pytest.MonkeyPatch, + failures=None, + failcommand=False, ): """Create mock RFLink asyncio protocol, test component setup.""" transport, protocol = (Mock(), Mock()) @@ -77,7 +82,9 @@ async def mock_rflink( return event_callback, mock_create, protocol, disconnect_callback -async def 
test_version_banner(hass: HomeAssistant, monkeypatch) -> None: +async def test_version_banner( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test sending unknown commands doesn't cause issues.""" # use sensor domain during testing main platform domain = "sensor" @@ -102,7 +109,9 @@ async def test_version_banner(hass: HomeAssistant, monkeypatch) -> None: ) -async def test_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: +async def test_send_no_wait( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test command sending without ack.""" domain = "switch" config = { @@ -126,7 +135,9 @@ async def test_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command.call_args_list[0][0][1] == "off" -async def test_cover_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: +async def test_cover_send_no_wait( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test command sending to a cover device without ack.""" domain = "cover" config = { @@ -150,7 +161,9 @@ async def test_cover_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command.call_args_list[0][0][1] == "STOP" -async def test_send_command(hass: HomeAssistant, monkeypatch) -> None: +async def test_send_command( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test send_command service.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -168,7 +181,9 @@ async def test_send_command(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_args_list[0][0][1] == "on" -async def test_send_command_invalid_arguments(hass: HomeAssistant, monkeypatch) -> None: +async def test_send_command_invalid_arguments( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test send_command service.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -201,7 +216,9 @@ async def test_send_command_invalid_arguments(hass: 
HomeAssistant, monkeypatch) assert not success, "send command should not succeed for unknown command" -async def test_send_command_event_propagation(hass: HomeAssistant, monkeypatch) -> None: +async def test_send_command_event_propagation( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test event propagation for send_command service.""" domain = "light" config = { @@ -243,7 +260,9 @@ async def test_send_command_event_propagation(hass: HomeAssistant, monkeypatch) assert hass.states.get(f"{domain}.test1").state == "off" -async def test_reconnecting_after_disconnect(hass: HomeAssistant, monkeypatch) -> None: +async def test_reconnecting_after_disconnect( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """An unexpected disconnect should cause a reconnect.""" domain = "sensor" config = { @@ -267,7 +286,9 @@ async def test_reconnecting_after_disconnect(hass: HomeAssistant, monkeypatch) - assert mock_create.call_count == 2 -async def test_reconnecting_after_failure(hass: HomeAssistant, monkeypatch) -> None: +async def test_reconnecting_after_failure( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """A failure to reconnect should be retried.""" domain = "sensor" config = { @@ -294,7 +315,9 @@ async def test_reconnecting_after_failure(hass: HomeAssistant, monkeypatch) -> N assert mock_create.call_count == 3 -async def test_error_when_not_connected(hass: HomeAssistant, monkeypatch) -> None: +async def test_error_when_not_connected( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Sending command should error when not connected.""" domain = "switch" config = { @@ -324,7 +347,9 @@ async def test_error_when_not_connected(hass: HomeAssistant, monkeypatch) -> Non assert not success, "changing state should not succeed when disconnected" -async def test_async_send_command_error(hass: HomeAssistant, monkeypatch) -> None: +async def test_async_send_command_error( + hass: HomeAssistant, monkeypatch: 
pytest.MonkeyPatch +) -> None: """Sending command should error when protocol fails.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -345,7 +370,9 @@ async def test_async_send_command_error(hass: HomeAssistant, monkeypatch) -> Non assert protocol.send_command_ack.call_args_list[0][0][1] == SERVICE_TURN_OFF -async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: +async def test_race_condition( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test race condition for unknown components.""" domain = "light" config = {"rflink": {"port": "/dev/ttyABC0"}, domain: {"platform": "rflink"}} @@ -381,7 +408,7 @@ async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: assert new_sensor.state == "on" -async def test_not_connected(hass: HomeAssistant, monkeypatch) -> None: +async def test_not_connected() -> None: """Test Error when sending commands to a disconnected device.""" test_device = RflinkCommand("DUMMY_DEVICE") RflinkCommand.set_rflink_protocol(None) @@ -390,7 +417,9 @@ async def test_not_connected(hass: HomeAssistant, monkeypatch) -> None: async def test_keepalive( - hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Validate negative keepalive values.""" keepalive_value = -3 @@ -418,7 +447,9 @@ async def test_keepalive( async def test_keepalive_2( - hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Validate very short keepalive values.""" keepalive_value = 30 @@ -446,7 +477,9 @@ async def test_keepalive_2( async def test_keepalive_3( - hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Validate keepalive=0 value.""" domain = 
RFLINK_DOMAIN @@ -466,7 +499,9 @@ async def test_keepalive_3( async def test_default_keepalive( - hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Validate keepalive=0 value.""" domain = RFLINK_DOMAIN @@ -485,7 +520,9 @@ async def test_default_keepalive( async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry, monkeypatch + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Validate the device unique_id.""" diff --git a/tests/components/rflink/test_light.py b/tests/components/rflink/test_light.py index 5ee2375bc36..ceb2b19e192 100644 --- a/tests/components/rflink/test_light.py +++ b/tests/components/rflink/test_light.py @@ -5,6 +5,8 @@ control of RFLink switch devices. """ +import pytest + from homeassistant.components.light import ATTR_BRIGHTNESS from homeassistant.components.rflink import EVENT_BUTTON_PRESSED from homeassistant.const import ( @@ -38,7 +40,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the RFLink switch component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -146,7 +150,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_args_list[5][0][1] == "7" -async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: +async def test_firing_bus_event( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Incoming RFLink command events should be put on the HA event bus.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -181,7 +187,9 @@ async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: assert calls[0].data == 
{"state": "off", "entity_id": f"{DOMAIN}.test"} -async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Command should be sent amount of configured repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -237,7 +245,9 @@ async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_count == 8 -async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions_alternation( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Simultaneously switching entities must alternate repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -268,7 +278,9 @@ async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) assert protocol.send_command_ack.call_args_list[3][0][0] == "protocol_0_1" -async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions_cancelling( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Cancel outstanding repetitions when state changed.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -302,7 +314,9 @@ async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) - ] -async def test_type_toggle(hass: HomeAssistant, monkeypatch) -> None: +async def test_type_toggle( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test toggle type lights (on/on).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -347,7 +361,9 @@ async def test_type_toggle(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.toggle_test").state == "off" -async def test_set_level_command(hass: HomeAssistant, monkeypatch) -> None: +async def test_set_level_command( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test 'set_level=XX' 
events.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -434,7 +450,9 @@ async def test_set_level_command(hass: HomeAssistant, monkeypatch) -> None: assert state.attributes[ATTR_BRIGHTNESS] == 0 -async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_group_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -471,7 +489,9 @@ async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test2").state == "on" -async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -504,7 +524,9 @@ async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_device_id( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -532,7 +554,9 @@ async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: +async def test_disable_automatic_add( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """If disabled new devices should not be automatically added.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -550,7 +574,9 @@ async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: assert not hass.states.get(f"{DOMAIN}.protocol_0_0") -async def test_restore_state(hass: 
HomeAssistant, monkeypatch) -> None: +async def test_restore_state( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, diff --git a/tests/components/rflink/test_sensor.py b/tests/components/rflink/test_sensor.py index e375f3ae863..278dd45a114 100644 --- a/tests/components/rflink/test_sensor.py +++ b/tests/components/rflink/test_sensor.py @@ -5,6 +5,8 @@ automatic sensor creation. """ +import pytest + from homeassistant.components.rflink import ( CONF_RECONNECT_INTERVAL, DATA_ENTITY_LOOKUP, @@ -39,7 +41,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the rflink sensor component.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -100,7 +104,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert bat_sensor.attributes[ATTR_ICON] == "mdi:battery" -async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: +async def test_disable_automatic_add( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """If disabled new devices should not be automatically added.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -125,7 +131,9 @@ async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: assert not hass.states.get("sensor.test2") -async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: +async def test_entity_availability( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """If Rflink device is disconnected, entities should become unavailable.""" # Make sure Rflink mock does not 'recover' to quickly from the # disconnect or else the unavailability cannot be measured @@ -160,7 +168,7 @@ async def 
test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get("sensor.test").state == STATE_UNKNOWN -async def test_aliases(hass: HomeAssistant, monkeypatch) -> None: +async def test_aliases(hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch) -> None: """Validate the response to sensor's alias (with aliases).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -202,7 +210,9 @@ async def test_aliases(hass: HomeAssistant, monkeypatch) -> None: assert updated_sensor.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE -async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: +async def test_race_condition( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test race condition for unknown components.""" config = {"rflink": {"port": "/dev/ttyABC0"}, DOMAIN: {"platform": "rflink"}} tmp_entity = TMP_ENTITY.format("test3") @@ -241,7 +251,9 @@ async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: assert new_sensor.state == "ko" -async def test_sensor_attributes(hass: HomeAssistant, monkeypatch) -> None: +async def test_sensor_attributes( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Validate the sensor attributes.""" config = { diff --git a/tests/components/rflink/test_switch.py b/tests/components/rflink/test_switch.py index 705856565ae..2aab145f847 100644 --- a/tests/components/rflink/test_switch.py +++ b/tests/components/rflink/test_switch.py @@ -5,6 +5,8 @@ control of Rflink switch devices. 
""" +import pytest + from homeassistant.components.rflink import EVENT_BUTTON_PRESSED from homeassistant.const import ( ATTR_ENTITY_ID, @@ -33,7 +35,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the rflink switch component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -93,7 +97,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_args_list[1][0][1] == "on" -async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_group_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -123,7 +129,9 @@ async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -156,7 +164,9 @@ async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_device_id( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -184,7 +194,9 @@ async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_device_defaults(hass: 
HomeAssistant, monkeypatch) -> None: +async def test_device_defaults( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Event should fire if device_defaults config says so.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -216,7 +228,9 @@ async def test_device_defaults(hass: HomeAssistant, monkeypatch) -> None: assert calls[0].data == {"state": "off", "entity_id": f"{DOMAIN}.test"} -async def test_not_firing_default(hass: HomeAssistant, monkeypatch) -> None: +async def test_not_firing_default( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """By default no bus events should be fired.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -246,7 +260,9 @@ async def test_not_firing_default(hass: HomeAssistant, monkeypatch) -> None: assert not calls, "an event has been fired" -async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: +async def test_restore_state( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, diff --git a/tests/components/rflink/test_utils.py b/tests/components/rflink/test_utils.py index 170a05f8623..38804d14ecc 100644 --- a/tests/components/rflink/test_utils.py +++ b/tests/components/rflink/test_utils.py @@ -4,10 +4,9 @@ from homeassistant.components.rflink.utils import ( brightness_to_rflink, rflink_to_brightness, ) -from homeassistant.core import HomeAssistant -async def test_utils(hass: HomeAssistant, monkeypatch) -> None: +async def test_utils() -> None: """Test all utils methods.""" # test brightness_to_rflink assert brightness_to_rflink(0) == 0 diff --git a/tests/components/rfxtrx/conftest.py b/tests/components/rfxtrx/conftest.py index 88450638d6c..be5c72e6483 100644 --- a/tests/components/rfxtrx/conftest.py +++ b/tests/components/rfxtrx/conftest.py @@ -2,7 +2,9 @@ from __future__ import annotations -from unittest.mock import Mock, patch +from collections.abc import Callable, 
Coroutine, Generator +from typing import Any +from unittest.mock import MagicMock, Mock, patch from freezegun import freeze_time import pytest @@ -67,7 +69,7 @@ async def setup_rfx_test_cfg( @pytest.fixture(autouse=True) -async def transport_mock(hass): +def transport_mock() -> Generator[Mock]: """Fixture that make sure all transports are fake.""" transport = Mock(spec=RFXtrxTransport) with ( @@ -78,14 +80,14 @@ async def transport_mock(hass): @pytest.fixture(autouse=True) -async def connect_mock(hass): +def connect_mock() -> Generator[MagicMock]: """Fixture that make sure connect class is mocked.""" with patch("RFXtrx.Connect") as connect: yield connect @pytest.fixture(autouse=True, name="rfxtrx") -def rfxtrx_fixture(hass, connect_mock): +def rfxtrx_fixture(hass: HomeAssistant, connect_mock: MagicMock) -> Mock: """Fixture that cleans up threads from integration.""" rfx = Mock(spec=Connect) @@ -114,19 +116,21 @@ def rfxtrx_fixture(hass, connect_mock): @pytest.fixture(name="rfxtrx_automatic") -async def rfxtrx_automatic_fixture(hass, rfxtrx): +async def rfxtrx_automatic_fixture(hass: HomeAssistant, rfxtrx: Mock) -> Mock: """Fixture that starts up with automatic additions.""" await setup_rfx_test_cfg(hass, automatic_add=True, devices={}) return rfxtrx @pytest.fixture -async def timestep(hass): +def timestep( + hass: HomeAssistant, +) -> Generator[Callable[[int], Coroutine[Any, Any, None]]]: """Step system time forward.""" with freeze_time(utcnow()) as frozen_time: - async def delay(seconds): + async def delay(seconds: int) -> None: """Trigger delay in system.""" frozen_time.tick(delta=seconds) async_fire_time_changed(hass) diff --git a/tests/components/ridwell/conftest.py b/tests/components/ridwell/conftest.py index 32907ac8037..6ea9d91f8e9 100644 --- a/tests/components/ridwell/conftest.py +++ b/tests/components/ridwell/conftest.py @@ -1,6 +1,8 @@ """Define test fixtures for Ridwell.""" +from collections.abc import Generator from datetime import date +from typing 
import Any from unittest.mock import AsyncMock, Mock, patch from aioridwell.model import EventState, RidwellPickup, RidwellPickupEvent @@ -8,6 +10,7 @@ import pytest from homeassistant.components.ridwell.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -19,7 +22,7 @@ TEST_USER_ID = "12345" @pytest.fixture(name="account") -def account_fixture(): +def account_fixture() -> Mock: """Define a Ridwell account.""" return Mock( account_id=TEST_ACCOUNT_ID, @@ -44,7 +47,7 @@ def account_fixture(): @pytest.fixture(name="client") -def client_fixture(account): +def client_fixture(account: Mock) -> Mock: """Define an aioridwell client.""" return Mock( async_authenticate=AsyncMock(), @@ -55,7 +58,9 @@ def client_fixture(account): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -68,7 +73,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(hass): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_USERNAME: TEST_USERNAME, @@ -77,7 +82,7 @@ def config_fixture(hass): @pytest.fixture(name="mock_aioridwell") -async def mock_aioridwell_fixture(hass, client, config): +def mock_aioridwell_fixture(client: Mock, config: dict[str, Any]) -> Generator[None]: """Define a fixture to patch aioridwell.""" with ( patch( @@ -93,7 +98,9 @@ async def mock_aioridwell_fixture(hass, client, config): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_aioridwell): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_aioridwell: None +) -> None: """Define a fixture to set up ridwell.""" assert await 
hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/ridwell/test_diagnostics.py b/tests/components/ridwell/test_diagnostics.py index adfbb525283..45683bba903 100644 --- a/tests/components/ridwell/test_diagnostics.py +++ b/tests/components/ridwell/test_diagnostics.py @@ -1,6 +1,7 @@ """Test Ridwell diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -16,7 +17,6 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index 58e77184f55..cd4447c1a9a 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -1,11 +1,11 @@ """Configuration for Ring tests.""" +from collections.abc import Generator from itertools import chain from unittest.mock import AsyncMock, Mock, create_autospec, patch import pytest import ring_doorbell -from typing_extensions import Generator from homeassistant.components.ring import DOMAIN from homeassistant.const import CONF_USERNAME diff --git a/tests/components/ring/device_mocks.py b/tests/components/ring/device_mocks.py index f43370c918d..88ad37bdd36 100644 --- a/tests/components/ring/device_mocks.py +++ b/tests/components/ring/device_mocks.py @@ -142,6 +142,9 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): DOORBOT_HISTORY if device_family != "other" else INTERCOM_HISTORY ) + if has_capability(RingCapability.VIDEO): + mock_device.recording_url = MagicMock(return_value="http://dummy.url") + if has_capability(RingCapability.MOTION_DETECTION): 
mock_device.configure_mock( motion_detection=device_dict["settings"].get("motion_detection_enabled"), diff --git a/tests/components/ring/test_camera.py b/tests/components/ring/test_camera.py index 20a9ed5f0c9..49b7dc10f05 100644 --- a/tests/components/ring/test_camera.py +++ b/tests/components/ring/test_camera.py @@ -1,18 +1,33 @@ """The tests for the Ring switch platform.""" -from unittest.mock import PropertyMock +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch +from aiohttp.test_utils import make_mocked_request +from freezegun.api import FrozenDateTimeFactory import pytest import ring_doorbell +from homeassistant.components import camera +from homeassistant.components.ring.camera import FORCE_REFRESH_INTERVAL +from homeassistant.components.ring.const import SCAN_INTERVAL from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from homeassistant.util.aiohttp import MockStreamReader from .common import setup_platform +from tests.common import async_fire_time_changed + +SMALLEST_VALID_JPEG = ( + "ffd8ffe000104a46494600010101004800480000ffdb00430003020202020203020202030303030406040404040408060" + "6050609080a0a090809090a0c0f0c0a0b0e0b09090d110d0e0f101011100a0c12131210130f101010ffc9000b08000100" + "0101011100ffcc000600101005ffda0008010100003f00d2cf20ffd9" +) +SMALLEST_VALID_JPEG_BYTES = bytes.fromhex(SMALLEST_VALID_JPEG) + async def test_entity_registry( hass: HomeAssistant, @@ -52,7 +67,7 @@ async def test_camera_motion_detection_state_reports_correctly( assert state.attributes.get("friendly_name") == friendly_name -async def test_camera_motion_detection_can_be_turned_on( +async def test_camera_motion_detection_can_be_turned_on_and_off( hass: HomeAssistant, mock_ring_client ) -> None: """Tests the siren turns on correctly.""" @@ -73,6 +88,55 @@ async 
def test_camera_motion_detection_can_be_turned_on( state = hass.states.get("camera.front") assert state.attributes.get("motion_detection") is True + await hass.services.async_call( + "camera", + "disable_motion_detection", + {"entity_id": "camera.front"}, + blocking=True, + ) + + await hass.async_block_till_done() + + state = hass.states.get("camera.front") + assert state.attributes.get("motion_detection") is None + + +async def test_camera_motion_detection_not_supported( + hass: HomeAssistant, + mock_ring_client, + mock_ring_devices, + caplog: pytest.LogCaptureFixture, +) -> None: + """Tests the siren turns on correctly.""" + front_camera_mock = mock_ring_devices.get_device(765432) + has_capability = front_camera_mock.has_capability.side_effect + + def _has_capability(capability): + if capability == "motion_detection": + return False + return has_capability(capability) + + front_camera_mock.has_capability.side_effect = _has_capability + + await setup_platform(hass, Platform.CAMERA) + + state = hass.states.get("camera.front") + assert state.attributes.get("motion_detection") is None + + await hass.services.async_call( + "camera", + "enable_motion_detection", + {"entity_id": "camera.front"}, + blocking=True, + ) + + await hass.async_block_till_done() + state = hass.states.get("camera.front") + assert state.attributes.get("motion_detection") is None + assert ( + "Entity camera.front does not have motion detection capability" in caplog.text + ) + async def test_updates_work( hass: HomeAssistant, mock_ring_client, mock_ring_devices @@ -136,3 +200,117 @@ async def test_motion_detection_errors_when_turned_on( ) == reauth_expected ) + + +async def test_camera_handle_mjpeg_stream( + hass: HomeAssistant, + mock_ring_client, + mock_ring_devices, + freezer: FrozenDateTimeFactory, +) -> None: + """Test camera returns handle mjpeg stream when available.""" + await setup_platform(hass, Platform.CAMERA) + + front_camera_mock = mock_ring_devices.get_device(765432) + 
front_camera_mock.recording_url.return_value = None + + state = hass.states.get("camera.front") + assert state is not None + + mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) + + # history not updated yet + front_camera_mock.history.assert_not_called() + front_camera_mock.recording_url.assert_not_called() + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is None + + # Video url will be none so no stream + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + front_camera_mock.history.assert_called_once() + front_camera_mock.recording_url.assert_called_once() + + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is None + + # Stop the history updating so we can update the values manually + front_camera_mock.history = MagicMock() + front_camera_mock.last_history[0]["recording"]["status"] = "not ready" + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + front_camera_mock.recording_url.assert_called_once() + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is None + + # If the history id hasn't changed the camera will not check again for the video url + # until the FORCE_REFRESH_INTERVAL has passed + front_camera_mock.last_history[0]["recording"]["status"] = "ready" + front_camera_mock.recording_url = MagicMock(return_value="http://dummy.url") + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + front_camera_mock.recording_url.assert_not_called() + + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is None + + freezer.tick(FORCE_REFRESH_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + 
front_camera_mock.recording_url.assert_called_once() + + # Now the stream should be returned + stream_reader = MockStreamReader(SMALLEST_VALID_JPEG_BYTES) + with patch("homeassistant.components.ring.camera.CameraMjpeg") as mock_camera: + mock_camera.return_value.get_reader = AsyncMock(return_value=stream_reader) + mock_camera.return_value.open_camera = AsyncMock() + mock_camera.return_value.close = AsyncMock() + + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is not None + # Check the stream has been read + assert not await stream_reader.read(-1) + + +async def test_camera_image( + hass: HomeAssistant, + mock_ring_client, + mock_ring_devices, + freezer: FrozenDateTimeFactory, +) -> None: + """Test camera will return still image when available.""" + await setup_platform(hass, Platform.CAMERA) + + front_camera_mock = mock_ring_devices.get_device(765432) + + state = hass.states.get("camera.front") + assert state is not None + + # history not updated yet + front_camera_mock.history.assert_not_called() + front_camera_mock.recording_url.assert_not_called() + with ( + patch( + "homeassistant.components.ring.camera.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ), + pytest.raises(HomeAssistantError), + ): + image = await camera.async_get_image(hass, "camera.front") + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + # history updated so image available + front_camera_mock.history.assert_called_once() + front_camera_mock.recording_url.assert_called_once() + + with patch( + "homeassistant.components.ring.camera.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ): + image = await camera.async_get_image(hass, "camera.front") + assert image.content == SMALLEST_VALID_JPEG_BYTES diff --git a/tests/components/risco/conftest.py b/tests/components/risco/conftest.py index ab3b64b245d..3961d85d694 100644 --- 
a/tests/components/risco/conftest.py +++ b/tests/components/risco/conftest.py @@ -1,7 +1,10 @@ """Fixtures for Risco tests.""" +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import MagicMock, PropertyMock, patch +from pyrisco.cloud.event import Event import pytest from homeassistant.components.risco.const import DOMAIN, TYPE_LOCAL @@ -13,6 +16,7 @@ from homeassistant.const import ( CONF_TYPE, CONF_USERNAME, ) +from homeassistant.core import HomeAssistant from .util import TEST_SITE_NAME, TEST_SITE_UUID, system_mock, zone_mock @@ -116,19 +120,19 @@ def two_zone_local(): @pytest.fixture -def options(): +def options() -> dict[str, Any]: """Fixture for default (empty) options.""" return {} @pytest.fixture -def events(): +def events() -> list[Event]: """Fixture for default (empty) events.""" return [] @pytest.fixture -def cloud_config_entry(hass, options): +def cloud_config_entry(hass: HomeAssistant, options: dict[str, Any]) -> MockConfigEntry: """Fixture for a cloud config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -151,7 +155,9 @@ def login_with_error(exception): @pytest.fixture -async def setup_risco_cloud(hass, cloud_config_entry, events): +async def setup_risco_cloud( + hass: HomeAssistant, cloud_config_entry: MockConfigEntry, events: list[Event] +) -> AsyncGenerator[MockConfigEntry]: """Set up a Risco integration for testing.""" with ( patch( @@ -181,7 +187,7 @@ async def setup_risco_cloud(hass, cloud_config_entry, events): @pytest.fixture -def local_config_entry(hass, options): +def local_config_entry(hass: HomeAssistant, options: dict[str, Any]) -> MockConfigEntry: """Fixture for a local config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=TEST_LOCAL_CONFIG, options=options @@ -201,7 +207,9 @@ def connect_with_error(exception): @pytest.fixture -async def setup_risco_local(hass, local_config_entry): +async def setup_risco_local( + hass: HomeAssistant, local_config_entry: MockConfigEntry +) 
-> AsyncGenerator[MockConfigEntry]: """Set up a local Risco integration for testing.""" with ( patch( diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py index 72444bdc9f2..2b1094554ae 100644 --- a/tests/components/risco/test_sensor.py +++ b/tests/components/risco/test_sensor.py @@ -160,7 +160,7 @@ def _check_state(hass, category, entity_id): @pytest.fixture -async def _set_utc_time_zone(hass): +async def _set_utc_time_zone(hass: HomeAssistant) -> None: await hass.config.async_set_time_zone("UTC") @@ -174,11 +174,10 @@ def save_mock(): @pytest.mark.parametrize("events", [TEST_EVENTS]) +@pytest.mark.usefixtures("two_zone_cloud", "_set_utc_time_zone") async def test_cloud_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, - two_zone_cloud, - _set_utc_time_zone, save_mock, setup_risco_cloud, ) -> None: @@ -207,11 +206,9 @@ async def test_cloud_setup( _check_state(hass, category, entity_id) +@pytest.mark.usefixtures("setup_risco_local", "_no_zones_and_partitions") async def test_local_setup( - hass: HomeAssistant, entity_registry: er.EntityRegistry, - setup_risco_local, - _no_zones_and_partitions, ) -> None: """Test entity setup.""" for entity_id in ENTITY_IDS.values(): diff --git a/tests/components/roborock/conftest.py b/tests/components/roborock/conftest.py index d3bb0a221b1..357c644e2fe 100644 --- a/tests/components/roborock/conftest.py +++ b/tests/components/roborock/conftest.py @@ -1,9 +1,13 @@ """Global fixtures for Roborock integration.""" +from copy import deepcopy from unittest.mock import patch import pytest -from roborock import RoomMapping +from roborock import RoborockCategory, RoomMapping +from roborock.code_mappings import DyadError, RoborockDyadStateCode, ZeoError, ZeoState +from roborock.roborock_message import RoborockDyadDataProtocol, RoborockZeoProtocol +from roborock.version_a01_apis import RoborockMqttClientA01 from homeassistant.components.roborock.const import ( CONF_BASE_URL, @@ -28,6 +32,36 
@@ from .mock_data import ( from tests.common import MockConfigEntry +class A01Mock(RoborockMqttClientA01): + """A class to mock the A01 client.""" + + def __init__(self, user_data, device_info, category) -> None: + """Initialize the A01Mock.""" + super().__init__(user_data, device_info, category) + if category == RoborockCategory.WET_DRY_VAC: + self.protocol_responses = { + RoborockDyadDataProtocol.STATUS: RoborockDyadStateCode.drying.name, + RoborockDyadDataProtocol.POWER: 100, + RoborockDyadDataProtocol.MESH_LEFT: 111, + RoborockDyadDataProtocol.BRUSH_LEFT: 222, + RoborockDyadDataProtocol.ERROR: DyadError.none.name, + RoborockDyadDataProtocol.TOTAL_RUN_TIME: 213, + } + elif category == RoborockCategory.WASHING_MACHINE: + self.protocol_responses: list[RoborockZeoProtocol] = { + RoborockZeoProtocol.STATE: ZeoState.drying.name, + RoborockZeoProtocol.COUNTDOWN: 0, + RoborockZeoProtocol.WASHING_LEFT: 253, + RoborockZeoProtocol.ERROR: ZeoError.none.name, + } + + async def update_values( + self, dyad_data_protocols: list[RoborockDyadDataProtocol | RoborockZeoProtocol] + ): + """Update values with a predetermined response that can be overridden.""" + return {prot: self.protocol_responses[prot] for prot in dyad_data_protocols} + + @pytest.fixture(name="bypass_api_fixture") def bypass_api_fixture() -> None: """Skip calls to the API.""" @@ -35,7 +69,7 @@ def bypass_api_fixture() -> None: patch("homeassistant.components.roborock.RoborockMqttClientV1.async_connect"), patch("homeassistant.components.roborock.RoborockMqttClientV1._send_command"), patch( - "homeassistant.components.roborock.RoborockApiClient.get_home_data", + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", return_value=HOME_DATA, ), patch( @@ -95,6 +129,23 @@ def bypass_api_fixture() -> None: "homeassistant.components.roborock.coordinator.RoborockMqttClientV1.get_map_v1", return_value=b"123", ), + patch( + "homeassistant.components.roborock.coordinator.RoborockClientA01", + A01Mock, + ), 
+ patch("homeassistant.components.roborock.RoborockMqttClientA01", A01Mock), + ): + yield + + +@pytest.fixture +def bypass_api_fixture_v1_only(bypass_api_fixture) -> None: + """Bypass api for tests that require only having v1 devices.""" + home_data_copy = deepcopy(HOME_DATA) + home_data_copy.received_devices = [] + with patch( + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", + return_value=home_data_copy, ): yield diff --git a/tests/components/roborock/snapshots/test_diagnostics.ambr b/tests/components/roborock/snapshots/test_diagnostics.ambr index 3d78e5fd638..805a498041a 100644 --- a/tests/components/roborock/snapshots/test_diagnostics.ambr +++ b/tests/components/roborock/snapshots/test_diagnostics.ambr @@ -588,6 +588,718 @@ }), }), }), + '**REDACTED-2**': dict({ + 'api': dict({ + 'misc_info': dict({ + }), + }), + 'roborock_device_info': dict({ + 'device': dict({ + 'activeTime': 1700754026, + 'deviceStatus': dict({ + '10001': '{"f":"t"}', + '10002': '', + '10004': '{"sid_in_use":25,"sid_version":5,"location":"de","bom":"A.03.0291","language":"en"}', + '10005': '{"sn":"dyad_sn","ssid":"dyad_ssid","timezone":"Europe/Stockholm","posix_timezone":"CET-1CEST,M3.5.0,M10.5.0/3","ip":"1.123.12.1","mac":"b0:4a:33:33:33:33","oba":{"language":"en","name":"A.03.0291_CE","bom":"A.03.0291","location":"de","wifiplan":"EU","timezone":"CET-1CEST,M3.5.0,M10.5.0/3;Europe/Berlin","logserver":"awsde0","featureset":"0"}"}', + '10007': '{"mqttOtaData":{"mqttOtaStatus":{"status":"IDLE"}}}', + '200': 0, + '201': 3, + '202': 0, + '203': 2, + '204': 1, + '205': 1, + '206': 3, + '207': 4, + '208': 1, + '209': 100, + '210': 0, + '212': 1, + '213': 1, + '214': 513, + '215': 513, + '216': 0, + '221': 100, + '222': 0, + '223': 2, + '224': 1, + '225': 360, + '226': 0, + '227': 1320, + '228': 360, + '229': '000,000,003,000,005,000,000,000,003,000,005,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,012,003,000,000', + '230': 352, + '235': 0, + '237': 0, + }), + 
'duid': '**REDACTED**', + 'f': False, + 'fv': '01.12.34', + 'iconUrl': '', + 'localKey': '**REDACTED**', + 'name': 'Dyad Pro', + 'online': True, + 'productId': 'dyad_product', + 'pv': 'A01', + 'share': True, + 'shareTime': 1701367095, + 'silentOtaSwitch': False, + 'timeZoneId': 'Europe/Stockholm', + 'tuyaMigrated': False, + }), + 'product': dict({ + 'capability': 2, + 'category': 'roborock.wetdryvac', + 'id': 'dyad_product', + 'model': 'roborock.wetdryvac.a56', + 'name': 'Roborock Dyad Pro', + 'schema': list([ + dict({ + 'code': 'drying_status', + 'id': '134', + 'mode': 'ro', + 'name': '烘干状态', + 'type': 'RAW', + }), + dict({ + 'code': 'start', + 'id': '200', + 'mode': 'rw', + 'name': '启停', + 'type': 'VALUE', + }), + dict({ + 'code': 'status', + 'id': '201', + 'mode': 'ro', + 'name': '状态', + 'type': 'VALUE', + }), + dict({ + 'code': 'self_clean_mode', + 'id': '202', + 'mode': 'rw', + 'name': '自清洁模式', + 'type': 'VALUE', + }), + dict({ + 'code': 'self_clean_level', + 'id': '203', + 'mode': 'rw', + 'name': '自清洁强度', + 'type': 'VALUE', + }), + dict({ + 'code': 'warm_level', + 'id': '204', + 'mode': 'rw', + 'name': '烘干强度', + 'type': 'VALUE', + }), + dict({ + 'code': 'clean_mode', + 'id': '205', + 'mode': 'rw', + 'name': '洗地模式', + 'type': 'VALUE', + }), + dict({ + 'code': 'suction', + 'id': '206', + 'mode': 'rw', + 'name': '吸力', + 'type': 'VALUE', + }), + dict({ + 'code': 'water_level', + 'id': '207', + 'mode': 'rw', + 'name': '水量', + 'type': 'VALUE', + }), + dict({ + 'code': 'brush_speed', + 'id': '208', + 'mode': 'rw', + 'name': '滚刷转速', + 'type': 'VALUE', + }), + dict({ + 'code': 'power', + 'id': '209', + 'mode': 'ro', + 'name': '电量', + 'type': 'VALUE', + }), + dict({ + 'code': 'countdown_time', + 'id': '210', + 'mode': 'rw', + 'name': '预约时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'auto_self_clean_set', + 'id': '212', + 'mode': 'rw', + 'name': '自动自清洁', + 'type': 'VALUE', + }), + dict({ + 'code': 'auto_dry', + 'id': '213', + 'mode': 'rw', + 'name': '自动烘干', + 'type': 
'VALUE', + }), + dict({ + 'code': 'mesh_left', + 'id': '214', + 'mode': 'ro', + 'name': '滤网已工作时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'brush_left', + 'id': '215', + 'mode': 'ro', + 'name': '滚刷已工作时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'error', + 'id': '216', + 'mode': 'ro', + 'name': '错误值', + 'type': 'VALUE', + }), + dict({ + 'code': 'mesh_reset', + 'id': '218', + 'mode': 'rw', + 'name': '滤网重置', + 'type': 'VALUE', + }), + dict({ + 'code': 'brush_reset', + 'id': '219', + 'mode': 'rw', + 'name': '滚刷重置', + 'type': 'VALUE', + }), + dict({ + 'code': 'volume_set', + 'id': '221', + 'mode': 'rw', + 'name': '音量', + 'type': 'VALUE', + }), + dict({ + 'code': 'stand_lock_auto_run', + 'id': '222', + 'mode': 'rw', + 'name': '直立解锁自动运行开关', + 'type': 'VALUE', + }), + dict({ + 'code': 'auto_self_clean_set_mode', + 'id': '223', + 'mode': 'rw', + 'name': '自动自清洁 - 模式', + 'type': 'VALUE', + }), + dict({ + 'code': 'auto_dry_mode', + 'id': '224', + 'mode': 'rw', + 'name': '自动烘干 - 模式', + 'type': 'VALUE', + }), + dict({ + 'code': 'silent_dry_duration', + 'id': '225', + 'mode': 'rw', + 'name': '静音烘干时长', + 'type': 'VALUE', + }), + dict({ + 'code': 'silent_mode', + 'id': '226', + 'mode': 'rw', + 'name': '勿扰模式开关', + 'type': 'VALUE', + }), + dict({ + 'code': 'silent_mode_start_time', + 'id': '227', + 'mode': 'rw', + 'name': '勿扰开启时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'silent_mode_end_time', + 'id': '228', + 'mode': 'rw', + 'name': '勿扰结束时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'recent_run_time', + 'id': '229', + 'mode': 'rw', + 'name': '近30天每天洗地时长', + 'type': 'STRING', + }), + dict({ + 'code': 'total_run_time', + 'id': '230', + 'mode': 'rw', + 'name': '洗地总时长', + 'type': 'VALUE', + }), + dict({ + 'code': 'feature_info', + 'id': '235', + 'mode': 'ro', + 'name': 'featureinfo', + 'type': 'VALUE', + }), + dict({ + 'code': 'recover_settings', + 'id': '236', + 'mode': 'rw', + 'name': '恢复初始设置', + 'type': 'VALUE', + }), + dict({ + 'code': 'dry_countdown', + 'id': '237', 
+ 'mode': 'ro', + 'name': '烘干倒计时', + 'type': 'VALUE', + }), + dict({ + 'code': 'id_query', + 'id': '10000', + 'mode': 'rw', + 'name': 'ID点数据查询', + 'type': 'STRING', + }), + dict({ + 'code': 'f_c', + 'id': '10001', + 'mode': 'ro', + 'name': '防串货', + 'type': 'STRING', + }), + dict({ + 'code': 'schedule_task', + 'id': '10002', + 'mode': 'rw', + 'name': '定时任务', + 'type': 'STRING', + }), + dict({ + 'code': 'snd_switch', + 'id': '10003', + 'mode': 'rw', + 'name': '语音包切换', + 'type': 'STRING', + }), + dict({ + 'code': 'snd_state', + 'id': '10004', + 'mode': 'rw', + 'name': '语音包/OBA信息', + 'type': 'STRING', + }), + dict({ + 'code': 'product_info', + 'id': '10005', + 'mode': 'ro', + 'name': '产品信息', + 'type': 'STRING', + }), + dict({ + 'code': 'privacy_info', + 'id': '10006', + 'mode': 'rw', + 'name': '隐私协议', + 'type': 'STRING', + }), + dict({ + 'code': 'ota_nfo', + 'id': '10007', + 'mode': 'ro', + 'name': 'OTA info', + 'type': 'STRING', + }), + dict({ + 'code': 'rpc_req', + 'id': '10101', + 'mode': 'wo', + 'name': 'rpc req', + 'type': 'STRING', + }), + dict({ + 'code': 'rpc_resp', + 'id': '10102', + 'mode': 'ro', + 'name': 'rpc resp', + 'type': 'STRING', + }), + ]), + }), + }), + }), + '**REDACTED-3**': dict({ + 'api': dict({ + 'misc_info': dict({ + }), + }), + 'roborock_device_info': dict({ + 'device': dict({ + 'activeTime': 1699964128, + 'deviceStatus': dict({ + '10001': '{"f":"t"}', + '10005': '{"sn":"zeo_sn","ssid":"internet","timezone":"Europe/Berlin","posix_timezone":"CET-1CEST,M3.5.0,M10.5.0/3","ip":"192.111.11.11","mac":"b0:4a:00:00:00:00","rssi":-57,"oba":{"language":"en","name":"A.03.0403_CE","bom":"A.03.0403","location":"de","wifiplan":"EU","timezone":"CET-1CEST,M3.5.0,M10.5.0/3;Europe/Berlin","logserver":"awsde0","loglevel":"4","featureset":"0"}}', + '10007': '{"mqttOtaData":{"mqttOtaStatus":{"status":"IDLE"}}}', + '200': 1, + '201': 0, + '202': 1, + '203': 7, + '204': 1, + '205': 33, + '206': 0, + '207': 4, + '208': 2, + '209': 7, + '210': 1, + '211': 1, + '212': 
1, + '213': 2, + '214': 2, + '217': 0, + '218': 227, + '219': 0, + '220': 0, + '221': 0, + '222': 347414, + '223': 0, + '224': 21, + '225': 0, + '226': 0, + '227': 1, + '232': 0, + }), + 'duid': '**REDACTED**', + 'f': False, + 'featureSet': '0', + 'fv': '01.00.94', + 'iconUrl': '', + 'localKey': '**REDACTED**', + 'name': 'Zeo One', + 'newFeatureSet': '40', + 'online': True, + 'productId': 'zeo_id', + 'pv': 'A01', + 'share': True, + 'shareTime': 1712763572, + 'silentOtaSwitch': False, + 'sn': 'zeo_sn', + 'timeZoneId': 'Europe/Berlin', + 'tuyaMigrated': False, + }), + 'product': dict({ + 'capability': 2, + 'category': 'roborock.wm', + 'id': 'zeo_id', + 'model': 'roborock.wm.a102', + 'name': 'Zeo One', + 'schema': list([ + dict({ + 'code': 'drying_status', + 'id': '134', + 'mode': 'ro', + 'name': '烘干状态', + 'type': 'RAW', + }), + dict({ + 'code': 'start', + 'id': '200', + 'mode': 'rw', + 'name': '启动', + 'type': 'BOOL', + }), + dict({ + 'code': 'pause', + 'id': '201', + 'mode': 'rw', + 'name': '暂停', + 'type': 'BOOL', + }), + dict({ + 'code': 'shutdown', + 'id': '202', + 'mode': 'rw', + 'name': '关机', + 'type': 'BOOL', + }), + dict({ + 'code': 'status', + 'id': '203', + 'mode': 'ro', + 'name': '状态', + 'type': 'VALUE', + }), + dict({ + 'code': 'mode', + 'id': '204', + 'mode': 'rw', + 'name': '模式', + 'type': 'VALUE', + }), + dict({ + 'code': 'program', + 'id': '205', + 'mode': 'rw', + 'name': '程序', + 'type': 'VALUE', + }), + dict({ + 'code': 'child_lock', + 'id': '206', + 'mode': 'rw', + 'name': '童锁', + 'type': 'BOOL', + }), + dict({ + 'code': 'temp', + 'id': '207', + 'mode': 'rw', + 'name': '洗涤温度', + 'type': 'VALUE', + }), + dict({ + 'code': 'rinse_times', + 'id': '208', + 'mode': 'rw', + 'name': '漂洗次数', + 'type': 'VALUE', + }), + dict({ + 'code': 'spin_level', + 'id': '209', + 'mode': 'rw', + 'name': '滚筒转速', + 'type': 'VALUE', + }), + dict({ + 'code': 'drying_mode', + 'id': '210', + 'mode': 'rw', + 'name': '干燥度', + 'type': 'VALUE', + }), + dict({ + 'code': 
'detergent_set', + 'id': '211', + 'mode': 'rw', + 'name': '自动投放-洗衣液', + 'type': 'BOOL', + }), + dict({ + 'code': 'softener_set', + 'id': '212', + 'mode': 'rw', + 'name': '自动投放-柔顺剂', + 'type': 'BOOL', + }), + dict({ + 'code': 'detergent_type', + 'id': '213', + 'mode': 'rw', + 'name': '洗衣液投放量', + 'type': 'VALUE', + }), + dict({ + 'code': 'softener_type', + 'id': '214', + 'mode': 'rw', + 'name': '柔顺剂投放量', + 'type': 'VALUE', + }), + dict({ + 'code': 'countdown', + 'id': '217', + 'mode': 'rw', + 'name': '预约时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'washing_left', + 'id': '218', + 'mode': 'ro', + 'name': '洗衣剩余时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'doorlock_state', + 'id': '219', + 'mode': 'ro', + 'name': '门锁状态', + 'type': 'BOOL', + }), + dict({ + 'code': 'error', + 'id': '220', + 'mode': 'ro', + 'name': '故障', + 'type': 'VALUE', + }), + dict({ + 'code': 'custom_param_save', + 'id': '221', + 'mode': 'rw', + 'name': '云程序设置', + 'type': 'VALUE', + }), + dict({ + 'code': 'custom_param_get', + 'id': '222', + 'mode': 'ro', + 'name': '云程序读取', + 'type': 'VALUE', + }), + dict({ + 'code': 'sound_set', + 'id': '223', + 'mode': 'rw', + 'name': '提示音', + 'type': 'BOOL', + }), + dict({ + 'code': 'times_after_clean', + 'id': '224', + 'mode': 'ro', + 'name': '距离上次筒自洁次数', + 'type': 'VALUE', + }), + dict({ + 'code': 'default_setting', + 'id': '225', + 'mode': 'rw', + 'name': '记忆洗衣偏好开关', + 'type': 'BOOL', + }), + dict({ + 'code': 'detergent_empty', + 'id': '226', + 'mode': 'ro', + 'name': '洗衣液用尽', + 'type': 'BOOL', + }), + dict({ + 'code': 'softener_empty', + 'id': '227', + 'mode': 'ro', + 'name': '柔顺剂用尽', + 'type': 'BOOL', + }), + dict({ + 'code': 'light_setting', + 'id': '229', + 'mode': 'rw', + 'name': '筒灯设定', + 'type': 'BOOL', + }), + dict({ + 'code': 'detergent_volume', + 'id': '230', + 'mode': 'rw', + 'name': '洗衣液投放量(单次)', + 'type': 'VALUE', + }), + dict({ + 'code': 'softener_volume', + 'id': '231', + 'mode': 'rw', + 'name': '柔顺剂投放量(单次)', + 'type': 'VALUE', + }), + 
dict({ + 'code': 'app_authorization', + 'id': '232', + 'mode': 'rw', + 'name': '远程控制授权', + 'type': 'VALUE', + }), + dict({ + 'code': 'id_query', + 'id': '10000', + 'mode': 'rw', + 'name': 'ID点查询', + 'type': 'STRING', + }), + dict({ + 'code': 'f_c', + 'id': '10001', + 'mode': 'ro', + 'name': '防串货', + 'type': 'STRING', + }), + dict({ + 'code': 'snd_state', + 'id': '10004', + 'mode': 'rw', + 'name': '语音包/OBA信息', + 'type': 'STRING', + }), + dict({ + 'code': 'product_info', + 'id': '10005', + 'mode': 'ro', + 'name': '产品信息', + 'type': 'STRING', + }), + dict({ + 'code': 'privacy_info', + 'id': '10006', + 'mode': 'rw', + 'name': '隐私协议', + 'type': 'STRING', + }), + dict({ + 'code': 'ota_nfo', + 'id': '10007', + 'mode': 'rw', + 'name': 'OTA info', + 'type': 'STRING', + }), + dict({ + 'code': 'washing_log', + 'id': '10008', + 'mode': 'ro', + 'name': '洗衣记录', + 'type': 'BOOL', + }), + dict({ + 'code': 'rpc_req', + 'id': '10101', + 'mode': 'wo', + 'name': 'rpc req', + 'type': 'STRING', + }), + dict({ + 'code': 'rpc_resp', + 'id': '10102', + 'mode': 'ro', + 'name': 'rpc resp', + 'type': 'STRING', + }), + ]), + }), + }), + }), }), }) # --- diff --git a/tests/components/roborock/test_config_flow.py b/tests/components/roborock/test_config_flow.py index 5134ef7eea2..a5a86e44372 100644 --- a/tests/components/roborock/test_config_flow.py +++ b/tests/components/roborock/test_config_flow.py @@ -11,9 +11,10 @@ from roborock.exceptions import ( RoborockInvalidEmail, RoborockUrlException, ) +from vacuum_map_parser_base.config.drawable import Drawable from homeassistant import config_entries -from homeassistant.components.roborock.const import CONF_ENTRY_CODE, DOMAIN +from homeassistant.components.roborock.const import CONF_ENTRY_CODE, DOMAIN, DRAWABLES from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -185,6 +186,28 @@ async def test_config_flow_failures_code_login( assert 
len(mock_setup.mock_calls) == 1 +async def test_options_flow_drawables( + hass: HomeAssistant, setup_entry: MockConfigEntry +) -> None: + """Test that the options flow works.""" + result = await hass.config_entries.options.async_init(setup_entry.entry_id) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == DRAWABLES + with patch( + "homeassistant.components.roborock.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={Drawable.PREDICTED_PATH: True}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert setup_entry.options[DRAWABLES][Drawable.PREDICTED_PATH] is True + assert len(mock_setup.mock_calls) == 1 + + async def test_reauth_flow( hass: HomeAssistant, bypass_api_fixture, mock_roborock_entry: MockConfigEntry ) -> None: diff --git a/tests/components/roborock/test_init.py b/tests/components/roborock/test_init.py index de858ef7cb2..cace9a8ed67 100644 --- a/tests/components/roborock/test_init.py +++ b/tests/components/roborock/test_init.py @@ -1,7 +1,9 @@ """Test for Roborock init.""" +from copy import deepcopy from unittest.mock import patch +import pytest from roborock import RoborockException, RoborockInvalidCredentials from homeassistant.components.roborock.const import DOMAIN @@ -9,6 +11,8 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from .mock_data import HOME_DATA + from tests.common import MockConfigEntry @@ -25,7 +29,6 @@ async def test_unload_entry( await hass.async_block_till_done() assert mock_disconnect.call_count == 2 assert setup_entry.state is ConfigEntryState.NOT_LOADED - assert not hass.data.get(DOMAIN) async def test_config_entry_not_ready( @@ -34,7 +37,7 @@ async def test_config_entry_not_ready( """Test that when coordinator update fails, entry 
retries.""" with ( patch( - "homeassistant.components.roborock.RoborockApiClient.get_home_data", + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", ), patch( "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", @@ -51,7 +54,7 @@ async def test_config_entry_not_ready_home_data( """Test that when we fail to get home data, entry retries.""" with ( patch( - "homeassistant.components.roborock.RoborockApiClient.get_home_data", + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", side_effect=RoborockException(), ), patch( @@ -64,7 +67,9 @@ async def test_config_entry_not_ready_home_data( async def test_get_networking_fails( - hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture + hass: HomeAssistant, + mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, ) -> None: """Test that when networking fails, we attempt to retry.""" with patch( @@ -76,7 +81,9 @@ async def test_get_networking_fails( async def test_get_networking_fails_none( - hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture + hass: HomeAssistant, + mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, ) -> None: """Test that when networking returns None, we attempt to retry.""" with patch( @@ -88,7 +95,9 @@ async def test_get_networking_fails_none( async def test_cloud_client_fails_props( - hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture + hass: HomeAssistant, + mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, ) -> None: """Test that if networking succeeds, but we can't communicate with the vacuum, we can't get props, fail.""" with ( @@ -106,7 +115,9 @@ async def test_cloud_client_fails_props( async def test_local_client_fails_props( - hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture + hass: HomeAssistant, + mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, ) -> None: 
"""Test that if networking succeeds, but we can't communicate locally with the vacuum, we can't get props, fail.""" with patch( @@ -118,7 +129,9 @@ async def test_local_client_fails_props( async def test_fails_maps_continue( - hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture + hass: HomeAssistant, + mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, ) -> None: """Test that if we fail to get the maps, we still setup.""" with patch( @@ -136,7 +149,7 @@ async def test_reauth_started( ) -> None: """Test reauth flow started.""" with patch( - "homeassistant.components.roborock.RoborockApiClient.get_home_data", + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", side_effect=RoborockInvalidCredentials(), ): await async_setup_component(hass, DOMAIN, {}) @@ -145,3 +158,39 @@ async def test_reauth_started( flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 assert flows[0]["step_id"] == "reauth_confirm" + + +async def test_not_supported_protocol( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that we output a message on incorrect protocol.""" + home_data_copy = deepcopy(HOME_DATA) + home_data_copy.received_devices[0].pv = "random" + with patch( + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", + return_value=home_data_copy, + ): + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + assert "because its protocol version random" in caplog.text + + +async def test_not_supported_a01_device( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that we output a message on incorrect category.""" + home_data_copy = deepcopy(HOME_DATA) + home_data_copy.products[2].category = "random" + with patch( + 
"homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", + return_value=home_data_copy, + ): + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + assert "The device you added is not yet supported" in caplog.text diff --git a/tests/components/roborock/test_select.py b/tests/components/roborock/test_select.py index c8626818749..ce846107d93 100644 --- a/tests/components/roborock/test_select.py +++ b/tests/components/roborock/test_select.py @@ -1,13 +1,18 @@ """Test Roborock Select platform.""" +import copy from unittest.mock import patch import pytest from roborock.exceptions import RoborockException -from homeassistant.const import SERVICE_SELECT_OPTION +from homeassistant.components.roborock import DOMAIN +from homeassistant.const import SERVICE_SELECT_OPTION, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.setup import async_setup_component + +from .mock_data import PROP from tests.common import MockConfigEntry @@ -17,6 +22,7 @@ from tests.common import MockConfigEntry [ ("select.roborock_s7_maxv_mop_mode", "deep"), ("select.roborock_s7_maxv_mop_intensity", "mild"), + ("select.roborock_s7_maxv_selected_map", "Downstairs"), ], ) async def test_update_success( @@ -62,3 +68,21 @@ async def test_update_failure( blocking=True, target={"entity_id": "select.roborock_s7_maxv_mop_mode"}, ) + + +async def test_none_map_select( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, +) -> None: + """Test that the select entity correctly handles not having a current map.""" + prop = copy.deepcopy(PROP) + # Set map status to None so that current map is never set + prop.status.map_status = None + with patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", + return_value=prop, + ): + await async_setup_component(hass, DOMAIN, {}) + select_entity = 
hass.states.get("select.roborock_s7_maxv_selected_map") + assert select_entity.state == STATE_UNKNOWN diff --git a/tests/components/roborock/test_sensor.py b/tests/components/roborock/test_sensor.py index 88ed6e1098c..908754f3b92 100644 --- a/tests/components/roborock/test_sensor.py +++ b/tests/components/roborock/test_sensor.py @@ -21,7 +21,7 @@ from tests.common import MockConfigEntry async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> None: """Test sensors and check test values are correctly set.""" - assert len(hass.states.async_all("sensor")) == 28 + assert len(hass.states.async_all("sensor")) == 38 assert hass.states.get("sensor.roborock_s7_maxv_main_brush_time_left").state == str( MAIN_BRUSH_REPLACE_TIME - 74382 ) @@ -54,6 +54,16 @@ async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> Non hass.states.get("sensor.roborock_s7_maxv_last_clean_end").state == "2023-01-01T03:43:58+00:00" ) + assert hass.states.get("sensor.dyad_pro_status").state == "drying" + assert hass.states.get("sensor.dyad_pro_battery").state == "100" + assert hass.states.get("sensor.dyad_pro_filter_time_left").state == "111" + assert hass.states.get("sensor.dyad_pro_roller_left").state == "222" + assert hass.states.get("sensor.dyad_pro_error").state == "none" + assert hass.states.get("sensor.dyad_pro_total_cleaning_time").state == "213" + assert hass.states.get("sensor.zeo_one_state").state == "drying" + assert hass.states.get("sensor.zeo_one_countdown").state == "0" + assert hass.states.get("sensor.zeo_one_washing_left").state == "253" + assert hass.states.get("sensor.zeo_one_error").state == "none" async def test_listener_update( diff --git a/tests/components/roku/conftest.py b/tests/components/roku/conftest.py index 160a1bf3127..7ac332a1a6c 100644 --- a/tests/components/roku/conftest.py +++ b/tests/components/roku/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Roku integration tests.""" +from collections.abc import Generator import json from 
unittest.mock import MagicMock, patch import pytest from rokuecp import Device as RokuDevice -from typing_extensions import Generator from homeassistant.components.roku.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/roon/test_config_flow.py b/tests/components/roon/test_config_flow.py index 9822c88fa48..9539a9c0f5b 100644 --- a/tests/components/roon/test_config_flow.py +++ b/tests/components/roon/test_config_flow.py @@ -48,7 +48,7 @@ class RoonApiMockException(RoonApiMock): @property def token(self): """Throw exception.""" - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 class RoonDiscoveryMock: diff --git a/tests/components/rova/snapshots/test_init.ambr b/tests/components/rova/snapshots/test_init.ambr index 340b0e6d472..5e607e6a8df 100644 --- a/tests/components/rova/snapshots/test_init.ambr +++ b/tests/components/rova/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': '8381BE 13', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/rtsp_to_webrtc/conftest.py b/tests/components/rtsp_to_webrtc/conftest.py index 6e790b4ff00..956825f6372 100644 --- a/tests/components/rtsp_to_webrtc/conftest.py +++ b/tests/components/rtsp_to_webrtc/conftest.py @@ -2,13 +2,12 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import AsyncGenerator, Awaitable, Callable from typing import Any from unittest.mock import patch import pytest import rtsp_to_webrtc -from typing_extensions import AsyncGenerator from homeassistant.components import camera from homeassistant.components.rtsp_to_webrtc import DOMAIN diff --git a/tests/components/rtsp_to_webrtc/test_config_flow.py b/tests/components/rtsp_to_webrtc/test_config_flow.py index 504ede68ac7..5daf9400396 100644 --- 
a/tests/components/rtsp_to_webrtc/test_config_flow.py +++ b/tests/components/rtsp_to_webrtc/test_config_flow.py @@ -25,7 +25,7 @@ async def test_web_full_flow(hass: HomeAssistant) -> None: ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - assert result.get("data_schema").schema.get("server_url") == str + assert result.get("data_schema").schema.get("server_url") is str assert not result.get("errors") with ( patch("rtsp_to_webrtc.client.Client.heartbeat"), @@ -64,7 +64,7 @@ async def test_invalid_url(hass: HomeAssistant) -> None: ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - assert result.get("data_schema").schema.get("server_url") == str + assert result.get("data_schema").schema.get("server_url") is str assert not result.get("errors") result = await hass.config_entries.flow.async_configure( result["flow_id"], {"server_url": "not-a-url"} diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py new file mode 100644 index 00000000000..96171071907 --- /dev/null +++ b/tests/components/russound_rio/__init__.py @@ -0,0 +1 @@ +"""Tests for the Russound RIO integration.""" diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py new file mode 100644 index 00000000000..a87d0a74fa8 --- /dev/null +++ b/tests/components/russound_rio/conftest.py @@ -0,0 +1,48 @@ +"""Test fixtures for Russound RIO integration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .const import HARDWARE_MAC, MOCK_CONFIG, MOCK_CONTROLLERS, MODEL + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry(): + """Prevent setup.""" + with patch( + "homeassistant.components.russound_rio.async_setup_entry", return_value=True + ) as 
mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Mock a Russound RIO config entry.""" + entry = MockConfigEntry( + domain=DOMAIN, data=MOCK_CONFIG, unique_id=HARDWARE_MAC, title=MODEL + ) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +def mock_russound() -> Generator[AsyncMock]: + """Mock the Russound RIO client.""" + with ( + patch( + "homeassistant.components.russound_rio.Russound", autospec=True + ) as mock_client, + patch( + "homeassistant.components.russound_rio.config_flow.Russound", + return_value=mock_client, + ), + ): + mock_client.enumerate_controllers.return_value = MOCK_CONTROLLERS + yield mock_client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py new file mode 100644 index 00000000000..527f4fe3377 --- /dev/null +++ b/tests/components/russound_rio/const.py @@ -0,0 +1,16 @@ +"""Constants for russound_rio tests.""" + +from collections import namedtuple + +HOST = "127.0.0.1" +PORT = 9621 +MODEL = "MCA-C5" +HARDWARE_MAC = "00:11:22:33:44:55" + +MOCK_CONFIG = { + "host": HOST, + "port": PORT, +} + +_CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 +MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py new file mode 100644 index 00000000000..8bc7bd738a1 --- /dev/null +++ b/tests/components/russound_rio/test_config_flow.py @@ -0,0 +1,135 @@ +"""Test the Russound RIO config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_CONFIG, MOCK_CONTROLLERS, MODEL + + +async def 
test_form( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_cannot_connect( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + mock_russound.connect.side_effect = TimeoutError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + # Recover with correct information + mock_russound.connect.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_no_primary_controller( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we handle no primary controller error.""" + mock_russound.enumerate_controllers.return_value = {} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = MOCK_CONFIG + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], user_input + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "no_primary_controller"} + + # Recover with correct information + mock_russound.enumerate_controllers.return_value = MOCK_CONTROLLERS + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we import a config entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_cannot_connect( + hass: HomeAssistant, mock_russound: AsyncMock +) -> None: + """Test we handle import cannot connect error.""" + mock_russound.connect.side_effect = TimeoutError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_import_no_primary_controller( + hass: HomeAssistant, mock_russound: AsyncMock +) -> None: + """Test import with no primary controller error.""" + mock_russound.enumerate_controllers.return_value = {} + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_primary_controller" diff --git a/tests/components/sabnzbd/conftest.py b/tests/components/sabnzbd/conftest.py index 7d68d3108f0..b5450e5134f 100644 --- 
a/tests/components/sabnzbd/conftest.py +++ b/tests/components/sabnzbd/conftest.py @@ -1,9 +1,9 @@ """Configuration for Sabnzbd tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/samsungtv/conftest.py b/tests/components/samsungtv/conftest.py index 8d38adad06d..ec12031ef96 100644 --- a/tests/components/samsungtv/conftest.py +++ b/tests/components/samsungtv/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from datetime import datetime from socket import AddressFamily # pylint: disable=no-name-in-module from typing import Any @@ -19,16 +19,12 @@ from samsungtvws.encrypted.remote import SamsungTVEncryptedWSAsyncRemote from samsungtvws.event import ED_INSTALLED_APP_EVENT from samsungtvws.exceptions import ResponseError from samsungtvws.remote import ChannelEmitCommand -from typing_extensions import Generator from homeassistant.components.samsungtv.const import WEBSOCKET_SSL_PORT -from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.util.dt as dt_util from .const import SAMPLE_DEVICE_INFO_UE48JU6400, SAMPLE_DEVICE_INFO_WIFI -from tests.common import async_mock_service - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -40,7 +36,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(autouse=True) -async def silent_ssdp_scanner(hass): +def silent_ssdp_scanner() -> Generator[None]: """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -183,7 +179,7 @@ def rest_api_fixture_non_ssl_only() -> Mock: class MockSamsungTVAsyncRest: """Mock for a MockSamsungTVAsyncRest.""" - def __init__(self, host, session, port, timeout): + def __init__(self, host, session, 
port, timeout) -> None: """Mock a MockSamsungTVAsyncRest.""" self.port = port self.host = host @@ -300,9 +296,3 @@ def mac_address_fixture() -> Mock: """Patch getmac.get_mac_address.""" with patch("getmac.get_mac_address", return_value=None) as mac: yield mac - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr index 42a3f4fb396..061b5bc1836 100644 --- a/tests/components/samsungtv/snapshots/test_init.ambr +++ b/tests/components/samsungtv/snapshots/test_init.ambr @@ -30,8 +30,10 @@ }), 'manufacturer': None, 'model': '82GXARRS', + 'model_id': None, 'name': 'fake', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -50,6 +52,10 @@ 'mac', 'aa:bb:cc:dd:ee:ff', ), + tuple( + 'mac', + 'none', + ), }), 'disabled_by': None, 'entry_type': None, @@ -66,8 +72,10 @@ }), 'manufacturer': None, 'model': '82GXARRS', + 'model_id': None, 'name': 'fake', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/samsungtv/test_device_trigger.py b/tests/components/samsungtv/test_device_trigger.py index e16ea718cbb..acc7ecb904d 100644 --- a/tests/components/samsungtv/test_device_trigger.py +++ b/tests/components/samsungtv/test_device_trigger.py @@ -45,7 +45,9 @@ async def test_get_triggers( @pytest.mark.usefixtures("remoteencws", "rest_api") async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) @@ -95,11 
+97,11 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 - assert calls[1].data["some"] == entity_id - assert calls[1].data["id"] == 0 + assert len(service_calls) == 3 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 + assert service_calls[2].data["some"] == entity_id + assert service_calls[2].data["id"] == 0 @pytest.mark.usefixtures("remoteencws", "rest_api") diff --git a/tests/components/samsungtv/test_diagnostics.py b/tests/components/samsungtv/test_diagnostics.py index 7b20002ae5b..b1bdf034bc1 100644 --- a/tests/components/samsungtv/test_diagnostics.py +++ b/tests/components/samsungtv/test_diagnostics.py @@ -16,6 +16,7 @@ from .const import ( SAMPLE_DEVICE_INFO_WIFI, ) +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -29,6 +30,7 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { + "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -43,6 +45,7 @@ async def test_entry_diagnostics( "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, + "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, @@ -65,6 +68,7 @@ async def test_entry_diagnostics_encrypted( assert await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { + "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -80,6 +84,7 @@ async def test_entry_diagnostics_encrypted( "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, + "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, @@ -102,6 +107,7 @@ async def test_entry_diagnostics_encrypte_offline( assert 
await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { + "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -116,6 +122,7 @@ async def test_entry_diagnostics_encrypte_offline( "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, + "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, diff --git a/tests/components/samsungtv/test_init.py b/tests/components/samsungtv/test_init.py index 479664d4ec0..5715bd4b0aa 100644 --- a/tests/components/samsungtv/test_init.py +++ b/tests/components/samsungtv/test_init.py @@ -6,13 +6,16 @@ import pytest from samsungtvws.async_remote import SamsungTVWSAsyncRemote from syrupy.assertion import SnapshotAssertion -from homeassistant.components.media_player import DOMAIN, MediaPlayerEntityFeature +from homeassistant.components.media_player import ( + DOMAIN as MP_DOMAIN, + MediaPlayerEntityFeature, +) from homeassistant.components.samsungtv.const import ( CONF_MANUFACTURER, CONF_SESSION_ID, CONF_SSDP_MAIN_TV_AGENT_LOCATION, CONF_SSDP_RENDERING_CONTROL_LOCATION, - DOMAIN as SAMSUNGTV_DOMAIN, + DOMAIN, LEGACY_PORT, METHOD_LEGACY, METHOD_WEBSOCKET, @@ -47,7 +50,7 @@ from .const import ( from tests.common import MockConfigEntry -ENTITY_ID = f"{DOMAIN}.fake_name" +ENTITY_ID = f"{MP_DOMAIN}.fake_name" MOCK_CONFIG = { CONF_HOST: "fake_host", CONF_NAME: "fake_name", @@ -71,7 +74,7 @@ async def test_setup(hass: HomeAssistant) -> None: # test host and port await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) @@ -94,7 +97,7 @@ async def test_setup_without_port_device_offline(hass: HomeAssistant) -> None: ): await setup_samsungtv_entry(hass, MOCK_CONFIG) - config_entries_domain = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN) + config_entries_domain = hass.config_entries.async_entries(DOMAIN) assert 
len(config_entries_domain) == 1 assert config_entries_domain[0].state is ConfigEntryState.SETUP_RETRY @@ -104,7 +107,7 @@ async def test_setup_without_port_device_online(hass: HomeAssistant) -> None: """Test import from yaml when the device is online.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) - config_entries_domain = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN) + config_entries_domain = hass.config_entries.async_entries(DOMAIN) assert len(config_entries_domain) == 1 assert config_entries_domain[0].data[CONF_MAC] == "aa:bb:aa:aa:aa:aa" @@ -183,7 +186,7 @@ async def test_update_imported_legacy_without_method(hass: HomeAssistant) -> Non hass, {CONF_HOST: "fake_host", CONF_MANUFACTURER: "Samsung"} ) - entries = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN) + entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 assert entries[0].data[CONF_METHOD] == METHOD_LEGACY assert entries[0].data[CONF_PORT] == LEGACY_PORT @@ -214,7 +217,7 @@ async def test_incorrectly_formatted_mac_fixed(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - config_entries = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN) + config_entries = hass.config_entries.async_entries(DOMAIN) assert len(config_entries) == 1 assert config_entries[0].data[CONF_MAC] == "aa:bb:aa:aa:aa:aa" @@ -229,7 +232,7 @@ async def test_cleanup_mac( Reverted due to device registry collisions in #119249 / #119082 """ entry = MockConfigEntry( - domain=SAMSUNGTV_DOMAIN, + domain=DOMAIN, data=MOCK_ENTRY_WS_WITH_MAC, entry_id="123456", unique_id="any", diff --git a/tests/components/samsungtv/test_media_player.py b/tests/components/samsungtv/test_media_player.py index 4c7ee0e116d..ef7e58251e8 100644 --- a/tests/components/samsungtv/test_media_player.py +++ b/tests/components/samsungtv/test_media_player.py @@ -31,7 +31,7 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, - DOMAIN, + DOMAIN 
as MP_DOMAIN, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, MediaPlayerDeviceClass, @@ -39,7 +39,7 @@ from homeassistant.components.media_player import ( ) from homeassistant.components.samsungtv.const import ( CONF_SSDP_RENDERING_CONTROL_LOCATION, - DOMAIN as SAMSUNGTV_DOMAIN, + DOMAIN, ENCRYPTED_WEBSOCKET_PORT, METHOD_ENCRYPTED_WEBSOCKET, METHOD_WEBSOCKET, @@ -91,7 +91,7 @@ from .const import ( from tests.common import MockConfigEntry, async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.fake" +ENTITY_ID = f"{MP_DOMAIN}.fake" MOCK_CONFIGWS = { CONF_HOST: "fake_host", CONF_NAME: "fake", @@ -145,7 +145,7 @@ async def test_setup_websocket(hass: HomeAssistant) -> None: await hass.async_block_till_done() - config_entries = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN) + config_entries = hass.config_entries.async_entries(DOMAIN) assert len(config_entries) == 1 assert config_entries[0].data[CONF_MAC] == "aa:bb:aa:aa:aa:aa" @@ -155,16 +155,16 @@ async def test_setup_websocket_2( hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_now: datetime ) -> None: """Test setup of platform from config entry.""" - entity_id = f"{DOMAIN}.fake" + entity_id = f"{MP_DOMAIN}.fake" entry = MockConfigEntry( - domain=SAMSUNGTV_DOMAIN, + domain=DOMAIN, data=MOCK_ENTRY_WS, unique_id=entity_id, ) entry.add_to_hass(hass) - config_entries = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN) + config_entries = hass.config_entries.async_entries(DOMAIN) assert len(config_entries) == 1 assert entry is config_entries[0] @@ -549,7 +549,7 @@ async def test_send_key(hass: HomeAssistant, remote: Mock) -> None: """Test for send key.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) # key called @@ -563,7 +563,7 @@ async def test_send_key_broken_pipe(hass: HomeAssistant, remote: Mock) -> None: await 
setup_samsungtv_entry(hass, MOCK_CONFIG) remote.control = Mock(side_effect=BrokenPipeError("Boom")) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) assert state.state == STATE_ON @@ -578,7 +578,7 @@ async def test_send_key_connection_closed_retry_succeed( side_effect=[exceptions.ConnectionClosed("Boom"), DEFAULT_MOCK, DEFAULT_MOCK] ) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) # key because of retry two times @@ -595,7 +595,7 @@ async def test_send_key_unhandled_response(hass: HomeAssistant, remote: Mock) -> await setup_samsungtv_entry(hass, MOCK_CONFIG) remote.control = Mock(side_effect=exceptions.UnhandledResponse("Boom")) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) assert state.state == STATE_ON @@ -607,7 +607,7 @@ async def test_send_key_websocketexception(hass: HomeAssistant, remotews: Mock) await setup_samsungtv_entry(hass, MOCK_CONFIGWS) remotews.send_commands = Mock(side_effect=WebSocketException("Boom")) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) assert state.state == STATE_ON @@ -621,7 +621,7 @@ async def test_send_key_websocketexception_encrypted( await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) remoteencws.send_commands = Mock(side_effect=WebSocketException("Boom")) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = 
hass.states.get(ENTITY_ID) assert state.state == STATE_ON @@ -633,7 +633,7 @@ async def test_send_key_os_error_ws(hass: HomeAssistant, remotews: Mock) -> None await setup_samsungtv_entry(hass, MOCK_CONFIGWS) remotews.send_commands = Mock(side_effect=OSError("Boom")) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) assert state.state == STATE_ON @@ -647,7 +647,7 @@ async def test_send_key_os_error_ws_encrypted( await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) remoteencws.send_commands = Mock(side_effect=OSError("Boom")) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) assert state.state == STATE_ON @@ -658,7 +658,7 @@ async def test_send_key_os_error(hass: HomeAssistant, remote: Mock) -> None: await setup_samsungtv_entry(hass, MOCK_CONFIG) remote.control = Mock(side_effect=OSError("Boom")) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) assert state.state == STATE_ON @@ -677,12 +677,12 @@ async def test_state(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> Non """Test for state property.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) assert state.state == STATE_ON await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) state = hass.states.get(ENTITY_ID) # Should be STATE_UNAVAILABLE after the timer 
expires @@ -733,7 +733,7 @@ async def test_turn_off_websocket( remotews.send_commands.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remotews.send_commands.call_count == 1 @@ -745,11 +745,11 @@ async def test_turn_off_websocket( # commands not sent : power off in progress remotews.send_commands.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert "TV is powering off, not sending keys: ['KEY_VOLUP']" in caplog.text await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_SELECT_SOURCE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_INPUT_SOURCE: "Deezer"}, True, @@ -772,7 +772,7 @@ async def test_turn_off_websocket_frame( remotews.send_commands.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remotews.send_commands.call_count == 1 @@ -800,7 +800,7 @@ async def test_turn_off_encrypted_websocket( caplog.clear() await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remoteencws.send_commands.call_count == 1 @@ -815,7 +815,7 @@ async def test_turn_off_encrypted_websocket( # commands not sent : power off in progress remoteencws.send_commands.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert "TV is powering off, not sending keys: ['KEY_VOLUP']" in caplog.text remoteencws.send_commands.assert_not_called() @@ -841,7 +841,7 @@ async def test_turn_off_encrypted_websocket_key_type( caplog.clear() await 
hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remoteencws.send_commands.call_count == 1 @@ -856,7 +856,7 @@ async def test_turn_off_legacy(hass: HomeAssistant, remote: Mock) -> None: """Test for turn_off.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 1 @@ -871,7 +871,7 @@ async def test_turn_off_os_error( await setup_samsungtv_entry(hass, MOCK_CONFIG) remote.close = Mock(side_effect=OSError("BOOM")) await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert "Could not establish connection" in caplog.text @@ -885,7 +885,7 @@ async def test_turn_off_ws_os_error( await setup_samsungtv_entry(hass, MOCK_CONFIGWS) remotews.close = Mock(side_effect=OSError("BOOM")) await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert "Error closing connection" in caplog.text @@ -899,7 +899,7 @@ async def test_turn_off_encryptedws_os_error( await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) remoteencws.close = Mock(side_effect=OSError("BOOM")) await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert "Error closing connection" in caplog.text @@ -908,7 +908,7 @@ async def test_volume_up(hass: HomeAssistant, remote: Mock) -> None: """Test for volume_up.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, 
SERVICE_VOLUME_UP, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 1 @@ -919,7 +919,7 @@ async def test_volume_down(hass: HomeAssistant, remote: Mock) -> None: """Test for volume_down.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_VOLUME_DOWN, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_VOLUME_DOWN, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 1 @@ -930,7 +930,7 @@ async def test_mute_volume(hass: HomeAssistant, remote: Mock) -> None: """Test for mute_volume.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_VOLUME_MUTE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_VOLUME_MUTED: True}, True, @@ -944,14 +944,14 @@ async def test_media_play(hass: HomeAssistant, remote: Mock) -> None: """Test for media_play.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_MEDIA_PLAY, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_MEDIA_PLAY, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 1 assert remote.control.call_args_list == [call("KEY_PLAY")] await hass.services.async_call( - DOMAIN, SERVICE_MEDIA_PLAY_PAUSE, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_MEDIA_PLAY_PAUSE, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 2 @@ -962,14 +962,14 @@ async def test_media_pause(hass: HomeAssistant, remote: Mock) -> None: """Test for media_pause.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_MEDIA_PAUSE, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_MEDIA_PAUSE, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 1 assert remote.control.call_args_list == [call("KEY_PAUSE")] await hass.services.async_call( - DOMAIN, SERVICE_MEDIA_PLAY_PAUSE, {ATTR_ENTITY_ID: 
ENTITY_ID}, True + MP_DOMAIN, SERVICE_MEDIA_PLAY_PAUSE, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 2 @@ -980,7 +980,7 @@ async def test_media_next_track(hass: HomeAssistant, remote: Mock) -> None: """Test for media_next_track.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_MEDIA_NEXT_TRACK, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 1 @@ -991,7 +991,7 @@ async def test_media_previous_track(hass: HomeAssistant, remote: Mock) -> None: """Test for media_previous_track.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # key called assert remote.control.call_count == 1 @@ -1002,7 +1002,7 @@ async def test_media_previous_track(hass: HomeAssistant, remote: Mock) -> None: async def test_turn_on_wol(hass: HomeAssistant) -> None: """Test turn on.""" entry = MockConfigEntry( - domain=SAMSUNGTV_DOMAIN, + domain=DOMAIN, data=MOCK_ENTRY_WS_WITH_MAC, unique_id="any", ) @@ -1013,7 +1013,7 @@ async def test_turn_on_wol(hass: HomeAssistant) -> None: "homeassistant.components.samsungtv.entity.send_magic_packet" ) as mock_send_magic_packet: await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) await hass.async_block_till_done() assert mock_send_magic_packet.called @@ -1024,7 +1024,7 @@ async def test_turn_on_without_turnon(hass: HomeAssistant, remote: Mock) -> None await setup_samsungtv_entry(hass, MOCK_CONFIG) with pytest.raises(HomeAssistantError, match="does not support this service"): await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, 
SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) # nothing called as not supported feature assert remote.control.call_count == 0 @@ -1035,7 +1035,7 @@ async def test_play_media(hass: HomeAssistant, remote: Mock) -> None: await setup_samsungtv_entry(hass, MOCK_CONFIG) with patch("homeassistant.components.samsungtv.bridge.asyncio.sleep") as sleep: await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: ENTITY_ID, @@ -1062,7 +1062,7 @@ async def test_play_media_invalid_type(hass: HomeAssistant) -> None: await setup_samsungtv_entry(hass, MOCK_CONFIG) remote.reset_mock() await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: ENTITY_ID, @@ -1082,7 +1082,7 @@ async def test_play_media_channel_as_string(hass: HomeAssistant) -> None: await setup_samsungtv_entry(hass, MOCK_CONFIG) remote.reset_mock() await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: ENTITY_ID, @@ -1101,7 +1101,7 @@ async def test_play_media_channel_as_non_positive(hass: HomeAssistant) -> None: await setup_samsungtv_entry(hass, MOCK_CONFIG) remote.reset_mock() await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: ENTITY_ID, @@ -1118,7 +1118,7 @@ async def test_select_source(hass: HomeAssistant, remote: Mock) -> None: """Test for select_source.""" await setup_samsungtv_entry(hass, MOCK_CONFIG) await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_SELECT_SOURCE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_INPUT_SOURCE: "HDMI"}, True, @@ -1134,7 +1134,7 @@ async def test_select_source_invalid_source(hass: HomeAssistant) -> None: await setup_samsungtv_entry(hass, MOCK_CONFIG) remote.reset_mock() await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_SELECT_SOURCE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_INPUT_SOURCE: "INVALID"}, True, @@ -1150,7 +1150,7 @@ async def test_play_media_app(hass: HomeAssistant, remotews: Mock) -> None: 
remotews.send_commands.reset_mock() await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: ENTITY_ID, @@ -1174,7 +1174,7 @@ async def test_select_source_app(hass: HomeAssistant, remotews: Mock) -> None: remotews.send_commands.reset_mock() await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_SELECT_SOURCE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_INPUT_SOURCE: "Deezer"}, True, @@ -1199,7 +1199,7 @@ async def test_websocket_unsupported_remote_control( remotews.send_commands.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + MP_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) remotews.raise_mock_ws_event_callback( "ms.error", @@ -1248,7 +1248,7 @@ async def test_volume_control_upnp( # Upnp action succeeds await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_VOLUME_SET, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: 0.5}, True, @@ -1262,7 +1262,7 @@ async def test_volume_control_upnp( status=500, error_code=501, error_desc="Action Failed" ) await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_VOLUME_SET, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: 0.6}, True, @@ -1281,7 +1281,7 @@ async def test_upnp_not_available( # Upnp action fails await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_VOLUME_SET, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: 0.6}, True, @@ -1299,7 +1299,7 @@ async def test_upnp_missing_service( # Upnp action fails await hass.services.async_call( - DOMAIN, + MP_DOMAIN, SERVICE_VOLUME_SET, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: 0.6}, True, diff --git a/tests/components/samsungtv/test_remote.py b/tests/components/samsungtv/test_remote.py index 98cf712e0d2..854c92207bf 100644 --- a/tests/components/samsungtv/test_remote.py +++ b/tests/components/samsungtv/test_remote.py @@ -10,7 +10,7 @@ from homeassistant.components.remote import ( DOMAIN as REMOTE_DOMAIN, SERVICE_SEND_COMMAND, ) 
-from homeassistant.components.samsungtv.const import DOMAIN as SAMSUNGTV_DOMAIN +from homeassistant.components.samsungtv.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -102,7 +102,7 @@ async def test_send_command_service(hass: HomeAssistant, remoteencws: Mock) -> N async def test_turn_on_wol(hass: HomeAssistant) -> None: """Test turn on.""" entry = MockConfigEntry( - domain=SAMSUNGTV_DOMAIN, + domain=DOMAIN, data=MOCK_ENTRY_WS_WITH_MAC, unique_id="any", ) diff --git a/tests/components/samsungtv/test_trigger.py b/tests/components/samsungtv/test_trigger.py index 6607c60b8e8..8076ceb2807 100644 --- a/tests/components/samsungtv/test_trigger.py +++ b/tests/components/samsungtv/test_trigger.py @@ -21,7 +21,7 @@ from tests.common import MockEntity, MockEntityPlatform @pytest.mark.parametrize("entity_domain", ["media_player", "remote"]) async def test_turn_on_trigger_device_id( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_domain: str, ) -> None: @@ -60,14 +60,14 @@ async def test_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 with patch("homeassistant.config.load_yaml_dict", return_value={}): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) - calls.clear() + service_calls.clear() # Ensure WOL backup is called when trigger not present with patch( @@ -78,14 +78,14 @@ async def test_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 mock_send_magic_packet.assert_called() 
@pytest.mark.usefixtures("remoteencws", "rest_api") @pytest.mark.parametrize("entity_domain", ["media_player", "remote"]) async def test_turn_on_trigger_entity_id( - hass: HomeAssistant, calls: list[ServiceCall], entity_domain: str + hass: HomeAssistant, service_calls: list[ServiceCall], entity_domain: str ) -> None: """Test for turn_on triggers by entity_id firing.""" await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) @@ -119,9 +119,9 @@ async def test_turn_on_trigger_entity_id( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == entity_id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == entity_id + assert service_calls[1].data["id"] == 0 @pytest.mark.usefixtures("remoteencws", "rest_api") diff --git a/tests/components/sanix/conftest.py b/tests/components/sanix/conftest.py index 86eaa870770..405cad8b60b 100644 --- a/tests/components/sanix/conftest.py +++ b/tests/components/sanix/conftest.py @@ -1,5 +1,6 @@ """Sanix tests configuration.""" +from collections.abc import Generator from datetime import datetime from unittest.mock import AsyncMock, patch from zoneinfo import ZoneInfo @@ -16,7 +17,6 @@ from sanix import ( ATTR_API_TIME, ) from sanix.models import Measurement -from typing_extensions import Generator from homeassistant.components.sanix.const import CONF_SERIAL_NUMBER, DOMAIN from homeassistant.const import CONF_TOKEN diff --git a/tests/components/schedule/test_init.py b/tests/components/schedule/test_init.py index c43b2500ccb..7cd59f19033 100644 --- a/tests/components/schedule/test_init.py +++ b/tests/components/schedule/test_init.py @@ -31,11 +31,12 @@ from homeassistant.const import ( CONF_ICON, CONF_ID, CONF_NAME, + EVENT_STATE_CHANGED, SERVICE_RELOAD, STATE_OFF, STATE_ON, ) -from homeassistant.core import EVENT_STATE_CHANGED, Context, HomeAssistant +from homeassistant.core import Context, HomeAssistant from homeassistant.helpers import 
entity_registry as er from homeassistant.setup import async_setup_component diff --git a/tests/components/schlage/conftest.py b/tests/components/schlage/conftest.py index dcb6bc52a7b..9d61bb877d9 100644 --- a/tests/components/schlage/conftest.py +++ b/tests/components/schlage/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Schlage tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, create_autospec, patch from pyschlage.lock import Lock import pytest -from typing_extensions import Generator from homeassistant.components.schlage.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/scrape/conftest.py b/tests/components/scrape/conftest.py index f6109dbc19a..5b84f4fd44a 100644 --- a/tests/components/scrape/conftest.py +++ b/tests/components/scrape/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch import uuid import pytest -from typing_extensions import Generator from homeassistant.components.rest.data import DEFAULT_TIMEOUT from homeassistant.components.rest.schema import DEFAULT_METHOD, DEFAULT_VERIFY_SSL diff --git a/tests/components/screenlogic/__init__.py b/tests/components/screenlogic/__init__.py index 9c8a21b1ba4..169c1f28900 100644 --- a/tests/components/screenlogic/__init__.py +++ b/tests/components/screenlogic/__init__.py @@ -20,7 +20,7 @@ GATEWAY_IMPORT_PATH = "homeassistant.components.screenlogic.ScreenLogicGateway" GATEWAY_DISCOVERY_IMPORT_PATH = "homeassistant.components.screenlogic.coordinator.async_discover_gateways_by_unique_id" -def num_key_string_to_int(data: dict) -> None: +def num_key_string_to_int(data: dict) -> dict: """Convert all string number dict keys to integer. This needed for screenlogicpy's data dict format. 
diff --git a/tests/components/screenlogic/test_diagnostics.py b/tests/components/screenlogic/test_diagnostics.py index c6d6ea60e87..77e1ce58dad 100644 --- a/tests/components/screenlogic/test_diagnostics.py +++ b/tests/components/screenlogic/test_diagnostics.py @@ -4,6 +4,7 @@ from unittest.mock import DEFAULT, patch from screenlogicpy import ScreenLogicGateway from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -56,4 +57,4 @@ async def test_diagnostics( hass, hass_client, mock_config_entry ) - assert diag == snapshot + assert diag == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/screenlogic/test_services.py b/tests/components/screenlogic/test_services.py index d175ea27c84..0fc79fad0e5 100644 --- a/tests/components/screenlogic/test_services.py +++ b/tests/components/screenlogic/test_services.py @@ -1,12 +1,12 @@ """Tests for ScreenLogic integration service calls.""" +from collections.abc import AsyncGenerator from typing import Any from unittest.mock import DEFAULT, AsyncMock, patch import pytest from screenlogicpy import ScreenLogicGateway from screenlogicpy.device_const.system import COLOR_MODE -from typing_extensions import AsyncGenerator from homeassistant.components.screenlogic import DOMAIN from homeassistant.components.screenlogic.const import ( diff --git a/tests/components/script/test_blueprint.py b/tests/components/script/test_blueprint.py index b956aa588cb..aef22b93bcf 100644 --- a/tests/components/script/test_blueprint.py +++ b/tests/components/script/test_blueprint.py @@ -74,7 +74,7 @@ async def test_confirmable_notification( "message": "Throw ring in mountain?", "confirm_action": [ { - "service": "homeassistant.turn_on", + "action": "homeassistant.turn_on", "target": {"entity_id": "mount.doom"}, } ], diff --git a/tests/components/script/test_init.py 
b/tests/components/script/test_init.py index 2352e9c64e6..a5eda3757a9 100644 --- a/tests/components/script/test_init.py +++ b/tests/components/script/test_init.py @@ -3,7 +3,7 @@ import asyncio from datetime import timedelta from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import ANY, Mock, patch import pytest @@ -29,8 +29,8 @@ from homeassistant.core import ( callback, split_entity_id, ) -from homeassistant.exceptions import ServiceNotFound -from homeassistant.helpers import device_registry as dr, entity_registry as er, template +from homeassistant.exceptions import ServiceNotFound, TemplateError +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.event import async_track_state_change from homeassistant.helpers.script import ( SCRIPT_MODE_CHOICES, @@ -47,11 +47,13 @@ import homeassistant.util.dt as dt_util from tests.common import ( MockConfigEntry, + MockUser, async_fire_time_changed, async_mock_service, mock_restore_cache, ) from tests.components.logbook.common import MockRow, mock_humanify +from tests.components.repairs import get_repairs from tests.typing import WebSocketGenerator ENTITY_ID = "script.test" @@ -83,7 +85,7 @@ async def test_passing_variables(hass: HomeAssistant) -> None: "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"hello": "{{ greeting }}"}, } } @@ -113,8 +115,14 @@ async def test_passing_variables(hass: HomeAssistant) -> None: @pytest.mark.parametrize("toggle", [False, True]) -async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: - """Verify turn_on, turn_off & toggle services.""" +@pytest.mark.parametrize("action_schema_variations", ["action", "service"]) +async def test_turn_on_off_toggle( + hass: HomeAssistant, toggle: bool, action_schema_variations: str +) -> None: + """Verify turn_on, turn_off & toggle services. 
+ + Ensures backward compatibility with the old service action schema is maintained. + """ event = "test_event" event_mock = Mock() @@ -130,9 +138,15 @@ async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: async_track_state_change(hass, ENTITY_ID, state_listener, to_state="on") if toggle: - turn_off_step = {"service": "script.toggle", "entity_id": ENTITY_ID} + turn_off_step = { + action_schema_variations: "script.toggle", + "entity_id": ENTITY_ID, + } else: - turn_off_step = {"service": "script.turn_off", "entity_id": ENTITY_ID} + turn_off_step = { + action_schema_variations: "script.turn_off", + "entity_id": ENTITY_ID, + } assert await async_setup_component( hass, "script", @@ -163,7 +177,7 @@ async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: invalid_configs = [ {"test": {}}, {"test hello world": {"sequence": [{"event": "bla"}]}}, - {"test": {"sequence": {"event": "test_event", "service": "homeassistant.turn_on"}}}, + {"test": {"sequence": {"event": "test_event", "action": "homeassistant.turn_on"}}}, ] @@ -178,7 +192,7 @@ invalid_configs = [ "test": { "sequence": { "event": "test_event", - "service": "homeassistant.turn_on", + "action": "homeassistant.turn_on", } } }, @@ -233,7 +247,7 @@ async def test_bad_config_validation_critical( "good_script": { "alias": "good_script", "sequence": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -252,13 +266,14 @@ async def test_bad_config_validation_critical( @pytest.mark.parametrize( - ("object_id", "broken_config", "problem", "details"), + ("object_id", "broken_config", "problem", "details", "issue"), [ ( "bad_script", {}, "could not be validated", "required key not provided @ data['sequence']", + "validation_failed_schema", ), ( "bad_script", @@ -270,18 +285,22 @@ async def test_bad_config_validation_critical( "state": "blah", }, }, - "failed to setup actions", + "failed to setup sequence", "Unknown entity registry entry 
abcdabcdabcdabcdabcdabcdabcdabcd.", + "validation_failed_sequence", ), ], ) async def test_bad_config_validation( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, + hass_admin_user: MockUser, object_id, broken_config, problem, details, + issue, ) -> None: """Test bad script configuration which can be detected during validation.""" assert await async_setup_component( @@ -293,7 +312,7 @@ async def test_bad_config_validation( "good_script": { "alias": "good_script", "sequence": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -301,11 +320,22 @@ async def test_bad_config_validation( }, ) - # Check we get the expected error message + # Check we get the expected error message and issue assert ( f"Script with alias 'bad_script' {problem} and has been disabled: {details}" in caplog.text ) + issues = await get_repairs(hass, hass_ws_client) + assert len(issues) == 1 + assert issues[0]["issue_id"] == f"script.bad_script_{issue}" + assert issues[0]["translation_key"] == issue + assert issues[0]["translation_placeholders"] == { + "edit": "/config/script/edit/bad_script", + "entity_id": "script.bad_script", + "error": ANY, + "name": "bad_script", + } + assert issues[0]["translation_placeholders"]["error"].startswith(details) # Make sure both scripts are setup assert set(hass.states.async_entity_ids("script")) == { @@ -315,6 +345,31 @@ async def test_bad_config_validation( # The script failing validation should be unavailable assert hass.states.get("script.bad_script").state == STATE_UNAVAILABLE + # Reloading the automation with fixed config should clear the issue + with patch( + "homeassistant.config.load_yaml_config_file", + autospec=True, + return_value={ + script.DOMAIN: { + object_id: { + "alias": "bad_script", + "sequence": { + "action": "test.automation", + "entity_id": "hello.world", + }, + }, + } + }, + ): + await hass.services.async_call( + script.DOMAIN, + SERVICE_RELOAD, + 
context=Context(user_id=hass_admin_user.id), + blocking=True, + ) + issues = await get_repairs(hass, hass_ws_client) + assert len(issues) == 0 + @pytest.mark.parametrize("running", ["no", "same", "different"]) async def test_reload_service(hass: HomeAssistant, running) -> None: @@ -387,7 +442,7 @@ async def test_reload_unchanged_does_not_stop( "sequence": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.script"}, + {"action": "test.script"}, ], } } @@ -430,13 +485,13 @@ async def test_reload_unchanged_does_not_stop( [ { "test": { - "sequence": [{"service": "test.script"}], + "sequence": [{"action": "test.script"}], } }, # A script using templates { "test": { - "sequence": [{"service": "{{ 'test.script' }}"}], + "sequence": [{"action": "{{ 'test.script' }}"}], } }, # A script using blueprint @@ -623,7 +678,7 @@ async def test_logging_script_error( assert await async_setup_component( hass, "script", - {"script": {"hello": {"sequence": [{"service": "non.existing"}]}}}, + {"script": {"hello": {"sequence": [{"action": "non.existing"}]}}}, ) with pytest.raises(ServiceNotFound) as err: await hass.services.async_call("script", "hello", blocking=True) @@ -647,7 +702,7 @@ async def test_async_get_descriptions_script(hass: HomeAssistant) -> None: """Test async_set_service_schema for the script integration.""" script_config = { DOMAIN: { - "test1": {"sequence": [{"service": "homeassistant.restart"}]}, + "test1": {"sequence": [{"action": "homeassistant.restart"}]}, "test2": { "description": "test2", "fields": { @@ -656,7 +711,7 @@ async def test_async_get_descriptions_script(hass: HomeAssistant) -> None: "example": "param_example", } }, - "sequence": [{"service": "homeassistant.restart"}], + "sequence": [{"action": "homeassistant.restart"}], }, } } @@ -752,11 +807,11 @@ async def test_extraction_functions( "test1": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": 
"light.in_both"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_first"}, }, { @@ -766,15 +821,15 @@ async def test_extraction_functions( "device_id": device_in_both.id, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, ] @@ -782,7 +837,7 @@ async def test_extraction_functions( "test2": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -808,7 +863,7 @@ async def test_extraction_functions( "test3": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -818,27 +873,27 @@ async def test_extraction_functions( }, {"scene": "scene.hello"}, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-last"}, }, ], @@ -985,11 +1040,11 @@ async def test_concurrent_script(hass: HomeAssistant, concurrently) -> None: """Test calling script concurrently or not.""" if concurrently: call_script_2 = { - "service": "script.turn_on", + "action": "script.turn_on", "data": {"entity_id": "script.script2"}, } else: - call_script_2 = {"service": "script.script2"} + call_script_2 = {"action": "script.script2"} assert await async_setup_component( hass, "script", @@ -1002,17 
+1057,17 @@ async def test_concurrent_script(hass: HomeAssistant, concurrently) -> None: { "wait_template": "{{ is_state('input_boolean.test1', 'on') }}" }, - {"service": "test.script", "data": {"value": "script1"}}, + {"action": "test.script", "data": {"value": "script1"}}, ], }, "script2": { "mode": "parallel", "sequence": [ - {"service": "test.script", "data": {"value": "script2a"}}, + {"action": "test.script", "data": {"value": "script2a"}}, { "wait_template": "{{ is_state('input_boolean.test2', 'on') }}" }, - {"service": "test.script", "data": {"value": "script2b"}}, + {"action": "test.script", "data": {"value": "script2b"}}, ], }, } @@ -1083,7 +1138,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", "templated_config_var": "{{ templated_config_var }}", @@ -1099,7 +1154,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", }, @@ -1112,7 +1167,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", }, @@ -1154,7 +1209,7 @@ async def test_script_variables( assert mock_calls[2].data["value"] == "from_service" assert "Error rendering variables" not in caplog.text - with pytest.raises(template.TemplateError): + with pytest.raises(TemplateError): await hass.services.async_call("script", "script3", blocking=True) assert "Error rendering variables" in caplog.text assert len(mock_calls) == 3 @@ -1178,7 +1233,7 @@ async def test_script_this_var_always( "script1": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "this_template": "{{this.entity_id}}", }, @@ -1263,8 +1318,8 @@ async def test_recursive_script( "script1": { "mode": script_mode, "sequence": [ - {"service": "script.script1"}, - {"service": "test.script"}, + {"action": "script.script1"}, + 
{"action": "test.script"}, ], }, } @@ -1313,26 +1368,26 @@ async def test_recursive_script_indirect( "script1": { "mode": script_mode, "sequence": [ - {"service": "script.script2"}, + {"action": "script.script2"}, ], }, "script2": { "mode": script_mode, "sequence": [ - {"service": "script.script3"}, + {"action": "script.script3"}, ], }, "script3": { "mode": script_mode, "sequence": [ - {"service": "script.script4"}, + {"action": "script.script4"}, ], }, "script4": { "mode": script_mode, "sequence": [ - {"service": "script.script1"}, - {"service": "test.script"}, + {"action": "script.script1"}, + {"action": "test.script"}, ], }, } @@ -1397,10 +1452,10 @@ async def test_recursive_script_turn_on( "condition": "template", "value_template": "{{ request == 'step_2' }}", }, - "sequence": {"service": "test.script_done"}, + "sequence": {"action": "test.script_done"}, }, "default": { - "service": "script.turn_on", + "action": "script.turn_on", "data": { "entity_id": "script.script1", "variables": {"request": "step_2"}, @@ -1408,7 +1463,7 @@ async def test_recursive_script_turn_on( }, }, { - "service": "script.turn_on", + "action": "script.turn_on", "data": {"entity_id": "script.script1"}, }, ], @@ -1470,7 +1525,7 @@ async def test_websocket_config( """Test config command.""" config = { "alias": "hello", - "sequence": [{"service": "light.turn_on"}], + "sequence": [{"action": "light.turn_on"}], } assert await async_setup_component( hass, @@ -1534,7 +1589,7 @@ async def test_script_service_changed_entity_id( "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"entity_id": "{{ this.entity_id }}"}, } } @@ -1563,9 +1618,7 @@ async def test_script_service_changed_entity_id( assert calls[1].data["entity_id"] == "script.custom_entity_id_2" -async def test_blueprint_automation( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_blueprint_script(hass: HomeAssistant, calls: list[ServiceCall]) -> None: 
"""Test blueprint script.""" assert await async_setup_component( hass, @@ -1617,12 +1670,13 @@ async def test_blueprint_automation( "a_number": 5, }, "Blueprint 'Call service' generated invalid script", - "value should be a string for dictionary value @ data['sequence'][0]['service']", + "value should be a string for dictionary value @ data['sequence'][0]['action']", ), ], ) async def test_blueprint_script_bad_config( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, blueprint_inputs, problem, @@ -1646,9 +1700,24 @@ async def test_blueprint_script_bad_config( assert problem in caplog.text assert details in caplog.text + issues = await get_repairs(hass, hass_ws_client) + assert len(issues) == 1 + issue = "validation_failed_blueprint" + assert issues[0]["issue_id"] == f"script.test_script_{issue}" + assert issues[0]["translation_key"] == issue + assert issues[0]["translation_placeholders"] == { + "edit": "/config/script/edit/test_script", + "entity_id": "script.test_script", + "error": ANY, + "name": "test_script", + } + assert issues[0]["translation_placeholders"]["error"].startswith(details) + async def test_blueprint_script_fails_substitution( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test blueprint script with bad inputs.""" with patch( @@ -1677,6 +1746,18 @@ async def test_blueprint_script_fails_substitution( in caplog.text ) + issues = await get_repairs(hass, hass_ws_client) + assert len(issues) == 1 + issue = "validation_failed_blueprint" + assert issues[0]["issue_id"] == f"script.test_script_{issue}" + assert issues[0]["translation_key"] == issue + assert issues[0]["translation_placeholders"] == { + "edit": "/config/script/edit/test_script", + "entity_id": "script.test_script", + "error": "No substitution found for input blah", + "name": "test_script", + } + @pytest.mark.parametrize("response", 
[{"value": 5}, '{"value": 5}']) async def test_responses(hass: HomeAssistant, response: Any) -> None: @@ -1770,10 +1851,10 @@ async def test_script_queued_mode(hass: HomeAssistant) -> None: "sequence": [ { "parallel": [ - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, ] } ] @@ -1781,7 +1862,7 @@ async def test_script_queued_mode(hass: HomeAssistant) -> None: "test_sub": { "mode": "queued", "sequence": [ - {"service": "test.simulated_remote"}, + {"action": "test.simulated_remote"}, ], }, } diff --git a/tests/components/script/test_recorder.py b/tests/components/script/test_recorder.py index ca915cede6f..6358093014a 100644 --- a/tests/components/script/test_recorder.py +++ b/tests/components/script/test_recorder.py @@ -52,7 +52,7 @@ async def test_exclude_attributes( "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"hello": "{{ greeting }}"}, } } diff --git a/tests/components/search/test_init.py b/tests/components/search/test_init.py index a817fbfc39e..9b2b959e0dd 100644 --- a/tests/components/search/test_init.py +++ b/tests/components/search/test_init.py @@ -534,12 +534,14 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.AUTOMATION, "automation.wled_device") == { ItemType.AREA: {living_room_area.id}, ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.AUTOMATION, "automation.floor") == { ItemType.FLOOR: {first_floor.floor_id}, @@ -561,6 +563,7 @@ async def test_search( }, ItemType.FLOOR: 
{first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled_hue"}, + ItemType.INTEGRATION: {"hue", "wled"}, } assert search(ItemType.AUTOMATION, "automation.scene") == { ItemType.AREA: {bedroom_area.id, kitchen_area.id, living_room_area.id}, @@ -574,6 +577,7 @@ async def test_search( scene_wled_hue_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert search(ItemType.AUTOMATION, "automation.script") == { @@ -589,6 +593,7 @@ async def test_search( script_scene_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -611,6 +616,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.device", "script.hue"}, } @@ -624,6 +630,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, } @@ -639,6 +646,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.LABEL: {label_christmas.label_id}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, @@ -652,6 +660,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {"scene.scene_hue_seg_1", 
scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.device", "script.hue"}, } @@ -664,6 +673,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, } @@ -673,6 +683,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert search(ItemType.ENTITY, hue_segment_1_entity.entity_id) == { @@ -681,6 +692,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.LABEL: {label_energy.label_id}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.hue"}, @@ -691,6 +703,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert not search(ItemType.ENTITY, "automation.wled") @@ -722,6 +735,7 @@ async def test_search( } assert search(ItemType.ENTITY, "light.wled_config_entry_source") == { ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, + ItemType.INTEGRATION: {"wled"}, } assert not search(ItemType.FLOOR, "unknown") @@ -780,6 +794,7 @@ async def test_search( wled_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.GROUP, "group.hue") == { ItemType.AREA: {kitchen_area.id}, @@ -790,6 +805,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: 
{first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.GROUP, "group.wled_hue") == { ItemType.AREA: {bedroom_area.id, living_room_area.id, kitchen_area.id}, @@ -803,6 +819,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCRIPT: {"script.group"}, } @@ -841,6 +858,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.SCENE, "scene.scene_hue_seg_1") == { ItemType.AREA: {kitchen_area.id}, @@ -848,6 +866,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.ENTITY: {hue_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCENE, scene_wled_hue_entity.entity_id) == { ItemType.AREA: {bedroom_area.id, living_room_area.id, kitchen_area.id}, @@ -861,6 +880,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.LABEL: {label_other.label_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -880,6 +900,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.SCRIPT, "script.hue") == { ItemType.AREA: {kitchen_area.id}, @@ -887,6 +908,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.ENTITY: {hue_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCRIPT, "script.script_with_templated_services") == {} assert search(ItemType.SCRIPT, "script.device") == { @@ -894,6 +916,7 @@ async def test_search( 
ItemType.CONFIG_ENTRY: {hue_config_entry.entry_id}, ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCRIPT, "script.floor") == { ItemType.FLOOR: {first_floor.floor_id}, @@ -915,6 +938,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled_hue"}, + ItemType.INTEGRATION: {"hue", "wled"}, } assert search(ItemType.SCRIPT, script_scene_entity.entity_id) == { ItemType.AREA: {bedroom_area.id, kitchen_area.id, living_room_area.id}, @@ -928,6 +952,7 @@ async def test_search( scene_wled_hue_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.LABEL: {label_other.label_id}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } @@ -944,6 +969,7 @@ async def test_search( script_scene_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -981,6 +1007,7 @@ async def test_search( ), ItemType.CONFIG_ENTRY: [hue_config_entry.entry_id], ItemType.FLOOR: [first_floor.floor_id], + ItemType.INTEGRATION: ["hue"], ItemType.SCENE: unordered( ["scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id] ), diff --git a/tests/components/season/conftest.py b/tests/components/season/conftest.py index a45a2078d9b..c7458b0a2e1 100644 --- a/tests/components/season/conftest.py +++ b/tests/components/season/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.season.const import DOMAIN, TYPE_ASTRONOMICAL from homeassistant.const import CONF_TYPE diff --git a/tests/components/season/test_sensor.py b/tests/components/season/test_sensor.py 
index ffc8e9f1a07..881192c95f0 100644 --- a/tests/components/season/test_sensor.py +++ b/tests/components/season/test_sensor.py @@ -70,6 +70,7 @@ def idfn(val): """Provide IDs for pytest parametrize.""" if isinstance(val, (datetime)): return val.strftime("%Y%m%d") + return None @pytest.mark.parametrize(("type", "day", "expected"), NORTHERN_PARAMETERS, ids=idfn) diff --git a/tests/components/select/test_device_condition.py b/tests/components/select/test_device_condition.py index e60df688658..fc35757fa67 100644 --- a/tests/components/select/test_device_condition.py +++ b/tests/components/select/test_device_condition.py @@ -21,17 +21,7 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import MockConfigEntry, async_get_device_automations async def test_get_conditions( @@ -115,7 +105,7 @@ async def test_get_conditions_hidden_auxiliary( async def test_if_selected_option( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -181,7 +171,7 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set( entry.entity_id, "option1", {"options": ["option1", "option2"]} @@ -189,8 +179,8 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["result"] == "option1 - event - test_event1" + assert len(service_calls) == 1 + assert 
service_calls[0].data["result"] == "option1 - event - test_event1" hass.states.async_set( entry.entity_id, "option2", {"options": ["option1", "option2"]} @@ -198,13 +188,13 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["result"] == "option2 - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["result"] == "option2 - event - test_event2" async def test_if_selected_option_legacy( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -252,8 +242,8 @@ async def test_if_selected_option_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["result"] == "option1 - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["result"] == "option1 - event - test_event1" async def test_get_condition_capabilities( diff --git a/tests/components/select/test_device_trigger.py b/tests/components/select/test_device_trigger.py index c7a55c56202..dbb4e23d785 100644 --- a/tests/components/select/test_device_trigger.py +++ b/tests/components/select/test_device_trigger.py @@ -21,17 +21,7 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import MockConfigEntry, async_get_device_automations async def test_get_triggers( @@ -117,7 +107,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, 
- calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -210,27 +200,27 @@ async def test_if_fires_on_state_change( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "option2") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"to - device - {entry.entity_id} - option1 - option2 - None - 0" ) # Test triggering device trigger with a from state hass.states.async_set(entry.entity_id, "option3") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"from - device - {entry.entity_id} - option2 - option3 - None - 0" ) # Test triggering device trigger with both a from and to state hass.states.async_set(entry.entity_id, "option1") await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert ( - calls[2].data["some"] + service_calls[2].data["some"] == f"from-to - device - {entry.entity_id} - option3 - option1 - None - 0" ) @@ -239,7 +229,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -289,9 +279,9 @@ async def test_if_fires_on_state_change_legacy( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "option2") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"to - device - {entry.entity_id} - option1 - option2 - None - 0" ) diff 
--git a/tests/components/sensibo/snapshots/test_diagnostics.ambr b/tests/components/sensibo/snapshots/test_diagnostics.ambr index c911a7629be..a33209f7c88 100644 --- a/tests/components/sensibo/snapshots/test_diagnostics.ambr +++ b/tests/components/sensibo/snapshots/test_diagnostics.ambr @@ -1,246 +1,5 @@ # serializer version: 1 # name: test_diagnostics - dict({ - 'modes': dict({ - 'auto': dict({ - 'fanLevels': list([ - 'quiet', - 'low', - 'medium', - ]), - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - 'C': dict({ - 'isNative': True, - 'values': list([ - 10, - 16, - 17, - 18, - 19, - 20, - ]), - }), - 'F': dict({ - 'isNative': False, - 'values': list([ - 64, - 66, - 68, - ]), - }), - }), - }), - 'cool': dict({ - 'fanLevels': list([ - 'quiet', - 'low', - 'medium', - ]), - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - 'C': dict({ - 'isNative': True, - 'values': list([ - 10, - 16, - 17, - 18, - 19, - 20, - ]), - }), - 'F': dict({ - 'isNative': False, - 'values': list([ - 64, - 66, - 68, - ]), - }), - }), - }), - 'dry': dict({ - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - 'C': dict({ - 'isNative': True, - 'values': list([ - 10, - 16, - 17, - 18, - 19, - 20, - ]), - }), - 'F': dict({ - 'isNative': False, - 'values': list([ - 64, - 66, - 68, - ]), - }), - }), - }), - 'fan': dict({ - 'fanLevels': list([ - 'quiet', - 'low', - 'medium', - ]), - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - 
]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - }), - }), - 'heat': dict({ - 'fanLevels': list([ - 'quiet', - 'low', - 'medium', - ]), - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - 'C': dict({ - 'isNative': True, - 'values': list([ - 10, - 16, - 17, - 18, - 19, - 20, - ]), - }), - 'F': dict({ - 'isNative': False, - 'values': list([ - 63, - 64, - 66, - ]), - }), - }), - }), - }), - }) -# --- -# name: test_diagnostics.1 - dict({ - 'low': 'low', - 'medium': 'medium', - 'quiet': 'quiet', - }) -# --- -# name: test_diagnostics.2 - dict({ - 'fixedmiddletop': 'fixedMiddleTop', - 'fixedtop': 'fixedTop', - 'stopped': 'stopped', - }) -# --- -# name: test_diagnostics.3 - dict({ - 'fixedcenterleft': 'fixedCenterLeft', - 'fixedleft': 'fixedLeft', - 'stopped': 'stopped', - }) -# --- -# name: test_diagnostics.4 - dict({ - 'fanlevel': 'low', - 'horizontalswing': 'stopped', - 'light': 'on', - 'mode': 'heat', - 'on': True, - 'swing': 'stopped', - 'targettemperature': 21, - 'temperatureunit': 'c', - }) -# --- -# name: test_diagnostics.5 - dict({ - 'fanlevel': 'high', - 'horizontalswing': 'stopped', - 'light': 'on', - 'mode': 'cool', - 'on': True, - 'swing': 'stopped', - 'targettemperature': 21, - 'temperatureunit': 'c', - }) -# --- -# name: test_diagnostics.6 - dict({ - }) -# --- -# name: test_diagnostics[full_snapshot] dict({ 'AAZZAAZZ': dict({ 'ac_states': dict({ diff --git a/tests/components/sensibo/test_climate.py b/tests/components/sensibo/test_climate.py index 6b4aedab828..b5a7be7bde0 100644 --- a/tests/components/sensibo/test_climate.py +++ b/tests/components/sensibo/test_climate.py @@ -400,6 +400,10 @@ async def test_climate_temperatures( "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_state_property", return_value={"result": 
{"status": "Success"}}, ), + pytest.raises( + ServiceValidationError, + match="Provided temperature 24.0 is not valid. Accepted range is 10 to 20", + ), ): await hass.services.async_call( CLIMATE_DOMAIN, @@ -410,7 +414,7 @@ async def test_climate_temperatures( await hass.async_block_till_done() state2 = hass.states.get("climate.hallway") - assert state2.attributes["temperature"] == 20 + assert state2.attributes["temperature"] == 19 with ( patch( diff --git a/tests/components/sensibo/test_diagnostics.py b/tests/components/sensibo/test_diagnostics.py index 1fe72cca0f3..0dc1f2c25e9 100644 --- a/tests/components/sensibo/test_diagnostics.py +++ b/tests/components/sensibo/test_diagnostics.py @@ -3,6 +3,7 @@ from __future__ import annotations from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -10,8 +11,6 @@ from homeassistant.core import HomeAssistant from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator -EXCLUDE_ATTRIBUTES = {"full_features"} - async def test_diagnostics( hass: HomeAssistant, @@ -24,16 +23,6 @@ async def test_diagnostics( diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert diag["ABC999111"]["full_capabilities"] == snapshot - assert diag["ABC999111"]["fan_modes_translated"] == snapshot - assert diag["ABC999111"]["swing_modes_translated"] == snapshot - assert diag["ABC999111"]["horizontal_swing_modes_translated"] == snapshot - assert diag["ABC999111"]["smart_low_state"] == snapshot - assert diag["ABC999111"]["smart_high_state"] == snapshot - assert diag["ABC999111"]["pure_conf"] == snapshot - - def limit_attrs(prop, path): - exclude_attrs = EXCLUDE_ATTRIBUTES - return prop in exclude_attrs - - assert diag == snapshot(name="full_snapshot", exclude=limit_attrs) + assert diag == snapshot( + exclude=props("full_features", "created_at", 
"modified_at"), + ) diff --git a/tests/components/sensor/test_device_condition.py b/tests/components/sensor/test_device_condition.py index 3bc9a660e93..d9a9900b8b1 100644 --- a/tests/components/sensor/test_device_condition.py +++ b/tests/components/sensor/test_device_condition.py @@ -27,7 +27,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -37,12 +36,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( "device_class", [ @@ -470,7 +463,6 @@ async def test_if_state_not_above_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test for bad value conditions.""" @@ -513,7 +505,7 @@ async def test_if_state_above( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -559,22 +551,22 @@ async def test_if_state_above( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - 
assert calls[0].data["some"] == "event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -582,7 +574,7 @@ async def test_if_state_above_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -628,22 +620,22 @@ async def test_if_state_above_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -651,7 +643,7 @@ async def test_if_state_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -697,22 +689,22 @@ async def test_if_state_below( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await 
hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -720,7 +712,7 @@ async def test_if_state_between( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -767,30 +759,30 @@ async def test_if_state_between( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "event - test_event1" hass.states.async_set(entry.entity_id, 21) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entry.entity_id, 19) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "event - test_event1" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "event - test_event1" diff --git 
a/tests/components/sensor/test_device_trigger.py b/tests/components/sensor/test_device_trigger.py index 87a6d9929c3..bb560c824d3 100644 --- a/tests/components/sensor/test_device_trigger.py +++ b/tests/components/sensor/test_device_trigger.py @@ -31,7 +31,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -41,12 +40,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( "device_class", [ @@ -427,7 +420,6 @@ async def test_if_fires_not_on_above_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test for value triggers firing.""" @@ -467,7 +459,7 @@ async def test_if_fires_on_state_above( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -513,17 +505,18 @@ async def test_if_fires_on_state_above( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + service_calls[0].data["some"] + == f"bat_low device - {entry.entity_id} - 9 - 11 - 
None" ) @@ -532,7 +525,7 @@ async def test_if_fires_on_state_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -578,17 +571,18 @@ async def test_if_fires_on_state_below( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 11 - 9 - None" + service_calls[0].data["some"] + == f"bat_low device - {entry.entity_id} - 11 - 9 - None" ) @@ -597,7 +591,7 @@ async def test_if_fires_on_state_between( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -644,28 +638,30 @@ async def test_if_fires_on_state_between( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + service_calls[0].data["some"] + == f"bat_low device - {entry.entity_id} - 9 - 11 - None" ) hass.states.async_set(entry.entity_id, 21) await hass.async_block_till_done() - assert len(calls) == 1 + assert 
len(service_calls) == 1 hass.states.async_set(entry.entity_id, 19) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] == f"bat_low device - {entry.entity_id} - 21 - 19 - None" + service_calls[1].data["some"] + == f"bat_low device - {entry.entity_id} - 21 - 19 - None" ) @@ -674,7 +670,7 @@ async def test_if_fires_on_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -720,17 +716,18 @@ async def test_if_fires_on_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + service_calls[0].data["some"] + == f"bat_low device - {entry.entity_id} - 9 - 11 - None" ) @@ -739,7 +736,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -786,17 +783,17 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 10) hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, 
dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - 10 - 11 - 0:00:05" ) diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 126e327f364..2504ea80d84 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator from datetime import UTC, date, datetime from decimal import Decimal from types import ModuleType from typing import Any import pytest -from typing_extensions import Generator from homeassistant.components import sensor from homeassistant.components.number import NumberDeviceClass @@ -418,7 +418,7 @@ async def test_restore_sensor_save_state( assert state["entity_id"] == entity0.entity_id extra_data = hass_storage[RESTORE_STATE_KEY]["data"][0]["extra_data"] assert extra_data == expected_extra_data - assert type(extra_data["native_value"]) == native_value_type + assert type(extra_data["native_value"]) is native_value_type @pytest.mark.parametrize( @@ -479,7 +479,7 @@ async def test_restore_sensor_restore_state( assert hass.states.get(entity0.entity_id) assert entity0.native_value == native_value - assert type(entity0.native_value) == native_value_type + assert type(entity0.native_value) is native_value_type assert entity0.native_unit_of_measurement == uom @@ -942,7 +942,21 @@ async def test_custom_unit_change( "1000000", "1093613", SensorDeviceClass.DISTANCE, - ) + ), + # Volume Storage (subclass of Volume) + ( + US_CUSTOMARY_SYSTEM, + UnitOfVolume.LITERS, + UnitOfVolume.GALLONS, + UnitOfVolume.GALLONS, + UnitOfVolume.FLUID_OUNCES, + 1000, + "1000", + "264", + "264", + "33814", + SensorDeviceClass.VOLUME_STORAGE, + ), ], ) async def test_unit_conversion_priority( diff --git 
a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 62cb66d2053..27fab9c0b3b 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -3,7 +3,7 @@ from datetime import datetime, timedelta import math from statistics import mean -from typing import Literal +from typing import Any, Literal from unittest.mock import patch from freezegun import freeze_time @@ -50,9 +50,14 @@ from tests.components.recorder.common import ( async_recorder_block_till_done, async_wait_recording_done, do_adhoc_statistics, + get_start_time, statistics_during_period, ) -from tests.typing import RecorderInstanceGenerator, WebSocketGenerator +from tests.typing import ( + MockHAClientWebSocket, + RecorderInstanceGenerator, + WebSocketGenerator, +) BATTERY_SENSOR_ATTRIBUTES = { "device_class": "battery", @@ -95,7 +100,7 @@ KW_SENSOR_ATTRIBUTES = { @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder patches.""" @@ -116,6 +121,33 @@ async def async_list_statistic_ids( ) +async def assert_statistic_ids( + hass: HomeAssistant, + expected_result: list[dict[str, Any]], +) -> None: + """Assert statistic ids.""" + with session_scope(hass=hass, read_only=True) as session: + db_states = list(session.query(StatisticsMeta)) + assert len(db_states) == len(expected_result) + for i, db_state in enumerate(db_states): + assert db_state.statistic_id == expected_result[i]["statistic_id"] + assert ( + db_state.unit_of_measurement + == expected_result[i]["unit_of_measurement"] + ) + + +async def assert_validation_result( + client: MockHAClientWebSocket, + expected_result: dict[str, list[dict[str, Any]]], +) -> None: + """Assert statistics validation result.""" + await client.send_json_auto_id({"type": "recorder/validate_statistics"}) + response = await client.receive_json() + assert 
response["success"] + assert response["result"] == expected_result + + @pytest.mark.parametrize( ( "device_class", @@ -163,7 +195,7 @@ async def test_compile_hourly_statistics( max, ) -> None: """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -247,7 +279,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( If the last updated value is the same we will have a zero duration. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -361,7 +393,7 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( If the last updated value is the same we will have a zero duration. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -467,7 +499,7 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period( max, ) -> None: """Test compiling hourly statistics when the only state at end of period.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -561,7 +593,7 @@ async def test_compile_hourly_statistics_purged_state_changes( unit_class, ) -> None: """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -632,7 +664,7 @@ async def 
test_compile_hourly_statistics_wrong_unit( attributes, ) -> None: """Test compiling hourly statistics for sensor with unit not matching device class.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -856,7 +888,7 @@ async def test_compile_hourly_sum_statistics_amount( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1040,7 +1072,7 @@ async def test_compile_hourly_sum_statistics_amount_reset_every_state_change( factor, ) -> None: """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -1163,7 +1195,7 @@ async def test_compile_hourly_sum_statistics_amount_invalid_last_reset( factor, ) -> None: """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -1263,7 +1295,7 @@ async def test_compile_hourly_sum_statistics_nan_inf_state( factor, ) -> None: """Test compiling hourly statistics with nan and inf states.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -1398,7 +1430,7 @@ async def test_compile_hourly_sum_statistics_negative_state( offset, ) -> None: """Test compiling hourly statistics with negative states.""" 
- zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) hass.data.pop(loader.DATA_CUSTOM_COMPONENTS) mocksensor = MockSensor(name="custom_sensor") @@ -1406,10 +1438,11 @@ async def test_compile_hourly_sum_statistics_negative_state( setup_test_component_platform(hass, DOMAIN, [mocksensor], built_in=False) await async_setup_component(hass, "homeassistant", {}) - await async_setup_component( - hass, "sensor", {"sensor": [{"platform": "demo"}, {"platform": "test"}]} - ) - await hass.async_block_till_done() + with freeze_time(zero) as freezer: + await async_setup_component( + hass, "sensor", {"sensor": [{"platform": "demo"}, {"platform": "test"}]} + ) + await hass.async_block_till_done() attributes = { "device_class": device_class, "state_class": state_class, @@ -1510,7 +1543,7 @@ async def test_compile_hourly_sum_statistics_total_no_reset( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1623,7 +1656,7 @@ async def test_compile_hourly_sum_statistics_total_increasing( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1736,7 +1769,7 @@ async def test_compile_hourly_sum_statistics_total_increasing_small_dip( factor, ) -> None: """Test small dips in sensor readings do not trigger a reset.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1838,7 +1871,7 @@ async def 
test_compile_hourly_energy_statistics_unsupported( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1942,7 +1975,7 @@ async def test_compile_hourly_energy_statistics_multiple( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling multiple hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -2156,7 +2189,7 @@ async def test_compile_hourly_statistics_unchanged( value, ) -> None: """Test compiling hourly statistics, with no changes during the hour.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2199,7 +2232,7 @@ async def test_compile_hourly_statistics_partially_unavailable( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics, with the sensor being partially unavailable.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2268,7 +2301,7 @@ async def test_compile_hourly_statistics_unavailable( sensor.test1 is unavailable and should not have statistics generated sensor.test2 should have statistics generated """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added 
await async_recorder_block_till_done(hass) @@ -2315,7 +2348,7 @@ async def test_compile_hourly_statistics_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics throws.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2433,30 +2466,29 @@ async def test_list_statistic_ids( @pytest.mark.parametrize( - "_attributes", + "energy_attributes", [{**ENERGY_SENSOR_ATTRIBUTES, "last_reset": 0}, TEMPERATURE_SENSOR_ATTRIBUTES], ) async def test_list_statistic_ids_unsupported( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - _attributes, + energy_attributes: dict[str, Any], ) -> None: """Test listing future statistic ids for unsupported sensor.""" await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) - attributes = dict(_attributes) + attributes = dict(energy_attributes) hass.states.async_set("sensor.test1", 0, attributes=attributes) if "last_reset" in attributes: attributes.pop("unit_of_measurement") hass.states.async_set("last_reset.test2", 0, attributes=attributes) - attributes = dict(_attributes) + attributes = dict(energy_attributes) if "unit_of_measurement" in attributes: attributes["unit_of_measurement"] = "invalid" hass.states.async_set("sensor.test3", 0, attributes=attributes) attributes.pop("unit_of_measurement") hass.states.async_set("sensor.test4", 0, attributes=attributes) - attributes = dict(_attributes) + attributes = dict(energy_attributes) attributes["state_class"] = "invalid" hass.states.async_set("sensor.test5", 0, attributes=attributes) attributes.pop("state_class") @@ -2492,7 +2524,7 @@ async def test_compile_hourly_statistics_changing_units_1( This tests the case where the recorder cannot convert between the units. 
""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2621,7 +2653,7 @@ async def test_compile_hourly_statistics_changing_units_2( This tests the behaviour when the sensor units are note supported by any unit converter. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) - timedelta(seconds=30 * 5) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2700,7 +2732,7 @@ async def test_compile_hourly_statistics_changing_units_3( This tests the behaviour when the sensor units are note supported by any unit converter. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2821,7 +2853,7 @@ async def test_compile_hourly_statistics_convert_units_1( This tests the case where the recorder can convert between the units. 
""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2980,7 +3012,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( max, ) -> None: """Test compiling hourly statistics where units change from one hour to the next.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3105,7 +3137,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( max, ) -> None: """Test compiling hourly statistics where units change during an hour.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3129,7 +3161,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 5)) + do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 10)) await async_wait_recording_done(hass) assert "The unit of sensor.test1 is changing" not in caplog.text assert "and matches the unit of already compiled statistics" not in caplog.text @@ -3151,9 +3183,9 @@ async def test_compile_hourly_statistics_equivalent_units_2( "sensor.test1": [ { "start": process_timestamp( - zero + timedelta(seconds=30 * 5) + zero + timedelta(seconds=30 * 10) ).timestamp(), - "end": process_timestamp(zero + timedelta(seconds=30 * 15)).timestamp(), + "end": process_timestamp(zero + timedelta(seconds=30 * 20)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), @@ -3198,7 +3230,7 @@ async def 
test_compile_hourly_statistics_changing_device_class_1( Device class is ignored, meaning changing device class should not influence the statistics. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3409,7 +3441,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( Device class is ignored, meaning changing device class should not influence the statistics. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3547,7 +3579,7 @@ async def test_compile_hourly_statistics_changing_state_class( max, ) -> None: """Test compiling hourly statistics where state class changes.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period0 + timedelta(minutes=10) await async_setup_component(hass, "sensor", {}) @@ -4117,7 +4149,7 @@ async def async_record_states( one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=10 * 5) three = two + timedelta(seconds=40 * 5) - four = three + timedelta(seconds=10 * 5) + four = three + timedelta(seconds=9 * 5) states = {entity_id: []} freezer.move_to(one) @@ -4178,22 +4210,8 @@ async def test_validate_unit_change_convertible( The test also asserts that the sensor's device class is ignored. 
""" - msg_id = 1 - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4205,14 +4223,20 @@ async def test_validate_unit_change_convertible( # No statistics, unit in state matching device class - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) # No statistics, unit in state not matching device class - empty response hass.states.async_set( - "sensor.test", 11, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 11, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4221,7 +4245,10 @@ async def test_validate_unit_change_convertible( await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) expected = { @@ -4241,7 +4268,10 @@ async def test_validate_unit_change_convertible( # Valid state - empty response hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit} + "sensor.test", + 13, + attributes={**attributes, 
"unit_of_measurement": unit}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4253,7 +4283,10 @@ async def test_validate_unit_change_convertible( # Valid state in compatible unit - empty response hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4292,22 +4325,7 @@ async def test_validate_statistics_unit_ignore_device_class( The test asserts that the sensor's device class is ignored. """ - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4319,7 +4337,9 @@ async def test_validate_statistics_unit_ignore_device_class( # No statistics, no device class - empty response initial_attributes = {"state_class": "measurement", "unit_of_measurement": "dogs"} - hass.states.async_set("sensor.test", 10, attributes=initial_attributes) + hass.states.async_set( + "sensor.test", 10, attributes=initial_attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4327,7 +4347,10 @@ async def test_validate_statistics_unit_ignore_device_class( do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 12, + 
attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4384,24 +4407,10 @@ async def test_validate_statistics_unit_change_no_device_class( conversion, and the unit is then changed to a unit which can and cannot be converted to the original unit. """ - msg_id = 1 attributes = dict(attributes) attributes.pop("device_class") - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4413,14 +4422,20 @@ async def test_validate_statistics_unit_change_no_device_class( # No statistics, sensor state set - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) # No statistics, sensor state set to an incompatible unit - empty response hass.states.async_set( - "sensor.test", 11, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 11, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4429,7 +4444,10 @@ async def test_validate_statistics_unit_change_no_device_class( await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} 
+ "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) expected = { @@ -4449,7 +4467,10 @@ async def test_validate_statistics_unit_change_no_device_class( # Valid state - empty response hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4461,7 +4482,10 @@ async def test_validate_statistics_unit_change_no_device_class( # Valid state in compatible unit - empty response hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4498,22 +4522,7 @@ async def test_validate_statistics_unsupported_state_class( unit, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4524,7 +4533,9 @@ async def test_validate_statistics_unsupported_state_class( await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await 
hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4536,7 +4547,9 @@ async def test_validate_statistics_unsupported_state_class( # State update with invalid state class, expect error _attributes = dict(attributes) _attributes.pop("state_class") - hass.states.async_set("sensor.test", 12, attributes=_attributes) + hass.states.async_set( + "sensor.test", 12, attributes=_attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() expected = { "sensor.test": [ @@ -4566,22 +4579,7 @@ async def test_validate_statistics_sensor_no_longer_recorded( unit, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4592,7 +4590,9 @@ async def test_validate_statistics_sensor_no_longer_recorded( await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4633,22 +4633,7 @@ async def test_validate_statistics_sensor_not_recorded( unit, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert 
response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4673,7 +4658,9 @@ async def test_validate_statistics_sensor_not_recorded( "entity_filter", return_value=False, ): - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() await assert_validation_result(client, expected) @@ -4697,22 +4684,7 @@ async def test_validate_statistics_sensor_removed( unit, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4723,7 +4695,9 @@ async def test_validate_statistics_sensor_removed( await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4760,33 +4734,7 @@ async def test_validate_statistics_unit_change_no_conversion( unit2, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert 
response["success"] - assert response["result"] == expected_result - - async def assert_statistic_ids(expected_result): - with session_scope(hass=hass, read_only=True) as session: - db_states = list(session.query(StatisticsMeta)) - assert len(db_states) == len(expected_result) - for i, db_state in enumerate(db_states): - assert db_state.statistic_id == expected_result[i]["statistic_id"] - assert ( - db_state.unit_of_measurement - == expected_result[i]["unit_of_measurement"] - ) - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) @@ -4797,13 +4745,19 @@ async def test_validate_statistics_unit_change_no_conversion( # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) # No statistics, changed unit - empty response hass.states.async_set( - "sensor.test", 11, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 11, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) @@ -4811,11 +4765,14 @@ async def test_validate_statistics_unit_change_no_conversion( await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_statistic_ids([]) + await assert_statistic_ids(hass, []) # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) @@ -4824,13 +4781,16 @@ async def 
test_validate_statistics_unit_change_no_conversion( do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) await assert_validation_result(client, {}) # Change unit - expect error hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) expected = { @@ -4850,7 +4810,10 @@ async def test_validate_statistics_unit_change_no_conversion( # Original unit - empty response hass.states.async_set( - "sensor.test", 14, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 14, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4894,33 +4857,7 @@ async def test_validate_statistics_unit_change_equivalent_units( This tests no validation issue is created when a sensor's unit changes to an equivalent unit. 
""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - async def assert_statistic_ids(expected_result): - with session_scope(hass=hass, read_only=True) as session: - db_states = list(session.query(StatisticsMeta)) - assert len(db_states) == len(expected_result) - for i, db_state in enumerate(db_states): - assert db_state.statistic_id == expected_result[i]["statistic_id"] - assert ( - db_state.unit_of_measurement - == expected_result[i]["unit_of_measurement"] - ) - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) @@ -4931,7 +4868,10 @@ async def test_validate_statistics_unit_change_equivalent_units( # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) @@ -4940,12 +4880,15 @@ async def test_validate_statistics_unit_change_equivalent_units( do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) # Units changed to an equivalent unit - empty response hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp() + 1, ) await assert_validation_result(client, {}) @@ -4954,7 +4897,7 @@ 
async def test_validate_statistics_unit_change_equivalent_units( do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit2}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit2}] ) await assert_validation_result(client, {}) @@ -4978,34 +4921,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( This tests a validation issue is created when a sensor's unit changes to an equivalent unit which is not known to the unit converters. """ - - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - async def assert_statistic_ids(expected_result): - with session_scope(hass=hass, read_only=True) as session: - db_states = list(session.query(StatisticsMeta)) - assert len(db_states) == len(expected_result) - for i, db_state in enumerate(db_states): - assert db_state.statistic_id == expected_result[i]["statistic_id"] - assert ( - db_state.unit_of_measurement - == expected_result[i]["unit_of_measurement"] - ) - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) @@ -5016,7 +4932,10 @@ async def test_validate_statistics_unit_change_equivalent_units_2( # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) @@ -5025,12 +4944,15 @@ async def 
test_validate_statistics_unit_change_equivalent_units_2( do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) # Units changed to an equivalent unit which is not known by the unit converters hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) expected = { "sensor.test": [ @@ -5052,7 +4974,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) await assert_validation_result(client, expected) @@ -5061,21 +4983,6 @@ async def test_validate_statistics_other_domain( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test sensor does not raise issues for statistics for other domains.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) client = await hass_ws_client() @@ -5211,7 +5118,7 @@ async def async_record_states_partially_unavailable(hass, zero, entity_id, attri one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=15 * 5) three = two + timedelta(seconds=30 * 5) - four = three + timedelta(seconds=15 * 5) + four = 
three + timedelta(seconds=14 * 5) states = {entity_id: []} with freeze_time(one) as freezer: diff --git a/tests/components/sentry/conftest.py b/tests/components/sentry/conftest.py index 781250b2753..663f8ee6aa6 100644 --- a/tests/components/sentry/conftest.py +++ b/tests/components/sentry/conftest.py @@ -6,7 +6,7 @@ from typing import Any import pytest -from homeassistant.components.sentry import DOMAIN +from homeassistant.components.sentry.const import DOMAIN from tests.common import MockConfigEntry diff --git a/tests/components/seventeentrack/conftest.py b/tests/components/seventeentrack/conftest.py index 1ab4eed11ee..e2493319b69 100644 --- a/tests/components/seventeentrack/conftest.py +++ b/tests/components/seventeentrack/conftest.py @@ -1,10 +1,10 @@ """Configuration for 17Track tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch -from py17track.package import Package +from pyseventeentrack.package import Package import pytest -from typing_extensions import Generator from homeassistant.components.seventeentrack.const import ( CONF_SHOW_ARCHIVED, diff --git a/tests/components/seventeentrack/snapshots/test_services.ambr b/tests/components/seventeentrack/snapshots/test_services.ambr index 185a1d44fe0..202c5a3d667 100644 --- a/tests/components/seventeentrack/snapshots/test_services.ambr +++ b/tests/components/seventeentrack/snapshots/test_services.ambr @@ -3,27 +3,39 @@ dict({ 'packages': list([ dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 3', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'Expired', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '123', }), dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 1', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 
'Registered Parcel', 'status': 'In Transit', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '456', }), dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 2', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'Delivered', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '789', }), ]), @@ -33,19 +45,27 @@ dict({ 'packages': list([ dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 1', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'In Transit', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '456', }), dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 2', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'Delivered', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '789', }), ]), diff --git a/tests/components/seventeentrack/test_config_flow.py b/tests/components/seventeentrack/test_config_flow.py index 380146ed276..0a7c4ca918c 100644 --- a/tests/components/seventeentrack/test_config_flow.py +++ b/tests/components/seventeentrack/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from py17track.errors import SeventeenTrackError +from pyseventeentrack.errors import SeventeenTrackError import pytest from homeassistant import config_entries diff --git a/tests/components/seventeentrack/test_repairs.py b/tests/components/seventeentrack/test_repairs.py new file mode 100644 index 00000000000..0f697c1ad49 --- /dev/null +++ 
b/tests/components/seventeentrack/test_repairs.py @@ -0,0 +1,95 @@ +"""Tests for the seventeentrack repair flow.""" + +from http import HTTPStatus +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN +from homeassistant.components.repairs.websocket_api import RepairsFlowIndexView +from homeassistant.components.seventeentrack import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from . import goto_future, init_integration +from .conftest import DEFAULT_SUMMARY_LENGTH, get_package + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_repair( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure everything starts correctly.""" + await init_integration(hass, mock_config_entry) # 2 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + assert len(issue_registry.issues) == 1 + + package = get_package() + mock_seventeentrack.return_value.profile.packages.return_value = [package] + await goto_future(hass, freezer) + + assert hass.states.get("sensor.17track_package_friendly_name_1") + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 + + assert "deprecated" not in mock_config_entry.data + + repair_issue = issue_registry.async_get_issue( + domain=DOMAIN, issue_id=f"deprecate_sensor_{mock_config_entry.entry_id}" + ) + + assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) + + client = await hass_client() + + resp = await client.post( + RepairsFlowIndexView.url, + json={"handler": DOMAIN, "issue_id": repair_issue.issue_id}, + ) + + assert resp.status == 
HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "form", + "flow_id": flow_id, + "handler": DOMAIN, + "step_id": "confirm", + "data_schema": [], + "errors": None, + "description_placeholders": None, + "last_step": None, + "preview": None, + } + + resp = await client.post(RepairsFlowIndexView.url + f"/{flow_id}") + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "create_entry", + "handler": DOMAIN, + "flow_id": flow_id, + "description": None, + "description_placeholders": None, + } + + assert mock_config_entry.data["deprecated"] + + repair_issue = issue_registry.async_get_issue( + domain=DOMAIN, issue_id="deprecate_sensor" + ) + + assert repair_issue is None + + await goto_future(hass, freezer) + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH diff --git a/tests/components/seventeentrack/test_sensor.py b/tests/components/seventeentrack/test_sensor.py index 75cc6435073..ca16fc64833 100644 --- a/tests/components/seventeentrack/test_sensor.py +++ b/tests/components/seventeentrack/test_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory -from py17track.errors import SeventeenTrackError +from pyseventeentrack.errors import SeventeenTrackError from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -317,4 +317,4 @@ async def test_full_valid_platform_config( assert await async_setup_component(hass, "sensor", VALID_PLATFORM_CONFIG_FULL) await hass.async_block_till_done() assert len(hass.states.async_entity_ids()) == len(DEFAULT_SUMMARY.keys()) - assert len(issue_registry.issues) == 1 + assert len(issue_registry.issues) == 2 diff --git a/tests/components/sfr_box/conftest.py b/tests/components/sfr_box/conftest.py index e86cd06650e..7c1f8bbab5c 100644 --- 
a/tests/components/sfr_box/conftest.py +++ b/tests/components/sfr_box/conftest.py @@ -1,11 +1,11 @@ """Provide common SFR Box fixtures.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, patch import pytest from sfrbox_api.models import DslInfo, FtthInfo, SystemInfo, WanInfo -from typing_extensions import Generator from homeassistant.components.sfr_box.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry diff --git a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr index 7422c1395c3..0023f65c90e 100644 --- a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', + 'model_id': None, 'name': 'SFR Box', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'NB6VAC-MAIN-R4.0.44k', @@ -148,8 +150,10 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', + 'model_id': None, 'name': 'SFR Box', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'NB6VAC-MAIN-R4.0.44k', diff --git a/tests/components/sfr_box/snapshots/test_button.ambr b/tests/components/sfr_box/snapshots/test_button.ambr index 0dfbf187f6d..df097b58c51 100644 --- a/tests/components/sfr_box/snapshots/test_button.ambr +++ b/tests/components/sfr_box/snapshots/test_button.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', + 'model_id': None, 'name': 'SFR Box', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'NB6VAC-MAIN-R4.0.44k', diff --git a/tests/components/sfr_box/snapshots/test_sensor.ambr b/tests/components/sfr_box/snapshots/test_sensor.ambr index 0f39eed9e60..46b22448d25 100644 --- 
a/tests/components/sfr_box/snapshots/test_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_sensor.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', + 'model_id': None, 'name': 'SFR Box', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': 'NB6VAC-MAIN-R4.0.44k', diff --git a/tests/components/sfr_box/test_binary_sensor.py b/tests/components/sfr_box/test_binary_sensor.py index 8dba537f6cb..6152f8e2721 100644 --- a/tests/components/sfr_box/test_binary_sensor.py +++ b/tests/components/sfr_box/test_binary_sensor.py @@ -1,11 +1,11 @@ """Test the SFR Box binary sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.models import SystemInfo from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/sfr_box/test_button.py b/tests/components/sfr_box/test_button.py index 4f20a2f34a3..f555ccebbf9 100644 --- a/tests/components/sfr_box/test_button.py +++ b/tests/components/sfr_box/test_button.py @@ -1,11 +1,11 @@ """Test the SFR Box buttons.""" +from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.exceptions import SFRBoxError from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/sfr_box/test_diagnostics.py b/tests/components/sfr_box/test_diagnostics.py index 597631d12f1..d31d97cbcf8 100644 --- a/tests/components/sfr_box/test_diagnostics.py +++ b/tests/components/sfr_box/test_diagnostics.py @@ -1,11 +1,11 @@ """Test the SFR Box diagnostics.""" +from collections.abc import Generator from unittest.mock import patch import pytest from 
sfrbox_api.models import SystemInfo from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/sfr_box/test_init.py b/tests/components/sfr_box/test_init.py index 14688009c5c..19e15491be1 100644 --- a/tests/components/sfr_box/test_init.py +++ b/tests/components/sfr_box/test_init.py @@ -1,10 +1,10 @@ """Test the SFR Box setup process.""" +from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError -from typing_extensions import Generator from homeassistant.components.sfr_box.const import DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigEntryState diff --git a/tests/components/sfr_box/test_sensor.py b/tests/components/sfr_box/test_sensor.py index 506e1ed8962..dd4a67b42f6 100644 --- a/tests/components/sfr_box/test_sensor.py +++ b/tests/components/sfr_box/test_sensor.py @@ -1,10 +1,10 @@ """Test the SFR Box sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/sharkiq/test_vacuum.py b/tests/components/sharkiq/test_vacuum.py index e5154008f56..3748cfd6dc4 100644 --- a/tests/components/sharkiq/test_vacuum.py +++ b/tests/components/sharkiq/test_vacuum.py @@ -141,7 +141,7 @@ class MockShark(SharkIqVacuum): @pytest.fixture(autouse=True) @patch("sharkiq.ayla_api.AylaApi", MockAyla) -async def setup_integration(hass): +async def setup_integration(hass: HomeAssistant) -> None: """Build the mock integration.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=TEST_USERNAME, data=CONFIG, entry_id=ENTRY_ID diff --git a/tests/components/shelly/__init__.py 
b/tests/components/shelly/__init__.py index 4631a17969e..7de45eeee98 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -23,6 +23,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, + DeviceEntry, DeviceRegistry, format_mac, ) @@ -111,6 +112,7 @@ def register_entity( unique_id: str, config_entry: ConfigEntry | None = None, capabilities: Mapping[str, Any] | None = None, + device_id: str | None = None, ) -> str: """Register enabled entity, return entity_id.""" entity_registry = er.async_get(hass) @@ -122,6 +124,7 @@ def register_entity( disabled_by=None, config_entry=config_entry, capabilities=capabilities, + device_id=device_id, ) return f"{domain}.{object_id}" @@ -145,9 +148,11 @@ def get_entity_state(hass: HomeAssistant, entity_id: str) -> str: return entity.state -def register_device(device_registry: DeviceRegistry, config_entry: ConfigEntry) -> None: +def register_device( + device_registry: DeviceRegistry, config_entry: ConfigEntry +) -> DeviceEntry: """Register Shelly device.""" - device_registry.async_get_or_create( + return device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(CONNECTION_NETWORK_MAC, format_mac(MOCK_MAC))}, ) diff --git a/tests/components/shelly/bluetooth/test_scanner.py b/tests/components/shelly/bluetooth/test_scanner.py index c7bbb5cb708..1076691a768 100644 --- a/tests/components/shelly/bluetooth/test_scanner.py +++ b/tests/components/shelly/bluetooth/test_scanner.py @@ -12,7 +12,9 @@ from homeassistant.core import HomeAssistant from .. 
import init_integration, inject_rpc_device_event -async def test_scanner_v1(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> None: +async def test_scanner_v1( + hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch +) -> None: """Test injecting data into the scanner v1.""" await init_integration( hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} @@ -50,7 +52,9 @@ async def test_scanner_v1(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> assert ble_device is None -async def test_scanner_v2(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> None: +async def test_scanner_v2( + hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch +) -> None: """Test injecting data into the scanner v2.""" await init_integration( hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} @@ -93,7 +97,7 @@ async def test_scanner_v2(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> async def test_scanner_ignores_non_ble_events( - hass: HomeAssistant, mock_rpc_device, monkeypatch + hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch ) -> None: """Test injecting non ble data into the scanner.""" await init_integration( @@ -119,7 +123,10 @@ async def test_scanner_ignores_non_ble_events( async def test_scanner_ignores_wrong_version_and_logs( - hass: HomeAssistant, mock_rpc_device, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + mock_rpc_device, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Test injecting wrong version of ble data into the scanner.""" await init_integration( @@ -152,7 +159,10 @@ async def test_scanner_ignores_wrong_version_and_logs( async def test_scanner_warns_on_corrupt_event( - hass: HomeAssistant, mock_rpc_device, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + mock_rpc_device, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Test injecting garbage ble data into the 
scanner.""" await init_integration( diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index a16cc62fbae..a2629d21362 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -11,11 +11,11 @@ from homeassistant.components.shelly.const import ( EVENT_SHELLY_CLICK, REST_SENSORS_UPDATE_INTERVAL, ) -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from . import MOCK_MAC -from tests.common import async_capture_events, async_mock_service +from tests.common import async_capture_events MOCK_SETTINGS = { "name": "Test name", @@ -228,7 +228,9 @@ MOCK_STATUS_RPC = { "input:1": {"id": 1, "percent": 89, "xpercent": 8.9}, "input:2": { "id": 2, - "counts": {"total": 56174, "xtotal": 561.74, "freq": 208.00, "xfreq": 6.11}, + "counts": {"total": 56174, "xtotal": 561.74}, + "freq": 208.00, + "xfreq": 6.11, }, "light:0": {"output": True, "brightness": 53.0}, "light:1": {"output": True, "brightness": 53.0}, @@ -290,12 +292,6 @@ def mock_ws_server(): yield -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture def events(hass: HomeAssistant): """Yield caught shelly_click events.""" @@ -357,6 +353,7 @@ def _mock_rpc_device(version: str | None = None): status=MOCK_STATUS_RPC, firmware_version="some fw string", initialized=True, + connected=True, ) type(device).name = PropertyMock(return_value="Test name") return device diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 026a7041863..18f65deb907 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -1,5 +1,6 @@ """Tests for Shelly binary sensor platform.""" +from copy import deepcopy from unittest.mock import Mock from aioshelly.const import MODEL_MOTION @@ -7,9 
+8,10 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.components.shelly.const import SLEEP_PERIOD_MULTIPLIER +from homeassistant.components.shelly.const import UPDATE_PERIOD_MULTIPLIER from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -122,7 +124,7 @@ async def test_block_rest_binary_sensor_connected_battery_devices( assert hass.states.get(entity_id).state == STATE_OFF # Verify update on slow intervals - await mock_rest_update(hass, freezer, seconds=SLEEP_PERIOD_MULTIPLIER * 3600) + await mock_rest_update(hass, freezer, seconds=UPDATE_PERIOD_MULTIPLIER * 3600) assert hass.states.get(entity_id).state == STATE_ON entry = entity_registry.async_get(entity_id) @@ -263,6 +265,7 @@ async def test_rpc_sleeping_binary_sensor( ) -> None: """Test RPC online sleeping binary sensor.""" entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_cloud" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) config_entry = await init_integration(hass, 2, sleep_period=1000) @@ -353,3 +356,104 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_OFF + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual binary sensor", "binary_sensor.test_name_virtual_binary_sensor"), + (None, "binary_sensor.test_name_boolean_203"), + ], +) +async def test_rpc_device_virtual_binary_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) 
-> None: + """Test a virtual binary sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:203"] = { + "name": name, + "meta": {"ui": {"view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:203"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-boolean:203-boolean" + + monkeypatch.setitem(mock_rpc_device.status["boolean:203"], "value", False) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_OFF + + +async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual binary sensor will be removed if the mode has been changed to a toggle.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "toggle"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + BINARY_SENSOR_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def 
test_rpc_remove_virtual_binary_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual binary sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + BINARY_SENSOR_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index a26c6eac405..0c574a33e0c 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -1114,6 +1114,7 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( caplog: pytest.LogCaptureFixture, ) -> None: """Test zeroconf discovery does not triggers refresh for sleeping device.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = MockConfigEntry( domain="shelly", @@ -1304,3 +1305,22 @@ async def test_reconfigure_with_exception( ) assert result["errors"] == {"base": base_error} + + +async def test_zeroconf_rejects_ipv6(hass: HomeAssistant) -> None: + """Test zeroconf discovery rejects ipv6.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("fd00::b27c:63bb:cc85:4ea0"), + ip_addresses=[ip_address("fd00::b27c:63bb:cc85:4ea0")], + hostname="mock_hostname", + name="shelly1pm-12345", + port=None, + 
properties={zeroconf.ATTR_PROPERTIES_ID: "shelly1pm-12345"}, + type="mock_type", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "ipv6_not_supported" diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 1e0af115c9e..d3494c094f9 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -20,7 +20,6 @@ from homeassistant.components.shelly.const import ( ENTRY_RELOAD_COOLDOWN, MAX_PUSH_UPDATE_FAILURES, RPC_RECONNECT_INTERVAL, - SLEEP_PERIOD_MULTIPLIER, UPDATE_PERIOD_MULTIPLIER, BLEScannerMode, ) @@ -546,6 +545,7 @@ async def test_rpc_update_entry_sleep_period( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC update entry sleep period.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 600) entry = await init_integration(hass, 2, sleep_period=600) register_entity( @@ -564,7 +564,7 @@ async def test_rpc_update_entry_sleep_period( # Move time to generate sleep period update monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 3600) - freezer.tick(timedelta(seconds=600 * SLEEP_PERIOD_MULTIPLIER)) + freezer.tick(timedelta(seconds=600 * UPDATE_PERIOD_MULTIPLIER)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -579,6 +579,7 @@ async def test_rpc_sleeping_device_no_periodic_updates( ) -> None: """Test RPC sleeping device no periodic updates.""" entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = await init_integration(hass, 2, sleep_period=1000) register_entity( @@ -596,7 +597,7 @@ async def test_rpc_sleeping_device_no_periodic_updates( assert get_entity_state(hass, entity_id) == "22.9" # Move time to generate polling - 
freezer.tick(timedelta(seconds=SLEEP_PERIOD_MULTIPLIER * 1000)) + freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 1000)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -610,6 +611,7 @@ async def test_rpc_sleeping_device_firmware_unsupported( issue_registry: ir.IssueRegistry, ) -> None: """Test RPC sleeping device firmware not supported.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "firmware_supported", False) entry = await init_integration(hass, 2, sleep_period=3600) @@ -889,7 +891,7 @@ async def test_block_sleeping_device_connection_error( assert get_entity_state(hass, entity_id) == STATE_ON # Move time to generate sleep period update - freezer.tick(timedelta(seconds=sleep_period * SLEEP_PERIOD_MULTIPLIER)) + freezer.tick(timedelta(seconds=sleep_period * UPDATE_PERIOD_MULTIPLIER)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -913,6 +915,7 @@ async def test_rpc_sleeping_device_connection_error( hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -934,9 +937,25 @@ async def test_rpc_sleeping_device_connection_error( assert get_entity_state(hass, entity_id) == STATE_ON # Move time to generate sleep period update - freezer.tick(timedelta(seconds=sleep_period * SLEEP_PERIOD_MULTIPLIER)) + freezer.tick(timedelta(seconds=sleep_period * UPDATE_PERIOD_MULTIPLIER)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert "Sleeping device did not update" in caplog.text assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + + +async def test_rpc_already_connected( + hass: HomeAssistant, + 
freezer: FrozenDateTimeFactory, + mock_rpc_device: Mock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test RPC ignore connect event if already connected.""" + await init_integration(hass, 2) + + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert "already connected" in caplog.text + mock_rpc_device.initialize.assert_called_once() diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index d47cca17460..fb68393304b 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -178,7 +178,7 @@ async def test_get_triggers_for_invalid_device_id( async def test_if_fires_on_click_event_block_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_block_device: Mock, ) -> None: """Test for click_event trigger firing for block device.""" @@ -215,14 +215,14 @@ async def test_if_fires_on_click_event_block_device( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_click" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_click" async def test_if_fires_on_click_event_rpc_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_rpc_device: Mock, ) -> None: """Test for click_event trigger firing for rpc device.""" @@ -259,14 +259,14 @@ async def test_if_fires_on_click_event_rpc_device( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_push" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_push" async def test_validate_trigger_block_device_not_ready( 
hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -304,14 +304,14 @@ async def test_validate_trigger_block_device_not_ready( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_click" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_click" async def test_validate_trigger_rpc_device_not_ready( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -349,8 +349,8 @@ async def test_validate_trigger_rpc_device_not_ready( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_push" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_push" async def test_validate_trigger_invalid_triggers( @@ -391,7 +391,7 @@ async def test_validate_trigger_invalid_triggers( async def test_rpc_no_runtime_data( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -429,14 +429,14 @@ async def test_rpc_no_runtime_data( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_push" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_push" async def test_block_no_runtime_data( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_block_device: Mock, 
monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -474,5 +474,5 @@ async def test_block_no_runtime_data( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single" diff --git a/tests/components/shelly/test_diagnostics.py b/tests/components/shelly/test_diagnostics.py index f7f238f3327..4fc8ea6ca8f 100644 --- a/tests/components/shelly/test_diagnostics.py +++ b/tests/components/shelly/test_diagnostics.py @@ -1,9 +1,10 @@ """Tests for Shelly diagnostics platform.""" -from unittest.mock import ANY, Mock +from unittest.mock import ANY, Mock, PropertyMock from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT from aioshelly.const import MODEL_25 +from aioshelly.exceptions import DeviceConnectionError import pytest from homeassistant.components.diagnostics import REDACTED @@ -36,6 +37,10 @@ async def test_block_config_entry_diagnostics( {key: REDACTED for key in TO_REDACT if key in entry_dict["data"]} ) + type(mock_block_device).last_error = PropertyMock( + return_value=DeviceConnectionError() + ) + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) assert result == { @@ -48,6 +53,7 @@ async def test_block_config_entry_diagnostics( }, "device_settings": {"coiot": {"update_period": 15}}, "device_status": MOCK_STATUS_COAP, + "last_error": "DeviceConnectionError()", } @@ -91,6 +97,10 @@ async def test_rpc_config_entry_diagnostics( {key: REDACTED for key in TO_REDACT if key in entry_dict["data"]} ) + type(mock_rpc_device).last_error = PropertyMock( + return_value=DeviceConnectionError() + ) + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) assert result == { @@ -152,4 +162,5 @@ async def test_rpc_config_entry_diagnostics( }, "wifi": {"rssi": -63}, }, + "last_error": "DeviceConnectionError()", } diff --git 
a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py index 998d56fc6cc..46698c23c0a 100644 --- a/tests/components/shelly/test_init.py +++ b/tests/components/shelly/test_init.py @@ -279,6 +279,7 @@ async def test_sleeping_rpc_device_online( caplog: pytest.LogCaptureFixture, ) -> None: """Test sleeping RPC device online.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", device_sleep) entry = await init_integration(hass, 2, sleep_period=entry_sleep) assert "will resume when device is online" in caplog.text @@ -297,6 +298,7 @@ async def test_sleeping_rpc_device_online_new_firmware( caplog: pytest.LogCaptureFixture, ) -> None: """Test sleeping device Gen2 with firmware 1.0.0 or later.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) entry = await init_integration(hass, 2, sleep_period=None) assert "will resume when device is online" in caplog.text diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py index ff453b3251c..73f432094b9 100644 --- a/tests/components/shelly/test_number.py +++ b/tests/components/shelly/test_number.py @@ -1,18 +1,24 @@ """Tests for Shelly number platform.""" +from copy import deepcopy from unittest.mock import AsyncMock, Mock from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest from homeassistant.components.number import ( + ATTR_MAX, + ATTR_MIN, + ATTR_MODE, + ATTR_STEP, ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, + NumberMode, ) from homeassistant.components.shelly.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError from 
homeassistant.helpers.device_registry import DeviceRegistry @@ -240,3 +246,145 @@ async def test_block_set_value_auth_error( assert "context" in flow assert flow["context"].get("source") == SOURCE_REAUTH assert flow["context"].get("entry_id") == entry.entry_id + + +@pytest.mark.parametrize( + ("name", "entity_id", "original_unit", "expected_unit", "view", "mode"), + [ + ( + "Virtual number", + "number.test_name_virtual_number", + "%", + "%", + "field", + NumberMode.BOX, + ), + (None, "number.test_name_number_203", "", None, "field", NumberMode.BOX), + ( + "Virtual slider", + "number.test_name_virtual_slider", + "Hz", + "Hz", + "slider", + NumberMode.SLIDER, + ), + ], +) +async def test_rpc_device_virtual_number( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + original_unit: str, + expected_unit: str | None, + view: str, + mode: NumberMode, +) -> None: + """Test a virtual number for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["number:203"] = { + "name": name, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 0.1, "unit": original_unit, "view": view}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:203"] = {"value": 12.3} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "12.3" + assert state.attributes.get(ATTR_MIN) == 0 + assert state.attributes.get(ATTR_MAX) == 100 + assert state.attributes.get(ATTR_STEP) == 0.1 + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + assert state.attributes.get(ATTR_MODE) is mode + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-number:203-number" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 78.9) + 
mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "78.9" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 56.7}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "56.7" + + +async def test_rpc_remove_virtual_number_when_mode_label( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual number will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["number:200"] = { + "name": None, + "min": -1000, + "max": 1000, + "meta": {"ui": {"step": 1, "unit": "", "view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:200"] = {"value": 123} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + NUMBER_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_number_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual number will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = 
register_entity( + hass, + NUMBER_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_select.py b/tests/components/shelly/test_select.py new file mode 100644 index 00000000000..0a6eb2a5843 --- /dev/null +++ b/tests/components/shelly/test_select.py @@ -0,0 +1,151 @@ +"""Tests for Shelly select platform.""" + +from copy import deepcopy +from unittest.mock import Mock + +import pytest + +from homeassistant.components.select import ( + ATTR_OPTION, + ATTR_OPTIONS, + DOMAIN as SELECT_PLATFORM, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry + +from . 
import init_integration, register_device, register_entity + + +@pytest.mark.parametrize( + ("name", "entity_id", "value", "expected_state"), + [ + ("Virtual enum", "select.test_name_virtual_enum", "option 1", "Title 1"), + (None, "select.test_name_enum_203", None, STATE_UNKNOWN), + ], +) +async def test_rpc_device_virtual_enum( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + value: str | None, + expected_state: str, +) -> None: + """Test a virtual enum for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["enum:203"] = { + "name": name, + "options": ["option 1", "option 2", "option 3"], + "meta": { + "ui": { + "view": "dropdown", + "titles": {"option 1": "Title 1", "option 2": None}, + } + }, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["enum:203"] = {"value": value} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == expected_state + assert state.attributes.get(ATTR_OPTIONS) == [ + "Title 1", + "option 2", + "option 3", + ] + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-enum:203-enum" + + monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 2") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "option 2" + + monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 1") + await hass.services.async_call( + SELECT_PLATFORM, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "Title 1"}, + blocking=True, + ) + # 'Title 1' corresponds to 'option 1' + assert mock_rpc_device.call_rpc.call_args[0][1] == {"id": 203, "value": "option 1"} + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "Title 1" + + 
+async def test_rpc_remove_virtual_enum_when_mode_label( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual enum will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["enum:200"] = { + "name": None, + "options": ["one", "two"], + "meta": { + "ui": {"view": "label", "titles": {"one": "Title 1", "two": "Title 2"}} + }, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["enum:200"] = {"value": "one"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SELECT_PLATFORM, + "test_name_enum_200", + "enum:200-enum", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_enum_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual enum will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SELECT_PLATFORM, + "test_name_enum_200", + "enum:200-enum", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_sensor.py 
b/tests/components/shelly/test_sensor.py index 513bcd875e2..a39123a6722 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -11,6 +11,7 @@ from homeassistant.components.homeassistant import ( SERVICE_UPDATE_ENTITY, ) from homeassistant.components.sensor import ( + ATTR_OPTIONS, ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, @@ -449,6 +450,7 @@ async def test_rpc_sleeping_sensor( ) -> None: """Test RPC online sleeping sensor.""" entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = await init_integration(hass, 2, sleep_period=1000) @@ -600,6 +602,7 @@ async def test_rpc_sleeping_update_entity_service( await async_setup_component(hass, "homeassistant", {}) entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) await init_integration(hass, 2, sleep_period=1000) @@ -828,3 +831,361 @@ async def test_rpc_pulse_counter_frequency_sensors( entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == "123456789ABC-input:2-counter_frequency_value" + + +async def test_rpc_disabled_xfreq( + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test RPC input with the xfreq sensor disabled.""" + status = deepcopy(mock_rpc_device.status) + status["input:2"] = { + "id": 2, + "counts": {"total": 56174, "xtotal": 561.74}, + "freq": 208.00, + } + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 2) + + entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" + + state = hass.states.get(entity_id) + assert not state + + entry = entity_registry.async_get(entity_id) + assert not entry + + 
+@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual sensor", "sensor.test_name_virtual_sensor"), + (None, "sensor.test_name_text_203"), + ], +) +async def test_rpc_device_virtual_text_sensor( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual text sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["text:203"] = { + "name": name, + "meta": {"ui": {"view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:203"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "lorem ipsum" + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-text:203-text" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "dolor sit amet" + + +async def test_rpc_remove_text_virtual_sensor_when_mode_field( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual text sensor will be removed if the mode has been changed to a field.""" + config = deepcopy(mock_rpc_device.config) + config["text:200"] = {"name": None, "meta": {"ui": {"view": "field"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:200"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = 
register_entity( + hass, + SENSOR_DOMAIN, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_text_virtual_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual text sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +@pytest.mark.parametrize( + ("name", "entity_id", "original_unit", "expected_unit"), + [ + ("Virtual number sensor", "sensor.test_name_virtual_number_sensor", "W", "W"), + (None, "sensor.test_name_number_203", "", None), + ], +) +async def test_rpc_device_virtual_number_sensor( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + original_unit: str, + expected_unit: str | None, +) -> None: + """Test a virtual number sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["number:203"] = { + "name": name, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 0.1, "unit": original_unit, "view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:203"] = {"value": 34.5} + monkeypatch.setattr(mock_rpc_device, 
"status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "34.5" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-number:203-number" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "56.7" + + +async def test_rpc_remove_number_virtual_sensor_when_mode_field( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual number sensor will be removed if the mode has been changed to a field.""" + config = deepcopy(mock_rpc_device.config) + config["number:200"] = { + "name": None, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 1, "unit": "", "view": "field"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:200"] = {"value": 67.8} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_number_virtual_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual number sensor will be removed if it has been removed from the device configuration.""" + config_entry = 
await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +@pytest.mark.parametrize( + ("name", "entity_id", "value", "expected_state"), + [ + ( + "Virtual enum sensor", + "sensor.test_name_virtual_enum_sensor", + "one", + "Title 1", + ), + (None, "sensor.test_name_enum_203", None, STATE_UNKNOWN), + ], +) +async def test_rpc_device_virtual_enum_sensor( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + value: str | None, + expected_state: str, +) -> None: + """Test a virtual enum sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["enum:203"] = { + "name": name, + "options": ["one", "two", "three"], + "meta": {"ui": {"view": "label", "titles": {"one": "Title 1", "two": None}}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["enum:203"] = {"value": value} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == expected_state + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM + assert state.attributes.get(ATTR_OPTIONS) == ["Title 1", "two", "three"] + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-enum:203-enum" + + monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "two") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "two" + + +async def 
test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual enum sensor will be removed if the mode has been changed to a dropdown.""" + config = deepcopy(mock_rpc_device.config) + config["enum:200"] = { + "name": None, + "options": ["option 1", "option 2", "option 3"], + "meta": { + "ui": { + "view": "dropdown", + "titles": {"option 1": "Title 1", "option 2": None}, + } + }, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["enum:200"] = {"value": "option 2"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_enum_200", + "enum:200-enum", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_enum_virtual_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual enum sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_enum_200", + "enum:200-enum", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git 
a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 637a92a7fbe..124562be8d5 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -25,6 +25,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -187,7 +188,7 @@ async def test_block_device_unique_ids( async def test_block_set_state_connection_error( - hass: HomeAssistant, mock_block_device, monkeypatch + hass: HomeAssistant, mock_block_device, monkeypatch: pytest.MonkeyPatch ) -> None: """Test block device set state connection error.""" monkeypatch.setattr( @@ -430,3 +431,142 @@ async def test_wall_display_relay_mode( entry = entity_registry.async_get(switch_entity_id) assert entry assert entry.unique_id == "123456789ABC-switch:0" + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual switch", "switch.test_name_virtual_switch"), + (None, "switch.test_name_boolean_200"), + ], +) +async def test_rpc_device_virtual_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual switch for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = { + "name": name, + "meta": {"ui": {"view": "toggle"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + + entry = entity_registry.async_get(entity_id) + assert 
entry + assert entry.unique_id == "123456789ABC-boolean:200-boolean" + + monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", False) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_OFF + + monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", True) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_ON + + +async def test_rpc_device_virtual_binary_sensor( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that a switch entity has not been created for a virtual binary sensor.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + entity_id = "switch.test_name_boolean_200" + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert not state + + +async def test_rpc_remove_virtual_switch_when_mode_label( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual switch will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + 
config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SWITCH_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_switch_when_orphaned( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual switch will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SWITCH_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_text.py b/tests/components/shelly/test_text.py new file mode 100644 index 00000000000..19acb856f35 --- /dev/null +++ b/tests/components/shelly/test_text.py @@ -0,0 +1,129 @@ +"""Tests for Shelly text platform.""" + +from copy import deepcopy +from unittest.mock import Mock + +import pytest + +from homeassistant.components.text import ( + ATTR_VALUE, + DOMAIN as TEXT_PLATFORM, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry + +from . 
import init_integration, register_device, register_entity + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual text", "text.test_name_virtual_text"), + (None, "text.test_name_text_203"), + ], +) +async def test_rpc_device_virtual_text( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual text for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["text:203"] = { + "name": name, + "meta": {"ui": {"view": "field"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:203"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "lorem ipsum" + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-text:203-text" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "dolor sit amet" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "sed do eiusmod") + await hass.services.async_call( + TEXT_PLATFORM, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: "sed do eiusmod"}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "sed do eiusmod" + + +async def test_rpc_remove_virtual_text_when_mode_label( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual text will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["text:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} 
+ monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:200"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + TEXT_PLATFORM, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_text_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual text will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + TEXT_PLATFORM, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 8448c116815..721e86559a3 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -334,6 +334,7 @@ async def test_rpc_sleeping_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC sleeping device update entity.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) monkeypatch.setitem(mock_rpc_device.shelly, "ver", "1") monkeypatch.setitem( diff --git 
a/tests/components/shelly/test_utils.py b/tests/components/shelly/test_utils.py index 7c4ea8accae..5891f250fae 100644 --- a/tests/components/shelly/test_utils.py +++ b/tests/components/shelly/test_utils.py @@ -23,6 +23,7 @@ from homeassistant.components.shelly.utils import ( get_block_device_sleep_period, get_block_input_triggers, get_device_uptime, + get_host, get_number_of_channels, get_release_url, get_rpc_channel_name, @@ -274,3 +275,19 @@ def test_get_release_url( result = get_release_url(gen, model, beta) assert result is expected + + +@pytest.mark.parametrize( + ("host", "expected"), + [ + ("shelly_device.local", "shelly_device.local"), + ("192.168.178.12", "192.168.178.12"), + ( + "2001:0db8:85a3:0000:0000:8a2e:0370:7334", + "[2001:0db8:85a3:0000:0000:8a2e:0370:7334]", + ), + ], +) +def test_get_host(host: str, expected: str) -> None: + """Test get_host function.""" + assert get_host(host) == expected diff --git a/tests/components/shopping_list/test_todo.py b/tests/components/shopping_list/test_todo.py index 173544d0be2..c54a6abfd6f 100644 --- a/tests/components/shopping_list/test_todo.py +++ b/tests/components/shopping_list/test_todo.py @@ -1,11 +1,18 @@ """Test shopping list todo platform.""" -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from typing import Any import pytest -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -13,11 +20,12 @@ from tests.typing import WebSocketGenerator TEST_ENTITY = "todo.shopping_list" +type WsGetItemsType = Callable[[], Coroutine[Any, Any, list[dict[str, str]]]] +type WsMoveItemType = Callable[[str, str | None], Coroutine[Any, Any, dict[str, Any]]] + @pytest.fixture -async 
def ws_get_items( - hass_ws_client: WebSocketGenerator, -) -> Callable[[], Awaitable[dict[str, str]]]: +async def ws_get_items(hass_ws_client: WebSocketGenerator) -> WsGetItemsType: """Fixture to fetch items from the todo websocket.""" async def get() -> list[dict[str, str]]: @@ -37,9 +45,7 @@ async def ws_get_items( @pytest.fixture -async def ws_move_item( - hass_ws_client: WebSocketGenerator, -) -> Callable[[str, str | None], Awaitable[None]]: +async def ws_move_item(hass_ws_client: WebSocketGenerator) -> WsMoveItemType: """Fixture to move an item in the todo list.""" async def move(uid: str, previous_uid: str | None) -> dict[str, Any]: @@ -62,7 +68,7 @@ async def test_get_items( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test creating a shopping list item with the WS API and verifying with To-do API.""" client = await hass_ws_client(hass) @@ -93,16 +99,16 @@ async def test_get_items( async def test_add_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test adding shopping_list item and listing it.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -120,14 +126,14 @@ async def test_add_item( async def test_remove_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) items = await ws_get_items() @@ -142,11 +148,11 @@ async def 
test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", + TodoServices.REMOVE_ITEM, { - "item": [items[0]["uid"]], + ATTR_ITEM: [items[0]["uid"]], }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -161,18 +167,18 @@ async def test_remove_item( async def test_bulk_remove( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test removing a todo item.""" for _i in range(5): await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -186,11 +192,11 @@ async def test_bulk_remove( await hass.services.async_call( TODO_DOMAIN, - "remove_item", + TodoServices.REMOVE_ITEM, { - "item": uids, + ATTR_ITEM: uids, }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -205,18 +211,18 @@ async def test_bulk_remove( async def test_update_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test updating a todo item.""" # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -234,12 +240,12 @@ async def test_update_item( # Mark item completed await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "soda", - "status": "completed", + ATTR_ITEM: "soda", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -258,18 +264,18 @@ async def test_update_item( async def test_partial_update_item( hass: HomeAssistant, sl_setup: None, - 
ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test updating a todo item with partial information.""" # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -287,12 +293,12 @@ async def test_partial_update_item( # Mark item completed without changing the summary await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": item["uid"], - "status": "completed", + ATTR_ITEM: item["uid"], + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -310,12 +316,12 @@ async def test_partial_update_item( # Change the summary without changing the status await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": item["uid"], - "rename": "other summary", + ATTR_ITEM: item["uid"], + ATTR_RENAME: "other summary", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -334,19 +340,19 @@ async def test_partial_update_item( async def test_update_invalid_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test updating a todo item that does not exist.""" with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "invalid-uid", - "rename": "Example task", + ATTR_ITEM: "invalid-uid", + ATTR_RENAME: "Example task", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -380,8 +386,8 @@ async def test_update_invalid_item( async def test_move_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: 
Callable[[], Awaitable[dict[str, str]]], - ws_move_item: Callable[[str, str | None], Awaitable[dict[str, Any]]], + ws_get_items: WsGetItemsType, + ws_move_item: WsMoveItemType, src_idx: int, dst_idx: int | None, expected_items: list[str], @@ -391,11 +397,11 @@ async def test_move_item( for i in range(1, 5): await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": f"item {i}", + ATTR_ITEM: f"item {i}", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -422,16 +428,16 @@ async def test_move_item( async def test_move_invalid_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], - ws_move_item: Callable[[str, int | None], Awaitable[dict[str, Any]]], + ws_get_items: WsGetItemsType, + ws_move_item: WsMoveItemType, ) -> None: """Test moving an item that does not exist.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -456,11 +462,11 @@ async def test_subscribe_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -491,12 +497,12 @@ async def test_subscribe_item( # Rename item item completed await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "soda", - "rename": "milk", + ATTR_ITEM: "soda", + ATTR_RENAME: "milk", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/sia/test_config_flow.py b/tests/components/sia/test_config_flow.py index 95de53d7fbe..b0d83855a25 100644 --- a/tests/components/sia/test_config_flow.py +++ 
b/tests/components/sia/test_config_flow.py @@ -1,5 +1,6 @@ """Test the sia config flow.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -16,6 +17,7 @@ from homeassistant.components.sia.const import ( CONF_ZONES, DOMAIN, ) +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PORT, CONF_PROTOCOL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -105,7 +107,7 @@ ADDITIONAL_OPTIONS = { @pytest.fixture -async def flow_at_user_step(hass): +async def flow_at_user_step(hass: HomeAssistant) -> ConfigFlowResult: """Return a initialized flow.""" return await hass.config_entries.flow.async_init( DOMAIN, @@ -114,7 +116,9 @@ async def flow_at_user_step(hass): @pytest.fixture -async def entry_with_basic_config(hass, flow_at_user_step): +async def entry_with_basic_config( + hass: HomeAssistant, flow_at_user_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a entry with a basic config.""" with patch("homeassistant.components.sia.async_setup_entry", return_value=True): return await hass.config_entries.flow.async_configure( @@ -123,7 +127,9 @@ async def entry_with_basic_config(hass, flow_at_user_step): @pytest.fixture -async def flow_at_add_account_step(hass, flow_at_user_step): +async def flow_at_add_account_step( + hass: HomeAssistant, flow_at_user_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a initialized flow at the additional account step.""" return await hass.config_entries.flow.async_configure( flow_at_user_step["flow_id"], BASIC_CONFIG_ADDITIONAL @@ -131,7 +137,9 @@ async def flow_at_add_account_step(hass, flow_at_user_step): @pytest.fixture -async def entry_with_additional_account_config(hass, flow_at_add_account_step): +async def entry_with_additional_account_config( + hass: HomeAssistant, flow_at_add_account_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a entry with a two account config.""" with 
patch("homeassistant.components.sia.async_setup_entry", return_value=True): return await hass.config_entries.flow.async_configure( @@ -139,7 +147,7 @@ async def entry_with_additional_account_config(hass, flow_at_add_account_step): ) -async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry): +async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Add mock config to HASS.""" assert await async_setup_component(hass, DOMAIN, {}) config_entry.add_to_hass(hass) @@ -147,23 +155,21 @@ async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry): await hass.async_block_till_done() -async def test_form_start_user(hass: HomeAssistant, flow_at_user_step) -> None: +async def test_form_start_user(flow_at_user_step: ConfigFlowResult) -> None: """Start the form and check if you get the right id and schema for the user step.""" assert flow_at_user_step["step_id"] == "user" assert flow_at_user_step["errors"] is None assert flow_at_user_step["data_schema"] == HUB_SCHEMA -async def test_form_start_account( - hass: HomeAssistant, flow_at_add_account_step -) -> None: +async def test_form_start_account(flow_at_add_account_step: ConfigFlowResult) -> None: """Start the form and check if you get the right id and schema for the additional account step.""" assert flow_at_add_account_step["step_id"] == "add_account" assert flow_at_add_account_step["errors"] is None assert flow_at_add_account_step["data_schema"] == ACCOUNT_SCHEMA -async def test_create(hass: HomeAssistant, entry_with_basic_config) -> None: +async def test_create(entry_with_basic_config: ConfigFlowResult) -> None: """Test we create a entry through the form.""" assert entry_with_basic_config["type"] is FlowResultType.CREATE_ENTRY assert ( @@ -175,7 +181,7 @@ async def test_create(hass: HomeAssistant, entry_with_basic_config) -> None: async def test_create_additional_account( - hass: HomeAssistant, entry_with_additional_account_config + entry_with_additional_account_config: 
ConfigFlowResult, ) -> None: """Test we create a config with two accounts.""" assert entry_with_additional_account_config["type"] is FlowResultType.CREATE_ENTRY @@ -210,7 +216,7 @@ async def test_abort_form(hass: HomeAssistant) -> None: @pytest.fixture(autouse=True) -def mock_sia(): +def mock_sia() -> Generator[None]: """Mock SIAClient.""" with patch("homeassistant.components.sia.hub.SIAClient", autospec=True): yield diff --git a/tests/components/simplefin/__init__.py b/tests/components/simplefin/__init__.py new file mode 100644 index 00000000000..e4c7848ba9a --- /dev/null +++ b/tests/components/simplefin/__init__.py @@ -0,0 +1,13 @@ +"""Tests for SimpleFin.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/simplefin/conftest.py b/tests/components/simplefin/conftest.py new file mode 100644 index 00000000000..328e16ccbd0 --- /dev/null +++ b/tests/components/simplefin/conftest.py @@ -0,0 +1,83 @@ +"""Test fixtures for SimpleFIN.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from simplefin4py import FinancialData +from simplefin4py.exceptions import SimpleFinInvalidClaimTokenError + +from homeassistant.components.simplefin import CONF_ACCESS_URL +from homeassistant.components.simplefin.const import DOMAIN + +from tests.common import MockConfigEntry, load_fixture + +MOCK_ACCESS_URL = "https://i:am@yomama.house.com" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.simplefin.async_setup_entry", return_value=True + ) as mock_setup: + yield mock_setup + + 
+@pytest.fixture +async def mock_config_entry() -> MockConfigEntry: + """Fixture for MockConfigEntry.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_ACCESS_URL: MOCK_ACCESS_URL}, + version=1, + ) + + +@pytest.fixture +def mock_claim_setup_token() -> str: + """Fixture to mock the claim_setup_token method of SimpleFin.""" + with patch( + "homeassistant.components.simplefin.config_flow.SimpleFin.claim_setup_token", + ) as mock_claim_setup_token: + mock_claim_setup_token.return_value = "https://i:am@yomama.comma" + yield + + +@pytest.fixture +def mock_decode_claim_token_invalid_then_good() -> str: + """Fixture to mock the decode_claim_token method of SimpleFin.""" + return_values = [SimpleFinInvalidClaimTokenError, "valid_return_value"] + with patch( + "homeassistant.components.simplefin.config_flow.SimpleFin.decode_claim_token", + new_callable=lambda: MagicMock(side_effect=return_values), + ): + yield + + +@pytest.fixture +def mock_simplefin_client() -> Generator[AsyncMock]: + """Mock a SimpleFin client.""" + + with ( + patch( + "homeassistant.components.simplefin.SimpleFin", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.simplefin.config_flow.SimpleFin", + new=mock_client, + ), + ): + mock_client.claim_setup_token.return_value = MOCK_ACCESS_URL + client = mock_client.return_value + + fixture_data = load_fixture("fin_data.json", DOMAIN) + fin_data = FinancialData.from_json(fixture_data) + + assert fin_data.accounts != [] + client.fetch_data.return_value = fin_data + + client.access_url = MOCK_ACCESS_URL + + yield mock_client diff --git a/tests/components/simplefin/fixtures/fin_data.json b/tests/components/simplefin/fixtures/fin_data.json new file mode 100644 index 00000000000..bd35945c12b --- /dev/null +++ b/tests/components/simplefin/fixtures/fin_data.json @@ -0,0 +1,173 @@ +{ + "errors": [ + "Connection to Investments may need attention", + "Connection to The Bank of Go may need attention" + ], + "accounts": [ + { + "org": { + 
"domain": "www.newwealthfront.com", + "name": "The Bank of Go", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newwealthfront.com" + }, + "id": "ACT-1a2b3c4d-5e6f-7g8h-9i0j", + "name": "The Bank", + "currency": "USD", + "balance": "7777.77", + "available-balance": "7777.77", + "balance-date": 1705413843, + "transactions": [ + { + "id": "12394832938403", + "posted": 793090572, + "amount": "-1234.56", + "description": "Enchanted Bait Shop", + "payee": "Uncle Frank", + "memo": "Some memo", + "transacted_at": 793080572 + } + ], + "extra": { + "account-open-date": 978360153 + }, + "holdings": [] + }, + { + "org": { + "domain": "www.newfidelity.com", + "name": "Investments", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newfidelity.com" + }, + "id": "ACT-1k2l3m4n-5o6p-7q8r-9s0t", + "name": "My Checking", + "currency": "USD", + "balance": "12345.67", + "available-balance": "5432.10", + "balance-date": 1705413319, + "transactions": [], + "holdings": [] + }, + { + "org": { + "domain": "www.newhfcu.org", + "name": "The Bank of Go", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newhfcu.org/" + }, + "id": "ACT-2a3b4c5d-6e7f-8g9h-0i1j", + "name": "PRIME SAVINGS", + "currency": "EUR", + "balance": "9876.54", + "available-balance": "8765.43", + "balance-date": 1705428861, + "transactions": [], + "holdings": [] + }, + { + "org": { + "domain": "www.randombank2.com", + "name": "Random Bank", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.randombank2.com/" + }, + "id": "ACT-3a4b5c6d-7e8f-9g0h-1i2j", + "name": "Costco Anywhere Visa® Card", + "currency": "USD", + "balance": "-532.69", + "available-balance": "4321.98", + "balance-date": 1705429002, + "transactions": [], + "holdings": [] + }, + { + "org": { + "domain": "www.newfidelity.com", + "name": "Investments", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": 
"https://www.newfidelity.com" + }, + "id": "ACT-4k5l6m7n-8o9p-1q2r-3s4t", + "name": "Dr Evil", + "currency": "USD", + "balance": "1000000.00", + "available-balance": "13579.24", + "balance-date": 1705413319, + "transactions": [], + "holdings": [ + { + "id": "HOL-62eb5bb6-4aed-4fe1-bdbe-f28e127e359b", + "created": 1705413320, + "currency": "", + "cost_basis": "10000.00", + "description": "Fantastic FID GROWTH CO K6", + "market_value": "15000.00", + "purchase_price": "0.00", + "shares": "200.00", + "symbol": "FGKFX" + } + ] + }, + { + "org": { + "domain": "www.newfidelity.com", + "name": "Investments", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newfidelity.com" + }, + "id": "ACT-5k6l7m8n-9o0p-1q2r-3s4t", + "name": "NerdCorp Series B", + "currency": "EUR", + "balance": "13579.24", + "available-balance": "9876.54", + "balance-date": 1705413319, + "transactions": [], + "holdings": [ + { + "id": "HOL-08f775cd-eedf-4ee5-9f53-241c8efa5bf3", + "created": 1705413321, + "currency": "", + "cost_basis": "7500.00", + "description": "Mythical FID GROWTH CO K6", + "market_value": "9876.54", + "purchase_price": "0.00", + "shares": "150.00", + "symbol": "FGKFX" + } + ] + }, + { + "org": { + "domain": "www.randombank2.com", + "name": "Mythical RandomSavings", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.randombank2.com/" + }, + "id": "ACT-6a7b8c9d-0e1f-2g3h-4i5j", + "name": "Unicorn Pot", + "currency": "USD", + "balance": "10000.00", + "available-balance": "7500.00", + "balance-date": 1705429002, + "transactions": [], + "holdings": [] + }, + { + "org": { + "domain": "www.randombank2.com", + "name": "Mythical RandomSavings", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.randombank2.com/" + }, + "id": "ACT-7a8b9c0d-1e2f-3g4h-5i6j", + "name": "Castle Mortgage", + "currency": "USD", + "balance": "7500.00", + "available-balance": "5000.00", + "balance-date": 1705429002, + 
"transactions": [], + "holdings": [] + } + ] +} diff --git a/tests/components/simplefin/snapshots/test_sensor.ambr b/tests/components/simplefin/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c7dced9300e --- /dev/null +++ b/tests/components/simplefin/snapshots/test_sensor.ambr @@ -0,0 +1,809 @@ +# serializer version: 1 +# name: test_all_entities[sensor.investments_dr_evil_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.investments_dr_evil_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.investments_dr_evil_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Investments Dr Evil Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.investments_dr_evil_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1000000.00', + }) +# --- +# name: test_all_entities[sensor.investments_dr_evil_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.investments_dr_evil_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_dr_evil_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments Dr Evil Data age', + }), + 'context': , + 'entity_id': 'sensor.investments_dr_evil_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.investments_my_checking_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Investments My Checking 
Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.investments_my_checking_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12345.67', + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.investments_my_checking_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments My Checking Data age', + }), + 'context': , + 'entity_id': 'sensor.investments_my_checking_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.investments_nerdcorp_series_b_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_balance', + 'unit_of_measurement': 'EUR', + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Investments NerdCorp Series B Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'EUR', + }), + 'context': , + 'entity_id': 'sensor.investments_nerdcorp_series_b_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '13579.24', + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.investments_nerdcorp_series_b_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments NerdCorp Series B Data age', + }), + 'context': , + 'entity_id': 
'sensor.investments_nerdcorp_series_b_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7500.00', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_data_age', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Data age', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:16:42+00:00', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Mythical 
RandomSavings Unicorn Pot Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10000.00', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Mythical RandomSavings Unicorn Pot Data age', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:16:42+00:00', + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.random_bank_costco_anywhere_visa_r_card_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-532.69', + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_data_age-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Data age', + }), + 'context': , + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:16:42+00:00', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.the_bank_of_go_prime_savings_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_balance', + 'unit_of_measurement': 'EUR', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'The Bank of Go PRIME SAVINGS Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'EUR', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_prime_savings_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9876.54', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.the_bank_of_go_prime_savings_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'The Bank of Go PRIME SAVINGS Data age', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_prime_savings_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:14:21+00:00', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.the_bank_of_go_the_bank_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided 
by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'The Bank of Go The Bank Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_the_bank_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7777.77', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.the_bank_of_go_the_bank_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'The Bank of Go The Bank Data age', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_the_bank_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T14:04:03+00:00', + }) +# --- diff --git a/tests/components/simplefin/test_config_flow.py b/tests/components/simplefin/test_config_flow.py new file mode 100644 index 00000000000..c83f2aed62e --- /dev/null +++ b/tests/components/simplefin/test_config_flow.py @@ -0,0 +1,164 @@ +"""Test config flow.""" + +from unittest.mock import AsyncMock + +import pytest +from simplefin4py.exceptions import ( + SimpleFinAuthError, + 
SimpleFinClaimError, + SimpleFinInvalidAccountURLError, + SimpleFinInvalidClaimTokenError, + SimpleFinPaymentRequiredError, +) + +from homeassistant.components.simplefin import CONF_ACCESS_URL +from homeassistant.components.simplefin.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import entity_registry as er + +from .conftest import MOCK_ACCESS_URL + +from tests.common import MockConfigEntry + + +async def test_successful_claim( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_simplefin_client: AsyncMock, +) -> None: + """Test successful token claim in config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "donJulio"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "SimpleFIN" + assert result["data"] == {CONF_ACCESS_URL: MOCK_ACCESS_URL} + + +async def test_already_setup( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_simplefin_client: AsyncMock, +) -> None: + """Test all entities.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: MOCK_ACCESS_URL}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_access_url( + hass: HomeAssistant, + mock_simplefin_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test standard config flow.""" + result = 
await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "http://user:password@string"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_ACCESS_URL] == "http://user:password@string" + assert result["title"] == "SimpleFIN" + + +@pytest.mark.parametrize( + ("side_effect", "error_key"), + [ + (SimpleFinInvalidAccountURLError, "url_error"), + (SimpleFinPaymentRequiredError, "payment_required"), + (SimpleFinAuthError, "invalid_auth"), + ], +) +async def test_access_url_errors( + hass: HomeAssistant, + mock_simplefin_client: AsyncMock, + side_effect: Exception, + error_key: str, +) -> None: + """Test the various errors we can get in access_url mode.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + mock_simplefin_client.claim_setup_token.side_effect = side_effect + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "donJulio"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_key} + + mock_simplefin_client.claim_setup_token.side_effect = None + + # Pass the entry creation + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "http://user:password@string"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_ACCESS_URL: "http://user:password@string"} + assert result["title"] == "SimpleFIN" + + +@pytest.mark.parametrize( + ("side_effect", "error_key"), + [ + (SimpleFinInvalidClaimTokenError, "invalid_claim_token"), + (SimpleFinClaimError, "claim_error"), + ], +) +async def test_claim_token_errors( + hass: HomeAssistant, + mock_simplefin_client: AsyncMock, + side_effect: 
Exception, + error_key: str, +) -> None: + """Test config flow with various token claim errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + mock_simplefin_client.claim_setup_token.side_effect = side_effect + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "donJulio"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_key} + + mock_simplefin_client.claim_setup_token.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "donJulio"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_ACCESS_URL: "https://i:am@yomama.house.com"} + assert result["title"] == "SimpleFIN" diff --git a/tests/components/simplefin/test_sensor.py b/tests/components/simplefin/test_sensor.py new file mode 100644 index 00000000000..495f249d4e1 --- /dev/null +++ b/tests/components/simplefin/test_sensor.py @@ -0,0 +1,94 @@ +"""Test SimpleFin Sensor with Snapshot data.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from simplefin4py.exceptions import SimpleFinAuthError, SimpleFinPaymentRequiredError +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_simplefin_client: AsyncMock, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.simplefin.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("side_effect"), + [ + (SimpleFinAuthError), + (SimpleFinPaymentRequiredError), + ], +) +async def test_update_errors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_simplefin_client: AsyncMock, + freezer: FrozenDateTimeFactory, + side_effect: Exception, +) -> None: + """Test connection error.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("sensor.the_bank_of_go_the_bank_balance").state == "7777.77" + assert hass.states.get("sensor.investments_my_checking_balance").state == "12345.67" + assert ( + hass.states.get("sensor.the_bank_of_go_prime_savings_balance").state + == "9876.54" + ) + assert ( + hass.states.get("sensor.random_bank_costco_anywhere_visa_r_card_balance").state + == "-532.69" + ) + assert hass.states.get("sensor.investments_dr_evil_balance").state == "1000000.00" + assert ( + hass.states.get("sensor.investments_nerdcorp_series_b_balance").state + == "13579.24" + ) + assert ( + hass.states.get("sensor.mythical_randomsavings_unicorn_pot_balance").state + == "10000.00" + ) + assert ( + hass.states.get("sensor.mythical_randomsavings_castle_mortgage_balance").state + == "7500.00" + ) + + mock_simplefin_client.return_value.fetch_data.side_effect = side_effect + freezer.tick(timedelta(days=1)) + async_fire_time_changed(hass) + await 
hass.async_block_till_done() + + sensors = [ + "sensor.the_bank_of_go_the_bank_balance", + "sensor.investments_my_checking_balance", + "sensor.the_bank_of_go_prime_savings_balance", + "sensor.random_bank_costco_anywhere_visa_r_card_balance", + "sensor.investments_dr_evil_balance", + "sensor.investments_nerdcorp_series_b_balance", + "sensor.mythical_randomsavings_unicorn_pot_balance", + "sensor.mythical_randomsavings_castle_mortgage_balance", + ] + + for sensor in sensors: + assert hass.states.get(sensor).state == STATE_UNAVAILABLE diff --git a/tests/components/simplisafe/conftest.py b/tests/components/simplisafe/conftest.py index cc387ee765b..12ed845c7d2 100644 --- a/tests/components/simplisafe/conftest.py +++ b/tests/components/simplisafe/conftest.py @@ -1,6 +1,6 @@ """Define test fixtures for SimpliSafe.""" -import json +from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, Mock, patch import pytest @@ -8,11 +8,13 @@ from simplipy.system.v3 import SystemV3 from homeassistant.components.simplisafe.const import DOMAIN from homeassistant.const import CONF_CODE, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType from .common import REFRESH_TOKEN, USER_ID, USERNAME -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_json_object_fixture CODE = "12345" PASSWORD = "password" @@ -20,7 +22,9 @@ SYSTEM_ID = 12345 @pytest.fixture(name="api") -def api_fixture(data_subscription, system_v3, websocket): +def api_fixture( + data_subscription: JsonObjectType, system_v3: SystemV3, websocket: Mock +) -> Mock: """Define a simplisafe-python API object.""" return Mock( async_get_systems=AsyncMock(return_value={SYSTEM_ID: system_v3}), @@ -32,7 +36,9 @@ def api_fixture(data_subscription, system_v3, websocket): @pytest.fixture(name="config_entry") -def 
config_entry_fixture(hass, config, unique_id): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, str], unique_id: str +) -> MockConfigEntry: """Define a config entry.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=unique_id, data=config, options={CONF_CODE: "1234"} @@ -42,7 +48,7 @@ def config_entry_fixture(hass, config, unique_id): @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, str]: """Define config entry data config.""" return { CONF_TOKEN: REFRESH_TOKEN, @@ -51,7 +57,7 @@ def config_fixture(): @pytest.fixture(name="credentials_config") -def credentials_config_fixture(): +def credentials_config_fixture() -> dict[str, str]: """Define a username/password config.""" return { CONF_USERNAME: USERNAME, @@ -60,32 +66,32 @@ def credentials_config_fixture(): @pytest.fixture(name="data_latest_event", scope="package") -def data_latest_event_fixture(): +def data_latest_event_fixture() -> JsonObjectType: """Define latest event data.""" - return json.loads(load_fixture("latest_event_data.json", "simplisafe")) + return load_json_object_fixture("latest_event_data.json", "simplisafe") @pytest.fixture(name="data_sensor", scope="package") -def data_sensor_fixture(): +def data_sensor_fixture() -> JsonObjectType: """Define sensor data.""" - return json.loads(load_fixture("sensor_data.json", "simplisafe")) + return load_json_object_fixture("sensor_data.json", "simplisafe") @pytest.fixture(name="data_settings", scope="package") -def data_settings_fixture(): +def data_settings_fixture() -> JsonObjectType: """Define settings data.""" - return json.loads(load_fixture("settings_data.json", "simplisafe")) + return load_json_object_fixture("settings_data.json", "simplisafe") @pytest.fixture(name="data_subscription", scope="package") -def data_subscription_fixture(): +def data_subscription_fixture() -> JsonObjectType: """Define subscription data.""" - data = json.loads(load_fixture("subscription_data.json", "simplisafe")) + 
data = load_json_object_fixture("subscription_data.json", "simplisafe") return {SYSTEM_ID: data} @pytest.fixture(name="reauth_config") -def reauth_config_fixture(): +def reauth_config_fixture() -> dict[str, str]: """Define a reauth config.""" return { CONF_PASSWORD: PASSWORD, @@ -93,7 +99,9 @@ def reauth_config_fixture(): @pytest.fixture(name="setup_simplisafe") -async def setup_simplisafe_fixture(hass, api, config): +async def setup_simplisafe_fixture( + hass: HomeAssistant, api: Mock, config: dict[str, str] +) -> AsyncGenerator[None]: """Define a fixture to set up SimpliSafe.""" with ( patch( @@ -122,7 +130,7 @@ async def setup_simplisafe_fixture(hass, api, config): @pytest.fixture(name="sms_config") -def sms_config_fixture(): +def sms_config_fixture() -> dict[str, str]: """Define a SMS-based two-factor authentication config.""" return { CONF_CODE: CODE, @@ -130,7 +138,12 @@ def sms_config_fixture(): @pytest.fixture(name="system_v3") -def system_v3_fixture(data_latest_event, data_sensor, data_settings, data_subscription): +def system_v3_fixture( + data_latest_event: JsonObjectType, + data_sensor: JsonObjectType, + data_settings: JsonObjectType, + data_subscription: JsonObjectType, +) -> SystemV3: """Define a simplisafe-python V3 System object.""" system = SystemV3(Mock(subscription_data=data_subscription), SYSTEM_ID) system.async_get_latest_event = AsyncMock(return_value=data_latest_event) @@ -141,13 +154,13 @@ def system_v3_fixture(data_latest_event, data_sensor, data_settings, data_subscr @pytest.fixture(name="unique_id") -def unique_id_fixture(): +def unique_id_fixture() -> str: """Define a unique ID.""" return USER_ID @pytest.fixture(name="websocket") -def websocket_fixture(): +def websocket_fixture() -> Mock: """Define a simplisafe-python websocket object.""" return Mock( async_connect=AsyncMock(), diff --git a/tests/components/simplisafe/test_diagnostics.py b/tests/components/simplisafe/test_diagnostics.py index 6948f98b159..31bd44c6146 100644 --- 
a/tests/components/simplisafe/test_diagnostics.py +++ b/tests/components/simplisafe/test_diagnostics.py @@ -3,6 +3,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -28,6 +29,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "subscription_data": { "12345": { diff --git a/tests/components/simulated/test_sensor.py b/tests/components/simulated/test_sensor.py index d32eca8c66e..b167147367a 100644 --- a/tests/components/simulated/test_sensor.py +++ b/tests/components/simulated/test_sensor.py @@ -16,13 +16,17 @@ from homeassistant.components.simulated.sensor import ( DEFAULT_PHASE, DEFAULT_RELATIVE_TO_EPOCH, DEFAULT_SEED, + DOMAIN, ) from homeassistant.const import CONF_FRIENDLY_NAME from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component -async def test_simulated_sensor_default_config(hass: HomeAssistant) -> None: +async def test_simulated_sensor_default_config( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: """Test default config.""" config = {"sensor": {"platform": "simulated"}} assert await async_setup_component(hass, "sensor", config) @@ -40,3 +44,7 @@ async def test_simulated_sensor_default_config(hass: HomeAssistant) -> None: assert state.attributes.get(CONF_FWHM) == DEFAULT_FWHM assert state.attributes.get(CONF_SEED) == DEFAULT_SEED assert state.attributes.get(CONF_RELATIVE_TO_EPOCH) == DEFAULT_RELATIVE_TO_EPOCH + + issue = issue_registry.async_get_issue(DOMAIN, DOMAIN) + assert issue.issue_id == DOMAIN + assert issue.translation_key == "simulated_deprecation" diff --git a/tests/components/siren/test_init.py 
b/tests/components/siren/test_init.py index 168300d0abe..475b32540b4 100644 --- a/tests/components/siren/test_init.py +++ b/tests/components/siren/test_init.py @@ -27,7 +27,7 @@ class MockSirenEntity(SirenEntity): supported_features=0, available_tones_as_attr=None, available_tones_in_desc=None, - ): + ) -> None: """Initialize mock siren entity.""" self._attr_supported_features = supported_features if available_tones_as_attr is not None: diff --git a/tests/components/sleepiq/conftest.py b/tests/components/sleepiq/conftest.py index fd07cc414e7..a9456bd3cc6 100644 --- a/tests/components/sleepiq/conftest.py +++ b/tests/components/sleepiq/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch from asyncsleepiq import ( @@ -17,7 +18,6 @@ from asyncsleepiq import ( SleepIQSleeper, ) import pytest -from typing_extensions import Generator from homeassistant.components.sleepiq import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/slimproto/conftest.py b/tests/components/slimproto/conftest.py index ece30d3e5cf..1bb2d7f2628 100644 --- a/tests/components/slimproto/conftest.py +++ b/tests/components/slimproto/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.slimproto.const import DOMAIN diff --git a/tests/components/sma/conftest.py b/tests/components/sma/conftest.py index a98eda673e4..a54f478a31d 100644 --- a/tests/components/sma/conftest.py +++ b/tests/components/sma/conftest.py @@ -9,6 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.sma.const import DOMAIN +from homeassistant.core import HomeAssistant from . 
import MOCK_DEVICE, MOCK_USER_INPUT @@ -16,7 +17,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(): +def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -28,7 +29,9 @@ def mock_config_entry(): @pytest.fixture -async def init_integration(hass, mock_config_entry): +async def init_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: """Create a fake SMA Config Entry.""" mock_config_entry.add_to_hass(hass) diff --git a/tests/components/smart_meter_texas/conftest.py b/tests/components/smart_meter_texas/conftest.py index d06571fe05e..9c0301037a9 100644 --- a/tests/components/smart_meter_texas/conftest.py +++ b/tests/components/smart_meter_texas/conftest.py @@ -19,6 +19,7 @@ from homeassistant.components.homeassistant import ( ) from homeassistant.components.smart_meter_texas.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture @@ -91,7 +92,7 @@ def mock_connection( @pytest.fixture(name="config_entry") -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return a mock config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 17e2c781989..70fd9db0744 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -91,7 +91,7 @@ async def setup_component( await async_setup_component(hass, "smartthings", {}) -def _create_location(): +def _create_location() -> Mock: loc = Mock(Location) loc.name = "Test Location" loc.location_id = str(uuid4()) @@ -99,19 +99,19 @@ def _create_location(): @pytest.fixture(name="location") -def location_fixture(): +def 
location_fixture() -> Mock: """Fixture for a single location.""" return _create_location() @pytest.fixture(name="locations") -def locations_fixture(location): +def locations_fixture(location: Mock) -> list[Mock]: """Fixture for 2 locations.""" return [location, _create_location()] @pytest.fixture(name="app") -async def app_fixture(hass, config_file): +async def app_fixture(hass: HomeAssistant, config_file: dict[str, str]) -> Mock: """Fixture for a single app.""" app = Mock(AppEntity) app.app_name = APP_NAME_PREFIX + str(uuid4()) @@ -133,7 +133,7 @@ async def app_fixture(hass, config_file): @pytest.fixture(name="app_oauth_client") -def app_oauth_client_fixture(): +def app_oauth_client_fixture() -> Mock: """Fixture for a single app's oauth.""" client = Mock(AppOAuthClient) client.client_id = str(uuid4()) @@ -150,7 +150,7 @@ def app_settings_fixture(app, config_file): return settings -def _create_installed_app(location_id, app_id): +def _create_installed_app(location_id: str, app_id: str) -> Mock: item = Mock(InstalledApp) item.installed_app_id = str(uuid4()) item.installed_app_status = InstalledAppStatus.AUTHORIZED @@ -161,7 +161,7 @@ def _create_installed_app(location_id, app_id): @pytest.fixture(name="installed_app") -def installed_app_fixture(location, app): +def installed_app_fixture(location: Mock, app: Mock) -> Mock: """Fixture for a single installed app.""" return _create_installed_app(location.location_id, app.app_id) @@ -222,7 +222,7 @@ def device_fixture(location): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, installed_app, location): +def config_entry_fixture(installed_app: Mock, location: Mock) -> MockConfigEntry: """Fixture representing a config entry.""" data = { CONF_ACCESS_TOKEN: str(uuid4()), diff --git a/tests/components/smartthings/test_fan.py b/tests/components/smartthings/test_fan.py index 043c022b225..b78c453b402 100644 --- a/tests/components/smartthings/test_fan.py +++ b/tests/components/smartthings/test_fan.py @@ -39,7 
+39,12 @@ async def test_entity_state(hass: HomeAssistant, device_factory) -> None: # Dimmer 1 state = hass.states.get("fan.fan_1") assert state.state == "on" - assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.SET_SPEED + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) assert state.attributes[ATTR_PERCENTAGE] == 66 @@ -100,7 +105,12 @@ async def test_setup_mode_capability(hass: HomeAssistant, device_factory) -> Non # Assert state = hass.states.get("fan.fan_1") assert state is not None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.PRESET_MODE + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) assert state.attributes[ATTR_PRESET_MODE] == "high" assert state.attributes[ATTR_PRESET_MODES] == ["high", "low", "medium"] @@ -122,7 +132,12 @@ async def test_setup_speed_capability(hass: HomeAssistant, device_factory) -> No # Assert state = hass.states.get("fan.fan_1") assert state is not None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.SET_SPEED + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) assert state.attributes[ATTR_PERCENTAGE] == 66 @@ -151,7 +166,10 @@ async def test_setup_both_capabilities(hass: HomeAssistant, device_factory) -> N assert state is not None assert ( state.attributes[ATTR_SUPPORTED_FEATURES] - == FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + == FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) assert state.attributes[ATTR_PERCENTAGE] == 66 assert state.attributes[ATTR_PRESET_MODE] == "high" diff --git a/tests/components/smarttub/conftest.py b/tests/components/smarttub/conftest.py index 
c05762a903d..06780f8fb1e 100644 --- a/tests/components/smarttub/conftest.py +++ b/tests/components/smarttub/conftest.py @@ -1,5 +1,6 @@ """Common fixtures for smarttub tests.""" +from typing import Any from unittest.mock import create_autospec, patch import pytest @@ -7,19 +8,20 @@ import smarttub from homeassistant.components.smarttub.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @pytest.fixture -def config_data(): +def config_data() -> dict[str, Any]: """Provide configuration data for tests.""" return {CONF_EMAIL: "test-email", CONF_PASSWORD: "test-password"} @pytest.fixture -def config_entry(config_data): +def config_entry(config_data: dict[str, Any]) -> MockConfigEntry: """Create a mock config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -29,7 +31,7 @@ def config_entry(config_data): @pytest.fixture -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Set up the component.""" assert await async_setup_component(hass, DOMAIN, {}) is True @@ -162,7 +164,7 @@ def mock_api(account, spa): @pytest.fixture -async def setup_entry(hass, config_entry): +async def setup_entry(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Initialize the config entry.""" config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/smhi/snapshots/test_weather.ambr b/tests/components/smhi/snapshots/test_weather.ambr index 0d2f6b3b3bf..9ab0375df83 100644 --- a/tests/components/smhi/snapshots/test_weather.ambr +++ b/tests/components/smhi/snapshots/test_weather.ambr @@ -6,7 +6,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T00:00:00', + 'datetime': '2023-08-08T00:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -19,7 +19,7 @@ dict({ 
'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T01:00:00', + 'datetime': '2023-08-08T01:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -32,7 +32,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T02:00:00', + 'datetime': '2023-08-08T02:00:00+00:00', 'humidity': 97, 'precipitation': 0.0, 'pressure': 992.0, @@ -45,7 +45,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'sunny', - 'datetime': '2023-08-08T03:00:00', + 'datetime': '2023-08-08T03:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 'pressure': 991.0, @@ -80,142 +80,6 @@ 'wind_speed_unit': , }) # --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', - 'humidity': 96, - 'precipitation': 0.0, - 'pressure': 991.0, - 'temperature': 18.0, - 'templow': 15.0, - 'wind_bearing': 114, - 'wind_gust_speed': 32.76, - 'wind_speed': 10.08, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'rainy', - 'datetime': '2023-08-08T12:00:00', - 'humidity': 97, - 'precipitation': 10.6, - 'pressure': 984.0, - 'temperature': 15.0, - 'templow': 11.0, - 'wind_bearing': 183, - 'wind_gust_speed': 27.36, - 'wind_speed': 11.16, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'rainy', - 'datetime': '2023-08-09T12:00:00', - 'humidity': 95, - 'precipitation': 6.3, - 'pressure': 1001.0, - 'temperature': 12.0, - 'templow': 11.0, - 'wind_bearing': 166, - 'wind_gust_speed': 48.24, - 'wind_speed': 18.0, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-10T12:00:00', - 'humidity': 75, - 'precipitation': 4.8, - 'pressure': 1011.0, - 'temperature': 14.0, - 'templow': 10.0, - 'wind_bearing': 174, - 'wind_gust_speed': 29.16, - 'wind_speed': 11.16, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-11T12:00:00', - 'humidity': 69, - 'precipitation': 0.6, - 
'pressure': 1015.0, - 'temperature': 18.0, - 'templow': 12.0, - 'wind_bearing': 197, - 'wind_gust_speed': 27.36, - 'wind_speed': 10.08, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-12T12:00:00', - 'humidity': 82, - 'precipitation': 0.0, - 'pressure': 1014.0, - 'temperature': 17.0, - 'templow': 12.0, - 'wind_bearing': 225, - 'wind_gust_speed': 28.08, - 'wind_speed': 8.64, - }), - dict({ - 'cloud_coverage': 75, - 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', - 'humidity': 59, - 'precipitation': 0.0, - 'pressure': 1013.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 234, - 'wind_gust_speed': 35.64, - 'wind_speed': 14.76, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'partlycloudy', - 'datetime': '2023-08-14T12:00:00', - 'humidity': 56, - 'precipitation': 0.0, - 'pressure': 1015.0, - 'temperature': 21.0, - 'templow': 14.0, - 'wind_bearing': 216, - 'wind_gust_speed': 33.12, - 'wind_speed': 13.68, - }), - dict({ - 'cloud_coverage': 88, - 'condition': 'partlycloudy', - 'datetime': '2023-08-15T12:00:00', - 'humidity': 64, - 'precipitation': 3.6, - 'pressure': 1014.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 226, - 'wind_gust_speed': 33.12, - 'wind_speed': 13.68, - }), - dict({ - 'cloud_coverage': 75, - 'condition': 'partlycloudy', - 'datetime': '2023-08-16T12:00:00', - 'humidity': 61, - 'precipitation': 2.4, - 'pressure': 1014.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 233, - 'wind_gust_speed': 33.48, - 'wind_speed': 14.04, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.smhi_test': dict({ @@ -223,7 +87,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', + 'datetime': '2023-08-07T12:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 'pressure': 991.0, @@ -236,7 +100,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'rainy', - 'datetime': '2023-08-08T12:00:00', + 
'datetime': '2023-08-08T12:00:00+00:00', 'humidity': 97, 'precipitation': 10.6, 'pressure': 984.0, @@ -249,7 +113,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'rainy', - 'datetime': '2023-08-09T12:00:00', + 'datetime': '2023-08-09T12:00:00+00:00', 'humidity': 95, 'precipitation': 6.3, 'pressure': 1001.0, @@ -262,7 +126,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-10T12:00:00', + 'datetime': '2023-08-10T12:00:00+00:00', 'humidity': 75, 'precipitation': 4.8, 'pressure': 1011.0, @@ -275,7 +139,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-11T12:00:00', + 'datetime': '2023-08-11T12:00:00+00:00', 'humidity': 69, 'precipitation': 0.6, 'pressure': 1015.0, @@ -288,7 +152,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-12T12:00:00', + 'datetime': '2023-08-12T12:00:00+00:00', 'humidity': 82, 'precipitation': 0.0, 'pressure': 1014.0, @@ -301,7 +165,7 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', + 'datetime': '2023-08-13T12:00:00+00:00', 'humidity': 59, 'precipitation': 0.0, 'pressure': 1013.0, @@ -314,7 +178,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'partlycloudy', - 'datetime': '2023-08-14T12:00:00', + 'datetime': '2023-08-14T12:00:00+00:00', 'humidity': 56, 'precipitation': 0.0, 'pressure': 1015.0, @@ -327,7 +191,7 @@ dict({ 'cloud_coverage': 88, 'condition': 'partlycloudy', - 'datetime': '2023-08-15T12:00:00', + 'datetime': '2023-08-15T12:00:00+00:00', 'humidity': 64, 'precipitation': 3.6, 'pressure': 1014.0, @@ -340,7 +204,7 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-16T12:00:00', + 'datetime': '2023-08-16T12:00:00+00:00', 'humidity': 61, 'precipitation': 2.4, 'pressure': 1014.0, @@ -358,7 +222,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', + 'datetime': '2023-08-07T12:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 
'pressure': 991.0, @@ -373,7 +237,7 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', + 'datetime': '2023-08-13T12:00:00+00:00', 'humidity': 59, 'precipitation': 0.0, 'pressure': 1013.0, @@ -388,7 +252,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'fog', - 'datetime': '2023-08-07T09:00:00', + 'datetime': '2023-08-07T09:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -403,7 +267,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T15:00:00', + 'datetime': '2023-08-07T15:00:00+00:00', 'humidity': 89, 'precipitation': 0.0, 'pressure': 991.0, diff --git a/tests/components/snapcast/conftest.py b/tests/components/snapcast/conftest.py index e5806ac5f40..bcc0ac5bc30 100644 --- a/tests/components/snapcast/conftest.py +++ b/tests/components/snapcast/conftest.py @@ -1,9 +1,9 @@ """Test the snapcast config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/snips/test_init.py b/tests/components/snips/test_init.py index 89ee211b38f..82dbf1cd281 100644 --- a/tests/components/snips/test_init.py +++ b/tests/components/snips/test_init.py @@ -6,10 +6,10 @@ import logging import pytest import voluptuous as vol -from homeassistant.bootstrap import async_setup_component from homeassistant.components import snips from homeassistant.core import HomeAssistant from homeassistant.helpers.intent import ServiceIntentHandler, async_register +from homeassistant.setup import async_setup_component from tests.common import async_fire_mqtt_message, async_mock_intent, async_mock_service from tests.typing import MqttMockHAClient diff --git a/tests/components/snmp/test_integer_sensor.py b/tests/components/snmp/test_integer_sensor.py index dab2b080c97..8e7e0f166ef 100644 --- a/tests/components/snmp/test_integer_sensor.py +++ 
b/tests/components/snmp/test_integer_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.hlapi import Integer32 +from pysnmp.proto.rfc1902 import Integer32 import pytest from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN diff --git a/tests/components/snmp/test_negative_sensor.py b/tests/components/snmp/test_negative_sensor.py index dba09ea75bd..66a111b68d0 100644 --- a/tests/components/snmp/test_negative_sensor.py +++ b/tests/components/snmp/test_negative_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.hlapi import Integer32 +from pysnmp.proto.rfc1902 import Integer32 import pytest from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN diff --git a/tests/components/snmp/test_switch.py b/tests/components/snmp/test_switch.py new file mode 100644 index 00000000000..fe1c3922ff0 --- /dev/null +++ b/tests/components/snmp/test_switch.py @@ -0,0 +1,67 @@ +"""SNMP switch tests.""" + +from unittest.mock import patch + +from pysnmp.proto.rfc1902 import Integer32 +import pytest + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +config = { + SWITCH_DOMAIN: { + "platform": "snmp", + "host": "192.168.1.32", + # ippower-mib::ippoweroutlet1.0 + "baseoid": "1.3.6.1.4.1.38107.1.3.1.0", + "payload_on": 1, + "payload_off": 0, + }, +} + + +async def test_snmp_integer_switch_off(hass: HomeAssistant) -> None: + """Test snmp switch returning int 0 for off.""" + + mock_data = Integer32(0) + with patch( + "homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_OFF + + +async def test_snmp_integer_switch_on(hass: 
HomeAssistant) -> None: + """Test snmp switch returning int 1 for on.""" + + mock_data = Integer32(1) + with patch( + "homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_ON + + +async def test_snmp_integer_switch_unknown( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test snmp switch returning int 3 (not a configured payload) for unknown.""" + + mock_data = Integer32(3) + with patch( + "homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_UNKNOWN + assert "Invalid payload '3' received for entity" in caplog.text diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index 08340487d99..c34d0c011a3 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -7,16 +7,10 @@ import pytest from homeassistant.components.solarlog.const import DOMAIN as SOLARLOG_DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME -from homeassistant.core import HomeAssistant from .const import HOST, NAME -from tests.common import ( - MockConfigEntry, - load_json_object_fixture, - mock_device_registry, - mock_registry, -) +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -60,7 +54,7 @@ def mock_solarlog_connector(): @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.solarlog.async_setup_entry", return_value=True @@ -76,15 +70,3 @@ def mock_test_connection(): 
return_value=True, ): yield - - -@pytest.fixture(name="device_reg") -def device_reg_fixture(hass: HomeAssistant): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture(name="entity_reg") -def entity_reg_fixture(hass: HomeAssistant): - """Return an empty, loaded, registry.""" - return mock_registry(hass) diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index 5fb369bc3b6..df154a5eb9b 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -745,1097 +745,6 @@ 'state': '545', }) # --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_alternator_loss-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_alternator_loss', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alternator loss', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'alternator_loss', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_alternator_loss', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_alternator_loss-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Alternator loss', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_alternator_loss', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: 
test_all_entities[sensor.solarlog_test_1_2_3_capacity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_capacity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Capacity', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'capacity', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_capacity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_capacity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'solarlog_test_1_2_3 Capacity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_capacity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '85.0', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_ac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_ac', - 
'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_ac', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Consumption AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '54.87', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption day', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_day', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption day', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.005', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption month', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption month', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.758', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption total', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_total', - 'unit_of_measurement': , - }) -# --- -# name: 
test_all_entities[sensor.solarlog_test_1_2_3_consumption_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '354.687', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption year', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption year', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.587', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_yesterday-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_yesterday', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption yesterday', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_yesterday', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_yesterday-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption yesterday', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_yesterday', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.007', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_efficiency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_efficiency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Efficiency', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'efficiency', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_efficiency', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_efficiency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 
'friendly_name': 'solarlog_test_1_2_3 Efficiency', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_efficiency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '98.0', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_installed_peak_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_installed_peak_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Installed peak power', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_power', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_total_power', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_installed_peak_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Installed peak power', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_installed_peak_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '120', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_last_update-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_last_update', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - 
}), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last update', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_update', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_last_updated', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_last_update-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'solarlog_test_1_2_3 Last update', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_last_update', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_ac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_ac', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_power_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: 
test_all_entities[sensor.solarlog_test_1_2_3_power_available-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_available', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power available', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_available', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_available', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_available-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power available', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_power_available', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '45.13', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_dc-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_dc', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power DC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'power_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_dc', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_dc-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power DC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_power_dc', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '102', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Usage', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'usage', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_usage', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'solarlog_test_1_2_3 Usage', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '54.9', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_ac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voltage_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_ac', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'solarlog_test_1_2_3 Voltage AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_dc-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_dc', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage DC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voltage_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_dc', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_dc-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 
'friendly_name': 'solarlog_test_1_2_3 Voltage DC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_dc', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield day', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_day', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield day', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.004', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 
, - 'original_icon': None, - 'original_name': 'Yield month', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield month', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.515', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield total', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '56.513', - }) -# --- -# name: 
test_all_entities[sensor.solarlog_test_1_2_3_yield_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield year', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield year', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.023', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_yesterday-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_yesterday', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield yesterday', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_yesterday', - 
'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_yesterday-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield yesterday', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_yesterday', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.005', - }) -# --- # name: test_all_entities[sensor.solarlog_usage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/solarlog/test_init.py b/tests/components/solarlog/test_init.py index f9f00ef601b..0044d09f20e 100644 --- a/tests/components/solarlog/test_init.py +++ b/tests/components/solarlog/test_init.py @@ -50,7 +50,9 @@ async def test_raise_config_entry_not_ready_when_offline( async def test_migrate_config_entry( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: DeviceRegistry, + entity_registry: EntityRegistry, ) -> None: """Test successful migration of entry data.""" entry = MockConfigEntry( @@ -64,13 +66,13 @@ async def test_migrate_config_entry( ) entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, identifiers={(DOMAIN, entry.entry_id)}, manufacturer="Solar-Log", name="solarlog", ) - sensor_entity = entity_reg.async_get_or_create( + sensor_entity = entity_registry.async_get_or_create( config_entry=entry, platform=DOMAIN, domain=Platform.SENSOR, @@ -85,7 +87,7 @@ async def test_migrate_config_entry( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - entity_migrated = entity_reg.async_get(sensor_entity.entity_id) + entity_migrated = entity_registry.async_get(sensor_entity.entity_id) assert entity_migrated assert entity_migrated.unique_id == f"{entry.entry_id}_last_updated" diff --git 
a/tests/components/sonarr/conftest.py b/tests/components/sonarr/conftest.py index 739880a99aa..de7a3f781d7 100644 --- a/tests/components/sonarr/conftest.py +++ b/tests/components/sonarr/conftest.py @@ -1,5 +1,6 @@ """Fixtures for Sonarr integration tests.""" +from collections.abc import Generator import json from unittest.mock import MagicMock, patch @@ -13,7 +14,6 @@ from aiopyarr import ( SystemStatus, ) import pytest -from typing_extensions import Generator from homeassistant.components.sonarr.const import ( CONF_BASE_PATH, diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 51dd2b9047c..840fcb4dcdb 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -1,9 +1,10 @@ """Configuration for Sonos tests.""" import asyncio -from collections.abc import Callable +from collections.abc import Callable, Coroutine, Generator from copy import copy from ipaddress import ip_address +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -17,6 +18,7 @@ from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.components.sonos import DOMAIN from homeassistant.const import CONF_HOSTS from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture, load_json_value_fixture @@ -78,7 +80,7 @@ class SonosMockService: class SonosMockEvent: """Mock a sonos Event used in callbacks.""" - def __init__(self, soco, service, variables): + def __init__(self, soco, service, variables) -> None: """Initialize the instance.""" self.sid = f"{soco.uid}_sub0000000001" self.seq = "0" @@ -119,7 +121,9 @@ async def async_autosetup_sonos(async_setup_sonos): @pytest.fixture -def async_setup_sonos(hass, config_entry, fire_zgs_event): +def async_setup_sonos( + hass: HomeAssistant, config_entry: MockConfigEntry, fire_zgs_event +) -> Callable[[], Coroutine[Any, Any, 
None]]: """Return a coroutine to set up a Sonos integration instance on demand.""" async def _wrapper(): @@ -135,7 +139,7 @@ def async_setup_sonos(hass, config_entry, fire_zgs_event): @pytest.fixture(name="config_entry") -def config_entry_fixture(): +def config_entry_fixture() -> MockConfigEntry: """Create a mock Sonos config entry.""" return MockConfigEntry(domain=DOMAIN, title="Sonos") @@ -271,7 +275,7 @@ def soco_fixture(soco_factory): @pytest.fixture(autouse=True) -async def silent_ssdp_scanner(hass): +def silent_ssdp_scanner() -> Generator[None]: """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -453,6 +457,7 @@ def mock_get_music_library_information( "object.container.album.musicAlbum", ) ] + return [] @pytest.fixture(name="music_library_browse_categories") @@ -648,7 +653,9 @@ def zgs_discovery_fixture(): @pytest.fixture(name="fire_zgs_event") -def zgs_event_fixture(hass: HomeAssistant, soco: SoCo, zgs_discovery: str): +def zgs_event_fixture( + hass: HomeAssistant, soco: SoCo, zgs_discovery: str +) -> Callable[[], Coroutine[Any, Any, None]]: """Create alarm_event fixture.""" variables = {"ZoneGroupState": zgs_discovery} @@ -660,3 +667,26 @@ def zgs_event_fixture(hass: HomeAssistant, soco: SoCo, zgs_discovery: str): await hass.async_block_till_done(wait_background_tasks=True) return _wrapper + + +@pytest.fixture(name="sonos_setup_two_speakers") +async def sonos_setup_two_speakers( + hass: HomeAssistant, soco_factory: SoCoMockFactory +) -> list[MockSoCo]: + """Set up home assistant with two Sonos Speakers.""" + soco_lr = soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room") + soco_br = soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom") + await async_setup_component( + hass, + DOMAIN, + { + DOMAIN: { + "media_player": { + "interface_addr": "127.0.0.1", + "hosts": ["10.10.10.1", "10.10.10.2"], + } + } + }, + ) + await 
hass.async_block_till_done() + return [soco_lr, soco_br] diff --git a/tests/components/sonos/fixtures/av_transport.json b/tests/components/sonos/fixtures/av_transport.json new file mode 100644 index 00000000000..743ac61e3ff --- /dev/null +++ b/tests/components/sonos/fixtures/av_transport.json @@ -0,0 +1,38 @@ +{ + "transport_state": "PLAYING", + "current_play_mode": "NORMAL", + "current_crossfade_mode": "0", + "number_of_tracks": "1", + "current_track": "1", + "current_section": "0", + "current_track_uri": "x-rincon:RINCON_test_10.10.10.2", + "current_track_duration": "", + "current_track_meta_data": "", + "next_track_uri": "", + "next_track_meta_data": "", + "enqueued_transport_uri": "", + "enqueued_transport_uri_meta_data": "", + "playback_storage_medium": "NETWORK", + "av_transport_uri": "x-rincon:RINCON_test_10.10.10.2", + "av_transport_uri_meta_data": "", + "next_av_transport_uri": "", + "next_av_transport_uri_meta_data": "", + "current_transport_actions": "Stop, Play", + "current_valid_play_modes": "CROSSFADE", + "direct_control_client_id": "", + "direct_control_is_suspended": "0", + "direct_control_account_id": "", + "transport_status": "OK", + "sleep_timer_generation": "0", + "alarm_running": "0", + "snooze_running": "0", + "restart_pending": "0", + "transport_play_speed": "NOT_IMPLEMENTED", + "current_media_duration": "NOT_IMPLEMENTED", + "record_storage_medium": "NOT_IMPLEMENTED", + "possible_playback_storage_media": "NONE, NETWORK", + "possible_record_storage_media": "NOT_IMPLEMENTED", + "record_medium_write_status": "NOT_IMPLEMENTED", + "current_record_quality_mode": "NOT_IMPLEMENTED", + "possible_record_quality_modes": "NOT_IMPLEMENTED" +} diff --git a/tests/components/sonos/fixtures/zgs_group.xml b/tests/components/sonos/fixtures/zgs_group.xml new file mode 100644 index 00000000000..58f40be0049 --- /dev/null +++ b/tests/components/sonos/fixtures/zgs_group.xml @@ -0,0 +1,8 @@ + + + + + + + + diff --git 
a/tests/components/sonos/fixtures/zgs_two_single.xml b/tests/components/sonos/fixtures/zgs_two_single.xml new file mode 100644 index 00000000000..18c3c9231c6 --- /dev/null +++ b/tests/components/sonos/fixtures/zgs_two_single.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index ab9b598bb04..ddf84efd7da 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -2,19 +2,44 @@ import logging from typing import Any +from unittest.mock import patch import pytest from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, DOMAIN as MP_DOMAIN, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, MediaPlayerEnqueue, + RepeatMode, +) +from homeassistant.components.sonos.const import ( + DOMAIN as SONOS_DOMAIN, + SOURCE_LINEIN, + SOURCE_TV, +) +from homeassistant.components.sonos.media_player import ( + LONG_SERVICE_TIMEOUT, + SERVICE_RESTORE, + SERVICE_SNAPSHOT, + VOLUME_INCREMENT, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + STATE_IDLE, ) -from homeassistant.components.sonos.const import SOURCE_LINEIN, SOURCE_TV -from homeassistant.components.sonos.media_player import LONG_SERVICE_TIMEOUT -from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.device_registry import ( @@ -22,8 +47,9 @@ from homeassistant.helpers.device_registry import ( CONNECTION_UPNP, DeviceRegistry, ) +from homeassistant.setup import async_setup_component -from .conftest import MockMusicServiceItem, SoCoMockFactory +from .conftest import MockMusicServiceItem, MockSoCo, 
SoCoMockFactory, SonosMockEvent async def test_device_registry( @@ -157,9 +183,9 @@ async def test_play_media_library( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": media_content_type, - "media_content_id": media_content_id, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: media_content_type, + ATTR_MEDIA_CONTENT_ID: media_content_id, ATTR_MEDIA_ENQUEUE: enqueue, }, blocking=True, @@ -206,9 +232,9 @@ async def test_play_media_lib_track_play( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, }, blocking=True, @@ -235,9 +261,9 @@ async def test_play_media_lib_track_next( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, }, blocking=True, @@ -263,9 +289,9 @@ async def test_play_media_lib_track_replace( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.REPLACE, }, blocking=True, @@ -286,9 +312,9 @@ async def test_play_media_lib_track_add( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, }, blocking=True, @@ -316,9 +342,9 @@ async def 
test_play_media_share_link_add( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, }, blocking=True, @@ -344,9 +370,9 @@ async def test_play_media_share_link_next( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, }, blocking=True, @@ -376,9 +402,9 @@ async def test_play_media_share_link_play( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, }, blocking=True, @@ -410,9 +436,9 @@ async def test_play_media_share_link_replace( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.REPLACE, }, blocking=True, @@ -475,9 +501,9 @@ async def test_play_media_music_library_playlist( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": media_content_id, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: media_content_id, }, blocking=True, ) @@ -505,9 +531,9 @@ async def test_play_media_music_library_playlist_dne( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": 
"media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": media_content_id, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: media_content_id, }, blocking=True, ) @@ -546,8 +572,8 @@ async def test_select_source_line_in_tv( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - "entity_id": "media_player.zone_a", - "source": source, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_INPUT_SOURCE: source, }, blocking=True, ) @@ -589,8 +615,8 @@ async def test_select_source_play_uri( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - "entity_id": "media_player.zone_a", - "source": source, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_INPUT_SOURCE: source, }, blocking=True, ) @@ -629,8 +655,8 @@ async def test_select_source_play_queue( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - "entity_id": "media_player.zone_a", - "source": source, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_INPUT_SOURCE: source, }, blocking=True, ) @@ -658,8 +684,8 @@ async def test_select_source_error( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - "entity_id": "media_player.zone_a", - "source": "invalid_source", + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_INPUT_SOURCE: "invalid_source", }, blocking=True, ) @@ -667,6 +693,147 @@ async def test_select_source_error( assert "Could not find a Sonos favorite" in str(sve.value) +async def test_shuffle_set( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, +) -> None: + """Test the set shuffle method.""" + assert soco.play_mode == "NORMAL" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_SHUFFLE: True, + }, + blocking=True, + ) + assert soco.play_mode == "SHUFFLE_NOREPEAT" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_SHUFFLE: False, + }, + blocking=True, + ) + assert soco.play_mode == "NORMAL" + + +async def test_shuffle_get( + hass: 
HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + no_media_event: SonosMockEvent, +) -> None: + """Test the get shuffle attribute by simulating a Sonos Event.""" + subscription = soco.avTransport.subscribe.return_value + sub_callback = subscription.callback + + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_SHUFFLE] is False + + no_media_event.variables["current_play_mode"] = "SHUFFLE_NOREPEAT" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_SHUFFLE] is True + + # The integration keeps a copy of the last event to check for + # changes, so we create a new event. + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "NORMAL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_SHUFFLE] is False + + +async def test_repeat_set( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, +) -> None: + """Test the set repeat method.""" + assert soco.play_mode == "NORMAL" + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.ALL, + }, + blocking=True, + ) + assert soco.play_mode == "REPEAT_ALL" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.ONE, + }, + blocking=True, + ) + assert soco.play_mode == "REPEAT_ONE" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.OFF, + }, + blocking=True, + ) + assert soco.play_mode == "NORMAL" + + +async def test_repeat_get( + hass: HomeAssistant, + soco: 
MockSoCo, + async_autosetup_sonos, + no_media_event: SonosMockEvent, +) -> None: + """Test the get repeat attribute by simulating a Sonos Event.""" + subscription = soco.avTransport.subscribe.return_value + sub_callback = subscription.callback + + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.OFF + + no_media_event.variables["current_play_mode"] = "REPEAT_ALL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ALL + + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "REPEAT_ONE" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ONE + + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "NORMAL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.OFF + + async def test_play_media_favorite_item_id( hass: HomeAssistant, soco_factory: SoCoMockFactory, @@ -678,9 +845,9 @@ async def test_play_media_favorite_item_id( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "favorite_item_id", - "media_content_id": "FV:2/4", + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "favorite_item_id", + ATTR_MEDIA_CONTENT_ID: "FV:2/4", }, blocking=True, ) @@ -700,10 +867,99 @@ async def test_play_media_favorite_item_id( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "favorite_item_id", - 
"media_content_id": "UNKNOWN_ID", + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "favorite_item_id", + ATTR_MEDIA_CONTENT_ID: "UNKNOWN_ID", }, blocking=True, ) assert "UNKNOWN_ID" in str(sve.value) + + +async def _setup_hass(hass: HomeAssistant): + await async_setup_component( + hass, + SONOS_DOMAIN, + { + "sonos": { + "media_player": { + "interface_addr": "127.0.0.1", + "hosts": ["10.10.10.1", "10.10.10.2"], + } + } + }, + ) + await hass.async_block_till_done() + + +async def test_service_snapshot_restore( + hass: HomeAssistant, + soco_factory: SoCoMockFactory, +) -> None: + """Test the snapshot and restore services.""" + soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room") + soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom") + await _setup_hass(hass) + with patch( + "homeassistant.components.sonos.speaker.Snapshot.snapshot" + ) as mock_snapshot: + await hass.services.async_call( + SONOS_DOMAIN, + SERVICE_SNAPSHOT, + { + ATTR_ENTITY_ID: ["media_player.living_room", "media_player.bedroom"], + }, + blocking=True, + ) + assert mock_snapshot.call_count == 2 + + with patch( + "homeassistant.components.sonos.speaker.Snapshot.restore" + ) as mock_restore: + await hass.services.async_call( + SONOS_DOMAIN, + SERVICE_RESTORE, + { + ATTR_ENTITY_ID: ["media_player.living_room", "media_player.bedroom"], + }, + blocking=True, + ) + assert mock_restore.call_count == 2 + + +async def test_volume( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, +) -> None: + """Test the media player volume services.""" + initial_volume = soco.volume + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_UP, + { + ATTR_ENTITY_ID: "media_player.zone_a", + }, + blocking=True, + ) + assert soco.volume == initial_volume + VOLUME_INCREMENT + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_DOWN, + { + ATTR_ENTITY_ID: "media_player.zone_a", + }, + blocking=True, + ) + assert soco.volume == initial_volume + + await 
hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: "media_player.zone_a", ATTR_MEDIA_VOLUME_LEVEL: 0.30}, + blocking=True, + ) + # SoCo uses 0..100 for its range. + assert soco.volume == 30 diff --git a/tests/components/sonos/test_speaker.py b/tests/components/sonos/test_speaker.py index 2c4357060be..40d126c64f2 100644 --- a/tests/components/sonos/test_speaker.py +++ b/tests/components/sonos/test_speaker.py @@ -4,11 +4,18 @@ from unittest.mock import patch import pytest +from homeassistant.components.media_player import ( + DOMAIN as MP_DOMAIN, + SERVICE_MEDIA_PLAY, +) +from homeassistant.components.sonos import DOMAIN from homeassistant.components.sonos.const import DATA_SONOS, SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import MockSoCo, SonosMockEvent + +from tests.common import async_fire_time_changed, load_fixture, load_json_value_fixture async def test_fallback_to_polling( @@ -67,3 +74,140 @@ async def test_subscription_creation_fails( await hass.async_block_till_done() assert speaker._subscriptions + + +def _create_zgs_sonos_event( + fixture_file: str, soco_1: MockSoCo, soco_2: MockSoCo, create_uui_ds: bool = True +) -> SonosMockEvent: + """Create a Sonos Event for zone group state, with the option of creating the uui_ds_in_group.""" + zgs = load_fixture(fixture_file, DOMAIN) + variables = {} + variables["ZoneGroupState"] = zgs + # Sonos does not always send this variable with zgs events + if create_uui_ds: + variables["zone_player_uui_ds_in_group"] = f"{soco_1.uid},{soco_2.uid}" + event = SonosMockEvent(soco_1, soco_1.zoneGroupTopology, variables) + if create_uui_ds: + event.zone_player_uui_ds_in_group = f"{soco_1.uid},{soco_2.uid}" + return event + + +def _create_avtransport_sonos_event( + fixture_file: str, soco: MockSoCo +) -> SonosMockEvent: + """Create a Sonos Event for an AVTransport update.""" + 
variables = load_json_value_fixture(fixture_file, DOMAIN) + return SonosMockEvent(soco, soco.avTransport, variables) + + +async def _media_play(hass: HomeAssistant, entity: str) -> None: + """Call media play service.""" + await hass.services.async_call( + MP_DOMAIN, + SERVICE_MEDIA_PLAY, + { + "entity_id": entity, + }, + blocking=True, + ) + + +async def test_zgs_event_group_speakers( + hass: HomeAssistant, sonos_setup_two_speakers: list[MockSoCo] +) -> None: + """Tests grouping and ungrouping two speakers.""" + # When Sonos speakers are grouped; one of the speakers is the coordinator and is in charge + # of playback across both speakers. Hence, service calls to play or pause on media_players + # that are part of the group are routed to the coordinator. + soco_lr = sonos_setup_two_speakers[0] + soco_br = sonos_setup_two_speakers[1] + + # Test 1 - Initial state - speakers are not grouped + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == ["media_player.living_room"] + state = hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == ["media_player.bedroom"] + # Each speaker is its own coordinator and calls should route to their SoCos + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 1 + await _media_play(hass, "media_player.bedroom") + assert soco_br.play.call_count == 1 + + soco_lr.play.reset_mock() + soco_br.play.reset_mock() + + # Test 2 - Group the speakers, living room is the coordinator + event = _create_zgs_sonos_event( + "zgs_group.xml", soco_lr, soco_br, create_uui_ds=True + ) + soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) + soco_br.zoneGroupTopology.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == [ + "media_player.living_room", + "media_player.bedroom", + ] + state = 
hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == [ + "media_player.living_room", + "media_player.bedroom", + ] + # Play calls should route to the living room SoCo + await _media_play(hass, "media_player.living_room") + await _media_play(hass, "media_player.bedroom") + assert soco_lr.play.call_count == 2 + assert soco_br.play.call_count == 0 + + soco_lr.play.reset_mock() + soco_br.play.reset_mock() + + # Test 3 - Ungroup the speakers + event = _create_zgs_sonos_event( + "zgs_two_single.xml", soco_lr, soco_br, create_uui_ds=False + ) + soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) + soco_br.zoneGroupTopology.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == ["media_player.living_room"] + state = hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == ["media_player.bedroom"] + # Calls should route to each speakers Soco + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 1 + await _media_play(hass, "media_player.bedroom") + assert soco_br.play.call_count == 1 + + +async def test_zgs_avtransport_group_speakers( + hass: HomeAssistant, sonos_setup_two_speakers: list[MockSoCo] +) -> None: + """Test processing avtransport and zgs events to change group membership.""" + soco_lr = sonos_setup_two_speakers[0] + soco_br = sonos_setup_two_speakers[1] + + # Test 1 - Send a transport event changing the coordinator + # for the living room speaker to the bedroom speaker. 
+ event = _create_avtransport_sonos_event("av_transport.json", soco_lr) + soco_lr.avTransport.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + # Call should route to the new coodinator which is the bedroom + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 0 + assert soco_br.play.call_count == 1 + + soco_lr.play.reset_mock() + soco_br.play.reset_mock() + + # Test 2- Send a zgs event to return living room to its own coordinator + event = _create_zgs_sonos_event( + "zgs_two_single.xml", soco_lr, soco_br, create_uui_ds=False + ) + soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) + soco_br.zoneGroupTopology.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + # Call should route to the living room + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 1 + assert soco_br.play.call_count == 0 diff --git a/tests/components/spc/conftest.py b/tests/components/spc/conftest.py new file mode 100644 index 00000000000..1ccda31e314 --- /dev/null +++ b/tests/components/spc/conftest.py @@ -0,0 +1,26 @@ +"""Tests for Vanderbilt SPC component.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pyspcwebgw +import pytest + + +@pytest.fixture +def mock_client() -> Generator[AsyncMock]: + """Mock the SPC client.""" + + with patch( + "homeassistant.components.spc.SpcWebGateway", autospec=True + ) as mock_client: + client = mock_client.return_value + client.async_load_parameters.return_value = True + mock_area = AsyncMock(spec=pyspcwebgw.area.Area) + mock_area.id = "1" + mock_area.mode = pyspcwebgw.const.AreaMode.FULL_SET + mock_area.last_changed_by = "Sven" + mock_area.name = "House" + mock_area.verified_alarm = False + client.areas = {"1": mock_area} + yield mock_client diff --git a/tests/components/spc/test_alarm_control_panel.py 
b/tests/components/spc/test_alarm_control_panel.py new file mode 100644 index 00000000000..7b1ab4ff947 --- /dev/null +++ b/tests/components/spc/test_alarm_control_panel.py @@ -0,0 +1,34 @@ +"""Tests for Vanderbilt SPC component.""" + +from unittest.mock import AsyncMock + +from pyspcwebgw.const import AreaMode + +from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + + +async def test_update_alarm_device(hass: HomeAssistant, mock_client: AsyncMock) -> None: + """Test that alarm panel state changes on incoming websocket data.""" + + config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} + assert await async_setup_component(hass, "spc", config) is True + + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.house" + + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" + + mock_area = mock_client.return_value.areas["1"] + + mock_area.mode = AreaMode.UNSET + mock_area.last_changed_by = "Anna" + + await mock_client.call_args_list[0][1]["async_callback"](mock_area) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" diff --git a/tests/components/spc/test_init.py b/tests/components/spc/test_init.py index 3dfea94a4bd..dc407dc2c5b 100644 --- a/tests/components/spc/test_init.py +++ b/tests/components/spc/test_init.py @@ -1,73 +1,22 @@ """Tests for Vanderbilt SPC component.""" -from unittest.mock import Mock, PropertyMock, patch +from unittest.mock import AsyncMock -import pyspcwebgw -from pyspcwebgw.const import AreaMode - -from homeassistant.bootstrap import async_setup_component -from homeassistant.components.spc import DATA_API -from homeassistant.const import STATE_ALARM_ARMED_AWAY, 
STATE_ALARM_DISARMED from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component -async def test_valid_device_config(hass: HomeAssistant, monkeypatch) -> None: +async def test_valid_device_config(hass: HomeAssistant, mock_client: AsyncMock) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is True + assert await async_setup_component(hass, "spc", config) is True -async def test_invalid_device_config(hass: HomeAssistant, monkeypatch) -> None: +async def test_invalid_device_config( + hass: HomeAssistant, mock_client: AsyncMock +) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/"}} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is False - - -async def test_update_alarm_device(hass: HomeAssistant) -> None: - """Test that alarm panel state changes on incoming websocket data.""" - - config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} - - area_mock = Mock( - spec=pyspcwebgw.area.Area, - id="1", - mode=AreaMode.FULL_SET, - last_changed_by="Sven", - ) - area_mock.name = "House" - area_mock.verified_alarm = False - - with patch( - "homeassistant.components.spc.SpcWebGateway.areas", new_callable=PropertyMock - ) as mock_areas: - mock_areas.return_value = {"1": area_mock} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is True - - await hass.async_block_till_done() - - entity_id = "alarm_control_panel.house" - - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY - assert 
hass.states.get(entity_id).attributes["changed_by"] == "Sven" - - area_mock.mode = AreaMode.UNSET - area_mock.last_changed_by = "Anna" - await hass.data[DATA_API]._async_callback(area_mock) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED - assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" + assert await async_setup_component(hass, "spc", config) is False diff --git a/tests/components/srp_energy/conftest.py b/tests/components/srp_energy/conftest.py index 45eb726443f..b612bc9f3f3 100644 --- a/tests/components/srp_energy/conftest.py +++ b/tests/components/srp_energy/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator import datetime as dt from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator from homeassistant.components.srp_energy.const import DOMAIN, PHOENIX_TIME_ZONE from homeassistant.const import CONF_ID diff --git a/tests/components/srp_energy/test_config_flow.py b/tests/components/srp_energy/test_config_flow.py index 19e21f0e1a0..e3abb3c98df 100644 --- a/tests/components/srp_energy/test_config_flow.py +++ b/tests/components/srp_energy/test_config_flow.py @@ -2,6 +2,8 @@ from unittest.mock import MagicMock, patch +import pytest + from homeassistant.components.srp_energy.const import CONF_IS_TOU, DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_ID, CONF_PASSWORD, CONF_SOURCE, CONF_USERNAME @@ -23,8 +25,9 @@ from . 
import ( from tests.common import MockConfigEntry +@pytest.mark.usefixtures("mock_srp_energy_config_flow") async def test_show_form( - hass: HomeAssistant, mock_srp_energy_config_flow: MagicMock, capsys + hass: HomeAssistant, capsys: pytest.CaptureFixture[str] ) -> None: """Test show configuration form.""" result = await hass.config_entries.flow.async_init( @@ -140,7 +143,7 @@ async def test_flow_entry_already_configured( async def test_flow_multiple_configs( - hass: HomeAssistant, init_integration: MockConfigEntry, capsys + hass: HomeAssistant, init_integration: MockConfigEntry ) -> None: """Test multiple config entries.""" # Verify mock config setup from fixture diff --git a/tests/components/srp_energy/test_sensor.py b/tests/components/srp_energy/test_sensor.py index 7369d07f77a..025d9fe49ca 100644 --- a/tests/components/srp_energy/test_sensor.py +++ b/tests/components/srp_energy/test_sensor.py @@ -1,6 +1,5 @@ """Tests for the srp_energy sensor platform.""" -import time from unittest.mock import patch from requests.models import HTTPError @@ -80,7 +79,7 @@ async def test_srp_entity_timeout( ): client = srp_energy_mock.return_value client.validate.return_value = True - client.usage = lambda _, __, ___: time.sleep(1) + client.usage = lambda _, __, ___: None mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/ssdp/conftest.py b/tests/components/ssdp/conftest.py index 8b06163cd95..ac0ac7298a8 100644 --- a/tests/components/ssdp/conftest.py +++ b/tests/components/ssdp/conftest.py @@ -1,11 +1,14 @@ """Configuration for SSDP tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from async_upnp_client.server import UpnpServer from async_upnp_client.ssdp_listener import SsdpListener import pytest +from homeassistant.core import HomeAssistant + @pytest.fixture(autouse=True) async def silent_ssdp_listener(): @@ -32,7 +35,7 @@ async def disabled_upnp_server(): 
@pytest.fixture -def mock_flow_init(hass): +def mock_flow_init(hass: HomeAssistant) -> Generator[AsyncMock]: """Mock hass.config_entries.flow.async_init.""" with patch.object( hass.config_entries.flow, "async_init", return_value=AsyncMock() diff --git a/tests/components/startca/test_sensor.py b/tests/components/startca/test_sensor.py index b0d43af1cae..be5524eb650 100644 --- a/tests/components/startca/test_sensor.py +++ b/tests/components/startca/test_sensor.py @@ -2,11 +2,11 @@ from http import HTTPStatus -from homeassistant.bootstrap import async_setup_component from homeassistant.components.startca.sensor import StartcaData from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, UnitOfInformation from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.setup import async_setup_component from tests.test_util.aiohttp import AiohttpClientMocker diff --git a/tests/components/statistics/conftest.py b/tests/components/statistics/conftest.py new file mode 100644 index 00000000000..e62488c4cf6 --- /dev/null +++ b/tests/components/statistics/conftest.py @@ -0,0 +1,90 @@ +"""Fixtures for the Statistics integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.statistics import DOMAIN +from homeassistant.components.statistics.sensor import ( + CONF_KEEP_LAST_SAMPLE, + CONF_MAX_AGE, + CONF_PERCENTILE, + CONF_PRECISION, + CONF_SAMPLES_MAX_BUFFER_SIZE, + CONF_STATE_CHARACTERISTIC, + DEFAULT_NAME, + STAT_AVERAGE_LINEAR, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import ( + ATTR_UNIT_OF_MEASUREMENT, + CONF_ENTITY_ID, + CONF_NAME, + UnitOfTemperature, +) +from homeassistant.core import HomeAssistant + +from .test_sensor import VALUES_NUMERIC + +from tests.common import MockConfigEntry + + 
+@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically path uuid generator.""" + with patch( + "homeassistant.components.statistics.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. + + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 5, "seconds": 5}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Statistics integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + for value in VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/statistics/test_config_flow.py b/tests/components/statistics/test_config_flow.py new file mode 100644 index 00000000000..7c9ed5bed47 --- /dev/null +++ b/tests/components/statistics/test_config_flow.py @@ -0,0 +1,273 @@ +"""Test the Scrape config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant import config_entries +from homeassistant.components.statistics import DOMAIN +from homeassistant.components.statistics.sensor import 
( + CONF_KEEP_LAST_SAMPLE, + CONF_MAX_AGE, + CONF_PERCENTILE, + CONF_PRECISION, + CONF_SAMPLES_MAX_BUFFER_SIZE, + CONF_STATE_CHARACTERISTIC, + DEFAULT_NAME, + STAT_AVERAGE_LINEAR, + STAT_COUNT, +) +from homeassistant.const import CONF_ENTITY_ID, CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form for sensor.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_binary_sensor( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form for binary sensor.""" + + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_STATE_CHARACTERISTIC: STAT_COUNT, + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE_CHARACTERISTIC: STAT_COUNT, + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_SAMPLES_MAX_BUFFER_SIZE: 25.0, + CONF_MAX_AGE: {"hours": 16, "minutes": 0, "seconds": 0}, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, + 
CONF_SAMPLES_MAX_BUFFER_SIZE: 25.0, + CONF_MAX_AGE: {"hours": 16, "minutes": 0, "seconds": 0}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + } + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 2 + + state = hass.states.get("sensor.statistical_characteristic") + assert state is not None + + +async def test_validation_options( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test validation.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "options" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "missing_max_age_or_sampling_size"} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_KEEP_LAST_SAMPLE: True, CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0}, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "options" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "missing_keep_last_sample"} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + }, + ) + await hass.async_block_till_done() + + 
assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 0, "seconds": 0}, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_entry_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "sensor.test_monitored", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_STATE_CHARACTERISTIC: STAT_AVERAGE_LINEAR, + }, + ) + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_MAX_AGE: {"hours": 8, "minutes": 5, "seconds": 5}, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/statistics/test_init.py b/tests/components/statistics/test_init.py new file mode 100644 index 00000000000..64829ea7d66 --- /dev/null +++ b/tests/components/statistics/test_init.py @@ -0,0 +1,109 @@ +"""Test Statistics component setup process.""" + +from __future__ import annotations + +from homeassistant.components.statistics import DOMAIN as STATISTICS_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from 
homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device_cleaning( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the cleaning of devices linked to the helper Statistics.""" + + # Source entity device config entry + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + + # Device entry of the source entity + source_device1_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("sensor", "identifier_test1")}, + connections={("mac", "30:31:32:33:34:01")}, + ) + + # Source entity registry + source_entity = entity_registry.async_get_or_create( + "sensor", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device1_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("sensor.test_source") is not None + + # Configure the configuration entry for Statistics + statistics_config_entry = MockConfigEntry( + data={}, + domain=STATISTICS_DOMAIN, + options={ + "name": "Statistics", + "entity_id": "sensor.test_source", + "state_characteristic": "mean", + "keep_last_sample": False, + "percentile": 50.0, + "precision": 2.0, + "sampling_size": 20.0, + }, + title="Statistics", + ) + statistics_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(statistics_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the 
statistics sensor + statistics_entity = entity_registry.async_get("sensor.statistics") + assert statistics_entity is not None + assert statistics_entity.device_id == source_entity.device_id + + # Device entry incorrectly linked to Statistics config entry + device_registry.async_get_or_create( + config_entry_id=statistics_config_entry.entry_id, + identifiers={("sensor", "identifier_test2")}, + connections={("mac", "30:31:32:33:34:02")}, + ) + device_registry.async_get_or_create( + config_entry_id=statistics_config_entry.entry_id, + identifiers={("sensor", "identifier_test3")}, + connections={("mac", "30:31:32:33:34:03")}, + ) + await hass.async_block_till_done() + + # Before reloading the config entry, two devices are expected to be linked + devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( + statistics_config_entry.entry_id + ) + assert len(devices_before_reload) == 3 + + # Config entry reload + await hass.config_entries.async_reload(statistics_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the statistics sensor + statistics_entity = entity_registry.async_get("sensor.statistics") + assert statistics_entity is not None + assert statistics_entity.device_id == source_entity.device_id + + # After reloading the config entry, only one linked device is expected + devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( + statistics_config_entry.entry_id + ) + assert len(devices_after_reload) == 1 + + assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/statistics/test_sensor.py b/tests/components/statistics/test_sensor.py index 5a716fd8ce8..c90d685714c 100644 --- a/tests/components/statistics/test_sensor.py +++ b/tests/components/statistics/test_sensor.py @@ -19,10 +19,20 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.components.statistics import DOMAIN as 
STATISTICS_DOMAIN -from homeassistant.components.statistics.sensor import StatisticsSensor +from homeassistant.components.statistics.sensor import ( + CONF_KEEP_LAST_SAMPLE, + CONF_PERCENTILE, + CONF_PRECISION, + CONF_SAMPLES_MAX_BUFFER_SIZE, + CONF_STATE_CHARACTERISTIC, + STAT_MEAN, + StatisticsSensor, +) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, + CONF_ENTITY_ID, + CONF_NAME, DEGREE, SERVICE_RELOAD, STATE_UNAVAILABLE, @@ -31,11 +41,11 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed, get_fixture_path +from tests.common import MockConfigEntry, async_fire_time_changed, get_fixture_path from tests.components.recorder.common import async_wait_recording_done VALUES_BINARY = ["on", "off", "on", "off", "on", "off", "on", "off", "on"] @@ -171,6 +181,35 @@ async def test_sensor_defaults_numeric(hass: HomeAssistant) -> None: assert new_state.attributes.get("source_value_valid") is False +@pytest.mark.parametrize( + "get_config", + [ + { + CONF_NAME: "test", + CONF_ENTITY_ID: "sensor.test_monitored", + CONF_STATE_CHARACTERISTIC: STAT_MEAN, + CONF_SAMPLES_MAX_BUFFER_SIZE: 20.0, + CONF_KEEP_LAST_SAMPLE: False, + CONF_PERCENTILE: 50.0, + CONF_PRECISION: 2.0, + } + ], +) +async def test_sensor_loaded_from_config_entry( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test the sensor loaded from a config entry.""" + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + assert 
state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT + assert state.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) + assert state.attributes.get("source_value_valid") is True + assert "age_coverage_ratio" not in state.attributes + + async def test_sensor_defaults_binary(hass: HomeAssistant) -> None: """Test the general behavior of the sensor, with binary source sensor.""" assert await async_setup_component( @@ -1615,3 +1654,50 @@ async def test_reload(recorder_mock: Recorder, hass: HomeAssistant) -> None: assert hass.states.get("sensor.test") is None assert hass.states.get("sensor.cputest") + + +async def test_device_id( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test for source entity device for Statistics.""" + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + source_device_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("sensor", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + source_entity = entity_registry.async_get_or_create( + "sensor", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("sensor.test_source") is not None + + statistics_config_entry = MockConfigEntry( + data={}, + domain=STATISTICS_DOMAIN, + options={ + "name": "Statistics", + "entity_id": "sensor.test_source", + "state_characteristic": "mean", + "keep_last_sample": False, + "percentile": 50.0, + "precision": 2.0, + "sampling_size": 20.0, + }, + title="Statistics", + ) + statistics_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(statistics_config_entry.entry_id) + await hass.async_block_till_done() + + statistics_entity = entity_registry.async_get("sensor.statistics") + assert statistics_entity is not None + assert 
statistics_entity.device_id == source_entity.device_id diff --git a/tests/components/stream/conftest.py b/tests/components/stream/conftest.py index 3cf3de54940..39e4de13fed 100644 --- a/tests/components/stream/conftest.py +++ b/tests/components/stream/conftest.py @@ -13,13 +13,14 @@ so that it can inspect the output. from __future__ import annotations import asyncio +from collections.abc import Generator import logging import threading +from typing import Any from unittest.mock import Mock, patch from aiohttp import web import pytest -from typing_extensions import Generator from homeassistant.components.stream.core import StreamOutput from homeassistant.components.stream.worker import StreamState @@ -32,7 +33,7 @@ TEST_TIMEOUT = 7.0 # Lower than 9s home assistant timeout class WorkerSync: """Test fixture that intercepts stream worker calls to StreamOutput.""" - def __init__(self): + def __init__(self) -> None: """Initialize WorkerSync.""" self._event = None self._original = StreamState.discontinuity @@ -60,7 +61,7 @@ class WorkerSync: @pytest.fixture -def stream_worker_sync(hass): +def stream_worker_sync() -> Generator[WorkerSync]: """Patch StreamOutput to allow test to synchronize worker stream end.""" sync = WorkerSync() with patch( @@ -74,7 +75,7 @@ def stream_worker_sync(hass): class HLSSync: """Test fixture that intercepts stream worker calls to StreamOutput.""" - def __init__(self): + def __init__(self) -> None: """Initialize HLSSync.""" self._request_event = asyncio.Event() self._original_recv = StreamOutput.recv @@ -91,7 +92,7 @@ class HLSSync: self.check_requests_ready() class SyncResponse(web.Response): - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) on_resp() diff --git a/tests/components/stream/test_hls.py b/tests/components/stream/test_hls.py index ce66848a2b1..babd7c0b748 100644 --- a/tests/components/stream/test_hls.py +++ b/tests/components/stream/test_hls.py 
@@ -54,7 +54,7 @@ async def setup_component(hass: HomeAssistant) -> None: class HlsClient: """Test fixture for fetching the hls stream.""" - def __init__(self, http_client, parsed_url): + def __init__(self, http_client, parsed_url) -> None: """Initialize HlsClient.""" self.http_client = http_client self.parsed_url = parsed_url diff --git a/tests/components/stream/test_recorder.py b/tests/components/stream/test_recorder.py index 515f3fff82d..8e079cded45 100644 --- a/tests/components/stream/test_recorder.py +++ b/tests/components/stream/test_recorder.py @@ -35,7 +35,7 @@ from tests.common import async_fire_time_changed @pytest.fixture(autouse=True) -async def stream_component(hass): +async def stream_component(hass: HomeAssistant) -> None: """Set up the component before each test.""" await async_setup_component(hass, "stream", {"stream": {}}) @@ -305,7 +305,5 @@ async def test_record_stream_rotate(hass: HomeAssistant, filename, h264_video) - # Assert assert os.path.exists(filename) - with open(filename, "rb") as rotated_mp4: - assert_mp4_has_transform_matrix( - rotated_mp4.read(), stream.dynamic_stream_settings.orientation - ) + data = await hass.async_add_executor_job(Path(filename).read_bytes) + assert_mp4_has_transform_matrix(data, stream.dynamic_stream_settings.orientation) diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index 2cb90c5ee9a..d61530f9076 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -83,7 +83,7 @@ def filename(tmp_path: Path) -> str: @pytest.fixture(autouse=True) -def mock_stream_settings(hass): +def mock_stream_settings(hass: HomeAssistant) -> None: """Set the stream settings data in hass before each test.""" hass.data[DOMAIN] = { ATTR_SETTINGS: StreamSettings( @@ -100,7 +100,7 @@ def mock_stream_settings(hass): class FakeAvInputStream: """A fake pyav Stream.""" - def __init__(self, name, time_base): + def __init__(self, name, time_base) -> None: 
"""Initialize the stream.""" self.name = name self.time_base = time_base @@ -142,7 +142,7 @@ class PacketSequence: exercise corner cases. """ - def __init__(self, num_packets): + def __init__(self, num_packets) -> None: """Initialize the sequence with the number of packets it provides.""" self.packet = 0 self.num_packets = num_packets @@ -160,7 +160,7 @@ class PacketSequence: class FakePacket(bytearray): # Be a bytearray so that memoryview works - def __init__(self): + def __init__(self) -> None: super().__init__(3) time_base = VIDEO_TIME_BASE @@ -181,7 +181,7 @@ class PacketSequence: class FakePyAvContainer: """A fake container returned by mock av.open for a stream.""" - def __init__(self, video_stream, audio_stream): + def __init__(self, video_stream, audio_stream) -> None: """Initialize the fake container.""" # Tests can override this to trigger different worker behavior self.packets = PacketSequence(0) @@ -209,7 +209,7 @@ class FakePyAvContainer: class FakePyAvBuffer: """Holds outputs of the decoded stream for tests to assert on results.""" - def __init__(self): + def __init__(self) -> None: """Initialize the FakePyAvBuffer.""" self.segments = [] self.audio_packets = [] @@ -220,7 +220,7 @@ class FakePyAvBuffer: """Create an output buffer that captures packets for test to examine.""" class FakeAvOutputStream: - def __init__(self, capture_packets): + def __init__(self, capture_packets) -> None: self.capture_packets = capture_packets self.type = "ignored-type" @@ -266,7 +266,7 @@ class FakePyAvBuffer: class MockPyAv: """Mocks out av.open.""" - def __init__(self, video=True, audio=False): + def __init__(self, video=True, audio=False) -> None: """Initialize the MockPyAv.""" video_stream = VIDEO_STREAM if video else None audio_stream = AUDIO_STREAM if audio else None @@ -772,12 +772,15 @@ async def test_worker_log( with patch("av.open") as av_open: # pylint: disable-next=c-extension-no-member - av_open.side_effect = av.error.InvalidDataError(-2, "error") + 
av_open.side_effect = av.error.InvalidDataError( + code=-2, message="Invalid data", filename=stream_url + ) with pytest.raises(StreamWorkerError) as err: run_worker(hass, stream, stream_url) await hass.async_block_till_done() assert ( - str(err.value) == f"Error opening stream (ERRORTYPE_-2, error) {redacted_url}" + str(err.value) + == f"Error opening stream (ERRORTYPE_-2, Invalid data, {redacted_url})" ) assert stream_url not in caplog.text diff --git a/tests/components/streamlabswater/conftest.py b/tests/components/streamlabswater/conftest.py index 5a53c7204fa..1bbdd3e9a08 100644 --- a/tests/components/streamlabswater/conftest.py +++ b/tests/components/streamlabswater/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the StreamLabs tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from streamlabswater.streamlabswater import StreamlabsClient -from typing_extensions import Generator from homeassistant.components.streamlabswater import DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index d28d9c308a7..ca2685ff827 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -1,12 +1,11 @@ """Test STT component setup.""" -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Generator from http import HTTPStatus from pathlib import Path from unittest.mock import AsyncMock import pytest -from typing_extensions import Generator from homeassistant.components.stt import ( DOMAIN, diff --git a/tests/components/stt/test_legacy.py b/tests/components/stt/test_legacy.py index 04068b012f1..20fa86b4d20 100644 --- a/tests/components/stt/test_legacy.py +++ b/tests/components/stt/test_legacy.py @@ -41,7 +41,7 @@ async def test_platform_setup_with_error( discovery_info: DiscoveryInfoType | None = None, ) -> Provider: """Raise exception during platform setup.""" - raise 
Exception("Setup error") # pylint: disable=broad-exception-raised + raise Exception("Setup error") # noqa: TRY002 mock_stt_platform(hass, tmp_path, "bad_stt", async_get_engine=async_get_engine) diff --git a/tests/components/subaru/conftest.py b/tests/components/subaru/conftest.py index f769eba252c..e18ea8fd398 100644 --- a/tests/components/subaru/conftest.py +++ b/tests/components/subaru/conftest.py @@ -100,7 +100,7 @@ TEST_DEVICE_NAME = "test_vehicle_2" TEST_ENTITY_ID = f"sensor.{TEST_DEVICE_NAME}_odometer" -def advance_time_to_next_fetch(hass): +def advance_time_to_next_fetch(hass: HomeAssistant) -> None: """Fast forward time to next fetch.""" future = dt_util.utcnow() + timedelta(seconds=FETCH_INTERVAL + 30) async_fire_time_changed(hass, future) @@ -181,7 +181,7 @@ async def setup_subaru_config_entry( @pytest.fixture -async def subaru_config_entry(hass): +async def subaru_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Create a Subaru config entry prior to setup.""" await async_setup_component(hass, HA_DOMAIN, {}) config_entry = MockConfigEntry(**TEST_CONFIG_ENTRY) @@ -190,7 +190,9 @@ async def subaru_config_entry(hass): @pytest.fixture -async def ev_entry(hass, subaru_config_entry): +async def ev_entry( + hass: HomeAssistant, subaru_config_entry: MockConfigEntry +) -> MockConfigEntry: """Create a Subaru entry representing an EV vehicle with full STARLINK subscription.""" await setup_subaru_config_entry(hass, subaru_config_entry) assert DOMAIN in hass.config_entries.async_domains() diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 9bddeeee051..6abc544c92a 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -10,6 +10,7 @@ from subarulink.exceptions import InvalidCredentials, InvalidPIN, SubaruExceptio from homeassistant import config_entries from homeassistant.components.subaru import config_flow from homeassistant.components.subaru.const 
import CONF_UPDATE_ENABLED, DOMAIN +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_DEVICE_ID, CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -389,7 +390,7 @@ async def test_option_flow(hass: HomeAssistant, options_form) -> None: @pytest.fixture -async def user_form(hass): +async def user_form(hass: HomeAssistant) -> ConfigFlowResult: """Return initial form for Subaru config flow.""" return await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -397,7 +398,9 @@ async def user_form(hass): @pytest.fixture -async def two_factor_start_form(hass, user_form): +async def two_factor_start_form( + hass: HomeAssistant, user_form: ConfigFlowResult +) -> ConfigFlowResult: """Return two factor form for Subaru config flow.""" with ( patch(MOCK_API_CONNECT, return_value=True), @@ -410,7 +413,9 @@ async def two_factor_start_form(hass, user_form): @pytest.fixture -async def two_factor_verify_form(hass, two_factor_start_form): +async def two_factor_verify_form( + hass: HomeAssistant, two_factor_start_form: ConfigFlowResult +) -> ConfigFlowResult: """Return two factor form for Subaru config flow.""" with ( patch( @@ -427,7 +432,9 @@ async def two_factor_verify_form(hass, two_factor_start_form): @pytest.fixture -async def pin_form(hass, two_factor_verify_form): +async def pin_form( + hass: HomeAssistant, two_factor_verify_form: ConfigFlowResult +) -> ConfigFlowResult: """Return PIN input form for Subaru config flow.""" with ( patch( @@ -443,7 +450,7 @@ async def pin_form(hass, two_factor_verify_form): @pytest.fixture -async def options_form(hass): +async def options_form(hass: HomeAssistant) -> ConfigFlowResult: """Return options form for Subaru config flow.""" entry = MockConfigEntry(domain=DOMAIN, data={}, options=None) entry.add_to_hass(hass) diff --git a/tests/components/suez_water/conftest.py 
b/tests/components/suez_water/conftest.py index 51ade6009dc..f218fb7d833 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Suez Water tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/sun/test_trigger.py b/tests/components/sun/test_trigger.py index fc1af35faea..303ca3b80cd 100644 --- a/tests/components/sun/test_trigger.py +++ b/tests/components/sun/test_trigger.py @@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, async_mock_service, mock_component +from tests.common import async_fire_time_changed, mock_component @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -26,14 +26,8 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -41,7 +35,9 @@ def setup_comp(hass): ) -async def test_sunset_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_sunset_trigger( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test the sunset trigger.""" now = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC) trigger_time = datetime(2015, 9, 16, 2, tzinfo=dt_util.UTC) @@ -67,10 +63,11 @@ async def test_sunset_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> {ATTR_ENTITY_ID: 
ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 1 async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 with freeze_time(now): await hass.services.async_call( @@ -79,14 +76,17 @@ async def test_sunset_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 3 + assert service_calls[2].data["id"] == 0 -async def test_sunrise_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_sunrise_trigger( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test the sunrise trigger.""" now = datetime(2015, 9, 13, 23, tzinfo=dt_util.UTC) trigger_time = datetime(2015, 9, 16, 14, tzinfo=dt_util.UTC) @@ -105,11 +105,11 @@ async def test_sunrise_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_sunset_trigger_with_offset( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the sunset trigger with offset.""" now = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC) @@ -142,12 +142,12 @@ async def test_sunset_trigger_with_offset( async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "sun - sunset - 0:30:00" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "sun - sunset - 0:30:00" async def test_sunrise_trigger_with_offset( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the sunrise 
trigger with offset.""" now = datetime(2015, 9, 13, 23, tzinfo=dt_util.UTC) @@ -171,4 +171,4 @@ async def test_sunrise_trigger_with_offset( async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 diff --git a/tests/components/surepetcare/conftest.py b/tests/components/surepetcare/conftest.py index 9ae1bfe310a..5dcc5dfdadc 100644 --- a/tests/components/surepetcare/conftest.py +++ b/tests/components/surepetcare/conftest.py @@ -17,6 +17,7 @@ from tests.common import MockConfigEntry async def _mock_call(method, resource): if method == "GET" and resource == MESTART_RESOURCE: return {"data": MOCK_API_DATA} + return None @pytest.fixture diff --git a/tests/components/swiss_public_transport/__init__.py b/tests/components/swiss_public_transport/__init__.py index 3859a630c31..98262324b11 100644 --- a/tests/components/swiss_public_transport/__init__.py +++ b/tests/components/swiss_public_transport/__init__.py @@ -1 +1,13 @@ """Tests for the swiss_public_transport integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/swiss_public_transport/conftest.py b/tests/components/swiss_public_transport/conftest.py index c139b99e54d..88bd233765b 100644 --- a/tests/components/swiss_public_transport/conftest.py +++ b/tests/components/swiss_public_transport/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the swiss_public_transport tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git 
a/tests/components/swiss_public_transport/fixtures/connections.json b/tests/components/swiss_public_transport/fixtures/connections.json new file mode 100644 index 00000000000..4edead56f14 --- /dev/null +++ b/tests/components/swiss_public_transport/fixtures/connections.json @@ -0,0 +1,130 @@ +[ + { + "departure": "2024-01-06T18:03:00+0100", + "number": 0, + "platform": 0, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:04:00+0100", + "number": 1, + "platform": 1, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:05:00+0100", + "number": 2, + "platform": 2, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:06:00+0100", + "number": 3, + "platform": 3, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:07:00+0100", + "number": 4, + "platform": 4, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:08:00+0100", + "number": 5, + "platform": 5, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:09:00+0100", + "number": 6, + "platform": 6, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:10:00+0100", + "number": 7, + "platform": 7, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:11:00+0100", + "number": 8, + "platform": 8, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:12:00+0100", + "number": 9, + "platform": 9, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:13:00+0100", + "number": 10, + "platform": 10, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:14:00+0100", + "number": 11, + "platform": 11, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:15:00+0100", + "number": 12, + "platform": 12, 
+ "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:16:00+0100", + "number": 13, + "platform": 13, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:17:00+0100", + "number": 14, + "platform": 14, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:18:00+0100", + "number": 15, + "platform": 15, + "transfers": 0, + "duration": "10", + "delay": 0 + } +] diff --git a/tests/components/swiss_public_transport/test_config_flow.py b/tests/components/swiss_public_transport/test_config_flow.py index b728c87d4b0..027336e28a6 100644 --- a/tests/components/swiss_public_transport/test_config_flow.py +++ b/tests/components/swiss_public_transport/test_config_flow.py @@ -12,7 +12,10 @@ from homeassistant.components.swiss_public_transport import config_flow from homeassistant.components.swiss_public_transport.const import ( CONF_DESTINATION, CONF_START, + CONF_VIA, + MAX_VIA, ) +from homeassistant.components.swiss_public_transport.helper import unique_id_from_config from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -25,8 +28,36 @@ MOCK_DATA_STEP = { CONF_DESTINATION: "test_destination", } +MOCK_DATA_STEP_ONE_VIA = { + **MOCK_DATA_STEP, + CONF_VIA: ["via_station"], +} -async def test_flow_user_init_data_success(hass: HomeAssistant) -> None: +MOCK_DATA_STEP_MANY_VIA = { + **MOCK_DATA_STEP, + CONF_VIA: ["via_station_1", "via_station_2", "via_station_3"], +} + +MOCK_DATA_STEP_TOO_MANY_STATIONS = { + **MOCK_DATA_STEP, + CONF_VIA: MOCK_DATA_STEP_ONE_VIA[CONF_VIA] * (MAX_VIA + 1), +} + + +@pytest.mark.parametrize( + ("user_input", "config_title"), + [ + (MOCK_DATA_STEP, "test_start test_destination"), + (MOCK_DATA_STEP_ONE_VIA, "test_start test_destination via via_station"), + ( + MOCK_DATA_STEP_MANY_VIA, + "test_start test_destination via via_station_1, via_station_2, via_station_3", + ), + ], +) +async def 
test_flow_user_init_data_success( + hass: HomeAssistant, user_input, config_title +) -> None: """Test success response.""" result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": "user"} @@ -47,25 +78,26 @@ async def test_flow_user_init_data_success(hass: HomeAssistant) -> None: ) result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=MOCK_DATA_STEP, + user_input=user_input, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].title == "test_start test_destination" + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].title == config_title - assert result["data"] == MOCK_DATA_STEP + assert result["data"] == user_input @pytest.mark.parametrize( - ("raise_error", "text_error"), + ("raise_error", "text_error", "user_input_error"), [ - (OpendataTransportConnectionError(), "cannot_connect"), - (OpendataTransportError(), "bad_config"), - (IndexError(), "unknown"), + (OpendataTransportConnectionError(), "cannot_connect", MOCK_DATA_STEP), + (OpendataTransportError(), "bad_config", MOCK_DATA_STEP), + (None, "too_many_via_stations", MOCK_DATA_STEP_TOO_MANY_STATIONS), + (IndexError(), "unknown", MOCK_DATA_STEP), ], ) async def test_flow_user_init_data_error_and_recover( - hass: HomeAssistant, raise_error, text_error + hass: HomeAssistant, raise_error, text_error, user_input_error ) -> None: """Test unknown errors.""" with patch( @@ -78,7 +110,7 @@ async def test_flow_user_init_data_error_and_recover( ) result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=MOCK_DATA_STEP, + user_input=user_input_error, ) assert result["type"] is FlowResultType.FORM @@ -92,7 +124,7 @@ async def test_flow_user_init_data_error_and_recover( user_input=MOCK_DATA_STEP, ) - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].title == "test_start test_destination"
assert result["data"] == MOCK_DATA_STEP @@ -104,7 +136,7 @@ async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> No entry = MockConfigEntry( domain=config_flow.DOMAIN, data=MOCK_DATA_STEP, - unique_id=f"{MOCK_DATA_STEP[CONF_START]} {MOCK_DATA_STEP[CONF_DESTINATION]}", + unique_id=unique_id_from_config(MOCK_DATA_STEP), ) entry.add_to_hass(hass) diff --git a/tests/components/swiss_public_transport/test_init.py b/tests/components/swiss_public_transport/test_init.py index e1b27cf5fe1..7ee8b696499 100644 --- a/tests/components/swiss_public_transport/test_init.py +++ b/tests/components/swiss_public_transport/test_init.py @@ -1,23 +1,33 @@ -"""Test the swiss_public_transport config flow.""" +"""Test the swiss_public_transport integration.""" from unittest.mock import AsyncMock, patch +import pytest + from homeassistant.components.swiss_public_transport.const import ( CONF_DESTINATION, CONF_START, + CONF_VIA, DOMAIN, ) +from homeassistant.components.swiss_public_transport.helper import unique_id_from_config +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry -MOCK_DATA_STEP = { +MOCK_DATA_STEP_BASE = { CONF_START: "test_start", CONF_DESTINATION: "test_destination", } +MOCK_DATA_STEP_VIA = { + **MOCK_DATA_STEP_BASE, + CONF_VIA: ["via_station"], +} + CONNECTIONS = [ { "departure": "2024-01-06T18:03:00+0100", @@ -46,19 +56,38 @@ CONNECTIONS = [ ] -async def test_migration_1_1_to_1_2( - hass: HomeAssistant, entity_registry: er.EntityRegistry +@pytest.mark.parametrize( + ( + "from_version", + "from_minor_version", + "config_data", + "overwrite_unique_id", + ), + [ + (1, 1, MOCK_DATA_STEP_BASE, "None_departure"), + (1, 2, MOCK_DATA_STEP_BASE, None), + (2, 1, MOCK_DATA_STEP_VIA, None), + ], +) +async def test_migration_from( + hass: HomeAssistant, + entity_registry: 
er.EntityRegistry, + from_version, + from_minor_version, + config_data, + overwrite_unique_id, ) -> None: """Test successful setup.""" - config_entry_faulty = MockConfigEntry( + config_entry = MockConfigEntry( domain=DOMAIN, - data=MOCK_DATA_STEP, - title="MIGRATION_TEST", - version=1, - minor_version=1, + data=config_data, + title=f"MIGRATION_TEST from {from_version}.{from_minor_version}", + version=from_version, + minor_version=from_minor_version, + unique_id=overwrite_unique_id or unique_id_from_config(config_data), ) - config_entry_faulty.add_to_hass(hass) + config_entry.add_to_hass(hass) with patch( "homeassistant.components.swiss_public_transport.OpendataTransport", @@ -67,21 +96,53 @@ async def test_migration_1_1_to_1_2( mock().connections = CONNECTIONS # Setup the config entry - await hass.config_entries.async_setup(config_entry_faulty.entry_id) + unique_id = unique_id_from_config(config_entry.data) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert entity_registry.async_is_registered( entity_registry.entities.get_entity_id( - (Platform.SENSOR, DOMAIN, "test_start test_destination_departure") + ( + Platform.SENSOR, + DOMAIN, + f"{unique_id}_departure", + ) ) ) - # Check change in config entry - assert config_entry_faulty.minor_version == 2 - assert config_entry_faulty.unique_id == "test_start test_destination" + # Check change in config entry and verify most recent version + assert config_entry.version == 2 + assert config_entry.minor_version == 1 + assert config_entry.unique_id == unique_id - # Check "None" is gone + # Check "None" is gone from version 1.1 to 1.2 assert not entity_registry.async_is_registered( entity_registry.entities.get_entity_id( (Platform.SENSOR, DOMAIN, "None_departure") ) ) + + +async def test_migrate_error_from_future(hass: HomeAssistant) -> None: + """Test a future version isn't migrated.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + version=3, + minor_version=1, + 
unique_id="some_crazy_future_unique_id", + data=MOCK_DATA_STEP_BASE, + ) + + mock_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = CONNECTIONS + + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + entry = hass.config_entries.async_get_entry(mock_entry.entry_id) + assert entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/swiss_public_transport/test_service.py b/tests/components/swiss_public_transport/test_service.py new file mode 100644 index 00000000000..4009327e77d --- /dev/null +++ b/tests/components/swiss_public_transport/test_service.py @@ -0,0 +1,224 @@ +"""Test the swiss_public_transport service.""" + +import json +import logging +from unittest.mock import AsyncMock, patch + +from opendata_transport.exceptions import ( + OpendataTransportConnectionError, + OpendataTransportError, +) +import pytest +from voluptuous import error as vol_er + +from homeassistant.components.swiss_public_transport.const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_LIMIT, + CONF_DESTINATION, + CONF_START, + CONNECTIONS_COUNT, + CONNECTIONS_MAX, + DOMAIN, + SERVICE_FETCH_CONNECTIONS, +) +from homeassistant.components.swiss_public_transport.helper import unique_id_from_config +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from . 
import setup_integration + +from tests.common import MockConfigEntry, load_fixture + +_LOGGER = logging.getLogger(__name__) + +MOCK_DATA_STEP_BASE = { + CONF_START: "test_start", + CONF_DESTINATION: "test_destination", +} + + +@pytest.mark.parametrize( + ("data", "config_data"), + [ + ({ATTR_LIMIT: 1}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: 2}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: 3}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: CONNECTIONS_MAX}, MOCK_DATA_STEP_BASE), + ({}, MOCK_DATA_STEP_BASE), + ], +) +async def test_service_call_fetch_connections_success( + hass: HomeAssistant, + data: dict, + config_data, +) -> None: + """Test the fetch_connections service.""" + + unique_id = unique_id_from_config(config_data) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=config_data, + title=f"Service test call with data={data}", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN))[ + 0 : data.get(ATTR_LIMIT, CONNECTIONS_COUNT) + 2 + ] + + await setup_integration(hass, config_entry) + + data[ATTR_CONFIG_ENTRY_ID] = config_entry.entry_id + assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + response = await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data=data, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() + assert response["connections"] is not None + assert len(response["connections"]) == data.get(ATTR_LIMIT, CONNECTIONS_COUNT) + + +@pytest.mark.parametrize( + ("limit", "config_data", "expected_result", "raise_error"), + [ + (-1, MOCK_DATA_STEP_BASE, pytest.raises(vol_er.MultipleInvalid), None), + (0, MOCK_DATA_STEP_BASE, pytest.raises(vol_er.MultipleInvalid), None), + ( + CONNECTIONS_MAX + 1, + MOCK_DATA_STEP_BASE, + pytest.raises(vol_er.MultipleInvalid), + 
None, + ), + ( + 1, + MOCK_DATA_STEP_BASE, + pytest.raises(HomeAssistantError), + OpendataTransportConnectionError(), + ), + ( + 2, + MOCK_DATA_STEP_BASE, + pytest.raises(HomeAssistantError), + OpendataTransportError(), + ), + ], +) +async def test_service_call_fetch_connections_error( + hass: HomeAssistant, + limit, + config_data, + expected_result, + raise_error, +) -> None: + """Test service call with standard error.""" + + unique_id = unique_id_from_config(config_data) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=config_data, + title=f"Service test call with limit={limit} and error={raise_error}", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN)) + + await setup_integration(hass, config_entry) + + assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + mock().async_get_data.side_effect = raise_error + with expected_result: + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + ATTR_LIMIT: limit, + }, + blocking=True, + return_response=True, + ) + + +async def test_service_call_load_unload( + hass: HomeAssistant, +) -> None: + """Test service call with integration error.""" + + unique_id = unique_id_from_config(MOCK_DATA_STEP_BASE) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=MOCK_DATA_STEP_BASE, + title="Service test call for unloaded entry", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + bad_entry_id = "bad_entry_id" + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN)) + + await setup_integration(hass, config_entry) + + assert 
hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + response = await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() + assert response["connections"] is not None + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises( + ServiceValidationError, match=f"{config_entry.title} is not loaded" + ): + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + + with pytest.raises( + ServiceValidationError, + match=f'Swiss public transport integration instance "{bad_entry_id}" not found', + ): + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: bad_entry_id, + }, + blocking=True, + return_response=True, + ) diff --git a/tests/components/switch/test_device_action.py b/tests/components/switch/test_device_action.py index 0b41ce7992d..9751721cbc7 100644 --- a/tests/components/switch/test_device_action.py +++ b/tests/components/switch/test_device_action.py @@ -7,7 +7,7 @@ from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.switch import DOMAIN from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -24,12 +24,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the 
blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -115,7 +109,6 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -190,7 +183,6 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/switch/test_device_condition.py b/tests/components/switch/test_device_condition.py index 2ba2c6adb5c..7c4f434b0a4 100644 --- a/tests/components/switch/test_device_condition.py +++ b/tests/components/switch/test_device_condition.py @@ -20,7 +20,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -183,7 +176,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ 
-249,20 +242,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -270,7 +263,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -315,12 +308,12 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -328,7 +321,7 @@ async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for firing if condition is on with delay.""" point1 = dt_util.utcnow() @@ -377,26 +370,26 @@ async def 
test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_off event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/switch/test_device_trigger.py b/tests/components/switch/test_device_trigger.py index 092b7a964bb..08e6ab6d0f6 100644 --- a/tests/components/switch/test_device_trigger.py +++ b/tests/components/switch/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -181,7 +174,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> 
None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -268,20 +261,20 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -292,7 +285,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -337,13 +330,13 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) @@ -353,7 +346,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, 
entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -399,16 +392,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/switch_as_x/conftest.py b/tests/components/switch_as_x/conftest.py index 88a86892d2d..f8328f38b54 100644 --- a/tests/components/switch_as_x/conftest.py +++ b/tests/components/switch_as_x/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/switch_as_x/test_fan.py b/tests/components/switch_as_x/test_fan.py index fd4296bd616..a33490dab45 100644 --- a/tests/components/switch_as_x/test_fan.py +++ b/tests/components/switch_as_x/test_fan.py @@ -44,7 +44,7 @@ async def test_default_state(hass: HomeAssistant) -> None: state = hass.states.get("fan.wind_machine") assert state is not None assert state.state == "unavailable" - assert state.attributes["supported_features"] == 0 + assert state.attributes["supported_features"] == 48 async def test_service_calls(hass: HomeAssistant) -> None: diff --git 
a/tests/components/switch_as_x/test_init.py b/tests/components/switch_as_x/test_init.py index 3889a43f741..e250cacb7ac 100644 --- a/tests/components/switch_as_x/test_init.py +++ b/tests/components/switch_as_x/test_init.py @@ -171,8 +171,10 @@ async def test_device_registry_config_entry_1( original_name="ABC", ) # Add another config entry to the same device + other_config_entry = MockConfigEntry() + other_config_entry.add_to_hass(hass) device_registry.async_update_device( - device_entry.id, add_config_entry_id=MockConfigEntry().entry_id + device_entry.id, add_config_entry_id=other_config_entry.entry_id ) switch_as_x_config_entry = MockConfigEntry( diff --git a/tests/components/switchbot_cloud/conftest.py b/tests/components/switchbot_cloud/conftest.py index ed233ff2de9..b559930dedb 100644 --- a/tests/components/switchbot_cloud/conftest.py +++ b/tests/components/switchbot_cloud/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the SwitchBot via API tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/switcher_kis/conftest.py b/tests/components/switcher_kis/conftest.py index 8ff395fcab3..2cf123af2b0 100644 --- a/tests/components/switcher_kis/conftest.py +++ b/tests/components/switcher_kis/conftest.py @@ -1,9 +1,9 @@ """Common fixtures and objects for the Switcher integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index 3c5f3ff241e..ffeef64b5d7 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -38,6 +38,10 @@ DUMMY_MAC_ADDRESS1 = "A1:B2:C3:45:67:D8" DUMMY_MAC_ADDRESS2 = "A1:B2:C3:45:67:D9" DUMMY_MAC_ADDRESS3 = "A1:B2:C3:45:67:DA" DUMMY_MAC_ADDRESS4 
= "A1:B2:C3:45:67:DB" +DUMMY_TOKEN_NEEDED1 = False +DUMMY_TOKEN_NEEDED2 = False +DUMMY_TOKEN_NEEDED3 = False +DUMMY_TOKEN_NEEDED4 = False DUMMY_PHONE_ID = "1234" DUMMY_POWER_CONSUMPTION1 = 100 DUMMY_POWER_CONSUMPTION2 = 2780 @@ -60,6 +64,7 @@ DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DUMMY_IP_ADDRESS1, DUMMY_MAC_ADDRESS1, DUMMY_DEVICE_NAME1, + DUMMY_TOKEN_NEEDED1, DUMMY_POWER_CONSUMPTION1, DUMMY_ELECTRIC_CURRENT1, ) @@ -72,6 +77,7 @@ DUMMY_WATER_HEATER_DEVICE = SwitcherWaterHeater( DUMMY_IP_ADDRESS2, DUMMY_MAC_ADDRESS2, DUMMY_DEVICE_NAME2, + DUMMY_TOKEN_NEEDED2, DUMMY_POWER_CONSUMPTION2, DUMMY_ELECTRIC_CURRENT2, DUMMY_REMAINING_TIME, @@ -86,6 +92,7 @@ DUMMY_SHUTTER_DEVICE = SwitcherShutter( DUMMY_IP_ADDRESS4, DUMMY_MAC_ADDRESS4, DUMMY_DEVICE_NAME4, + DUMMY_TOKEN_NEEDED4, DUMMY_POSITION, DUMMY_DIRECTION, ) @@ -98,6 +105,7 @@ DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat( DUMMY_IP_ADDRESS3, DUMMY_MAC_ADDRESS3, DUMMY_DEVICE_NAME3, + DUMMY_TOKEN_NEEDED3, DUMMY_THERMOSTAT_MODE, DUMMY_TEMPERATURE, DUMMY_TARGET_TEMPERATURE, diff --git a/tests/components/switcher_kis/test_button.py b/tests/components/switcher_kis/test_button.py index 264c163e111..d0604487370 100644 --- a/tests/components/switcher_kis/test_button.py +++ b/tests/components/switcher_kis/test_button.py @@ -63,7 +63,12 @@ async def test_assume_button( ) @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_swing_button( - hass: HomeAssistant, entity, swing, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, + entity, + swing, + mock_bridge, + mock_api, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test vertical swing on/off button.""" monkeypatch.setattr(DEVICE, "remote_id", "ELEC7022") @@ -88,7 +93,7 @@ async def test_swing_button( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_control_device_fail( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: 
"""Test control device fail.""" await init_integration(hass) diff --git a/tests/components/switcher_kis/test_climate.py b/tests/components/switcher_kis/test_climate.py index 759f7f1bd98..5da9684bf2a 100644 --- a/tests/components/switcher_kis/test_climate.py +++ b/tests/components/switcher_kis/test_climate.py @@ -37,7 +37,7 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{slugify(DEVICE.name)}" @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_hvac_mode( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate hvac mode service.""" await init_integration(hass) @@ -92,7 +92,7 @@ async def test_climate_hvac_mode( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_temperature( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate temperature service.""" await init_integration(hass) @@ -144,7 +144,7 @@ async def test_climate_temperature( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_fan_level( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate fan level service.""" await init_integration(hass) @@ -179,7 +179,7 @@ async def test_climate_fan_level( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_swing( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate swing service.""" await init_integration(hass) @@ -234,9 +234,7 @@ async def test_climate_swing( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) -async def test_control_device_fail( - hass: HomeAssistant, mock_bridge, 
mock_api, monkeypatch -) -> None: +async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) -> None: """Test control device fail.""" await init_integration(hass) assert mock_bridge @@ -295,7 +293,7 @@ async def test_control_device_fail( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_bad_update_discard( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test that a bad update from device is discarded.""" await init_integration(hass) @@ -318,7 +316,7 @@ async def test_bad_update_discard( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_control_errors( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test control with settings not supported by device.""" await init_integration(hass) diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index 07f349d1a72..c228da6b556 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -31,7 +31,9 @@ ENTITY_ID = f"{COVER_DOMAIN}.{slugify(DEVICE.name)}" @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) -async def test_cover(hass: HomeAssistant, mock_bridge, mock_api, monkeypatch) -> None: +async def test_cover( + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch +) -> None: """Test cover services.""" await init_integration(hass) assert mock_bridge @@ -103,7 +105,7 @@ async def test_cover(hass: HomeAssistant, mock_bridge, mock_api, monkeypatch) -> # Test stop with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop" + "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop_shutter" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, diff --git 
a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index f49ab99ba6c..89bcefa5138 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ b/tests/components/switcher_kis/test_diagnostics.py @@ -1,17 +1,23 @@ """Tests for the diagnostics data provided by Switcher.""" +import pytest + from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant from . import init_integration from .consts import DUMMY_WATER_HEATER_DEVICE +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_diagnostics( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge, monkeypatch + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_bridge, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test diagnostics.""" entry = await init_integration(hass) @@ -34,7 +40,7 @@ async def test_diagnostics( "__type": "", "repr": ( ")>" + "1, , False)>" ), }, "electric_current": 12.8, @@ -44,6 +50,7 @@ async def test_diagnostics( "name": "Heater FE12", "power_consumption": 2780, "remaining_time": "01:29:32", + "token_needed": False, } ], "entry": { @@ -59,5 +66,7 @@ async def test_diagnostics( "source": "user", "unique_id": "switcher_kis", "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, } diff --git a/tests/components/switcher_kis/test_sensor.py b/tests/components/switcher_kis/test_sensor.py index 1be2efed987..8ccc33f2d37 100644 --- a/tests/components/switcher_kis/test_sensor.py +++ b/tests/components/switcher_kis/test_sensor.py @@ -74,7 +74,9 @@ async def test_sensor_disabled( @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) -async def test_sensor_update(hass: HomeAssistant, mock_bridge, monkeypatch) -> None: +async def test_sensor_update( + hass: HomeAssistant, mock_bridge, monkeypatch: pytest.MonkeyPatch +) 
-> None: """Test sensor update.""" await init_integration(hass) assert mock_bridge diff --git a/tests/components/switcher_kis/test_services.py b/tests/components/switcher_kis/test_services.py index 039daec4c97..26c54ee53ed 100644 --- a/tests/components/switcher_kis/test_services.py +++ b/tests/components/switcher_kis/test_services.py @@ -30,7 +30,7 @@ from .consts import ( @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) async def test_turn_on_with_timer_service( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test the turn on with timer service.""" await init_integration(hass) diff --git a/tests/components/switcher_kis/test_switch.py b/tests/components/switcher_kis/test_switch.py index 058546ac2ae..f14a8f5b1ca 100644 --- a/tests/components/switcher_kis/test_switch.py +++ b/tests/components/switcher_kis/test_switch.py @@ -23,7 +23,9 @@ from .consts import DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) -async def test_switch(hass: HomeAssistant, mock_bridge, mock_api, monkeypatch) -> None: +async def test_switch( + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch +) -> None: """Test the switch.""" await init_integration(hass) assert mock_bridge @@ -75,7 +77,7 @@ async def test_switch_control_fail( hass: HomeAssistant, mock_bridge, mock_api, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture, ) -> None: """Test switch control fail.""" diff --git a/tests/components/synology_dsm/conftest.py b/tests/components/synology_dsm/conftest.py index 2f05d0187be..0e8f79ffd40 100644 --- a/tests/components/synology_dsm/conftest.py +++ b/tests/components/synology_dsm/conftest.py @@ -1,9 +1,9 @@ """Configure Synology DSM tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, 
patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/synology_dsm/test_media_source.py b/tests/components/synology_dsm/test_media_source.py index 433a4b15c23..0c7ab6bc1cc 100644 --- a/tests/components/synology_dsm/test_media_source.py +++ b/tests/components/synology_dsm/test_media_source.py @@ -4,6 +4,7 @@ from pathlib import Path import tempfile from unittest.mock import AsyncMock, MagicMock, patch +from aiohttp import web import pytest from synology_dsm.api.photos import SynoPhotosAlbum, SynoPhotosItem from synology_dsm.exceptions import SynologyDSMException @@ -30,7 +31,7 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant -from homeassistant.util.aiohttp import MockRequest, web +from homeassistant.util.aiohttp import MockRequest from .consts import HOST, MACS, PASSWORD, PORT, USE_SSL, USERNAME @@ -47,11 +48,15 @@ def dsm_with_photos() -> MagicMock: dsm.surveillance_station.update = AsyncMock(return_value=True) dsm.upgrade.update = AsyncMock(return_value=True) - dsm.photos.get_albums = AsyncMock(return_value=[SynoPhotosAlbum(1, "Album 1", 10)]) + dsm.photos.get_albums = AsyncMock( + return_value=[SynoPhotosAlbum(1, "Album 1", 10, "")] + ) dsm.photos.get_items_from_album = AsyncMock( return_value=[ - SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", False), - SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", True), + SynoPhotosItem( + 10, "", "filename.jpg", 12345, "10_1298753", "sm", False, "" + ), + SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", True, ""), ] ) dsm.photos.get_item_thumbnail_url = AsyncMock( @@ -95,17 +100,22 @@ async def test_resolve_media_bad_identifier( [ ( "ABC012345/10/27643_876876/filename.jpg", - "/synology_dsm/ABC012345/27643_876876/filename.jpg", + "/synology_dsm/ABC012345/27643_876876/filename.jpg/", 
"image/jpeg", ), ( "ABC012345/12/12631_47189/filename.png", - "/synology_dsm/ABC012345/12631_47189/filename.png", + "/synology_dsm/ABC012345/12631_47189/filename.png/", "image/png", ), ( "ABC012345/12/12631_47189/filename.png_shared", - "/synology_dsm/ABC012345/12631_47189/filename.png_shared", + "/synology_dsm/ABC012345/12631_47189/filename.png_shared/", + "image/png", + ), + ( + "ABC012345/12_dmypass/12631_47189/filename.png", + "/synology_dsm/ABC012345/12631_47189/filename.png/dmypass", "image/png", ), ], @@ -249,7 +259,7 @@ async def test_browse_media_get_albums( assert result.children[0].identifier == "mocked_syno_dsm_entry/0" assert result.children[0].title == "All images" assert isinstance(result.children[1], BrowseMedia) - assert result.children[1].identifier == "mocked_syno_dsm_entry/1" + assert result.children[1].identifier == "mocked_syno_dsm_entry/1_" assert result.children[1].title == "Album 1" @@ -381,7 +391,7 @@ async def test_browse_media_get_items( assert len(result.children) == 2 item = result.children[0] assert isinstance(item, BrowseMedia) - assert item.identifier == "mocked_syno_dsm_entry/1/10_1298753/filename.jpg" + assert item.identifier == "mocked_syno_dsm_entry/1_/10_1298753/filename.jpg" assert item.title == "filename.jpg" assert item.media_class == MediaClass.IMAGE assert item.media_content_type == "image/jpeg" @@ -390,7 +400,7 @@ async def test_browse_media_get_items( assert item.thumbnail == "http://my.thumbnail.url" item = result.children[1] assert isinstance(item, BrowseMedia) - assert item.identifier == "mocked_syno_dsm_entry/1/10_1298753/filename.jpg_shared" + assert item.identifier == "mocked_syno_dsm_entry/1_/10_1298753/filename.jpg_shared" assert item.title == "filename.jpg" assert item.media_class == MediaClass.IMAGE assert item.media_content_type == "image/jpeg" @@ -434,24 +444,24 @@ async def test_media_view( assert await hass.config_entries.async_setup(entry.entry_id) with pytest.raises(web.HTTPNotFound): - await 
view.get(request, "", "10_1298753/filename") + await view.get(request, "", "10_1298753/filename/") # exception in download_item() dsm_with_photos.photos.download_item = AsyncMock( side_effect=SynologyDSMException("", None) ) with pytest.raises(web.HTTPNotFound): - await view.get(request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg") + await view.get(request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg/") # success dsm_with_photos.photos.download_item = AsyncMock(return_value=b"xxxx") with patch.object(tempfile, "tempdir", tmp_path): result = await view.get( - request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg" + request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg/" ) assert isinstance(result, web.Response) with patch.object(tempfile, "tempdir", tmp_path): result = await view.get( - request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg_shared" + request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg_shared/" ) assert isinstance(result, web.Response) diff --git a/tests/components/system_bridge/__init__.py b/tests/components/system_bridge/__init__.py index edbe5469705..0606ce8e258 100644 --- a/tests/components/system_bridge/__init__.py +++ b/tests/components/system_bridge/__init__.py @@ -1,38 +1,52 @@ """Tests for the System Bridge integration.""" from collections.abc import Awaitable, Callable -from dataclasses import asdict from ipaddress import ip_address from typing import Any -from systembridgeconnector.const import TYPE_DATA_UPDATE -from systembridgemodels.const import MODEL_SYSTEM -from systembridgemodels.modules import System -from systembridgemodels.response import Response +from systembridgemodels.fixtures.modules.battery import FIXTURE_BATTERY +from systembridgemodels.fixtures.modules.cpu import FIXTURE_CPU +from systembridgemodels.fixtures.modules.disks import FIXTURE_DISKS +from systembridgemodels.fixtures.modules.displays import FIXTURE_DISPLAYS +from systembridgemodels.fixtures.modules.gpus import FIXTURE_GPUS +from 
systembridgemodels.fixtures.modules.media import FIXTURE_MEDIA +from systembridgemodels.fixtures.modules.memory import FIXTURE_MEMORY +from systembridgemodels.fixtures.modules.processes import FIXTURE_PROCESSES +from systembridgemodels.fixtures.modules.system import FIXTURE_SYSTEM +from systembridgemodels.modules import Module, ModulesData from homeassistant.components import zeroconf from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN +from homeassistant.core import HomeAssistant -FIXTURE_MAC_ADDRESS = "aa:bb:cc:dd:ee:ff" -FIXTURE_UUID = "e91bf575-56f3-4c83-8f42-70ac17adcd33" +from tests.common import MockConfigEntry -FIXTURE_AUTH_INPUT = {CONF_TOKEN: "abc-123-def-456-ghi"} +FIXTURE_TITLE = "TestSystem" + +FIXTURE_REQUEST_ID = "test" + +FIXTURE_MAC_ADDRESS = FIXTURE_SYSTEM.mac_address +FIXTURE_UUID = FIXTURE_SYSTEM.uuid + +FIXTURE_AUTH_INPUT = { + CONF_TOKEN: "abc-123-def-456-ghi", +} FIXTURE_USER_INPUT = { CONF_TOKEN: "abc-123-def-456-ghi", - CONF_HOST: "test-bridge", + CONF_HOST: "127.0.0.1", CONF_PORT: "9170", } FIXTURE_ZEROCONF_INPUT = { CONF_TOKEN: "abc-123-def-456-ghi", - CONF_HOST: "1.1.1.1", + CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], CONF_PORT: "9170", } FIXTURE_ZEROCONF = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("1.1.1.1"), - ip_addresses=[ip_address("1.1.1.1")], + ip_address=ip_address(FIXTURE_USER_INPUT[CONF_HOST]), + ip_addresses=[ip_address(FIXTURE_USER_INPUT[CONF_HOST])], port=9170, hostname="test-bridge.local.", type="_system-bridge._tcp.local.", @@ -41,7 +55,7 @@ FIXTURE_ZEROCONF = zeroconf.ZeroconfServiceInfo( "address": "http://test-bridge:9170", "fqdn": "test-bridge", "host": "test-bridge", - "ip": "1.1.1.1", + "ip": FIXTURE_USER_INPUT[CONF_HOST], "mac": FIXTURE_MAC_ADDRESS, "port": "9170", "uuid": FIXTURE_UUID, @@ -49,8 +63,8 @@ FIXTURE_ZEROCONF = zeroconf.ZeroconfServiceInfo( ) FIXTURE_ZEROCONF_BAD = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("1.1.1.1"), - ip_addresses=[ip_address("1.1.1.1")], + 
ip_address=ip_address(FIXTURE_USER_INPUT[CONF_HOST]), + ip_addresses=[ip_address(FIXTURE_USER_INPUT[CONF_HOST])], port=9170, hostname="test-bridge.local.", type="_system-bridge._tcp.local.", @@ -60,57 +74,37 @@ FIXTURE_ZEROCONF_BAD = zeroconf.ZeroconfServiceInfo( }, ) - -FIXTURE_SYSTEM = System( - boot_time=1, - fqdn="", - hostname="1.1.1.1", - ip_address_4="1.1.1.1", - mac_address=FIXTURE_MAC_ADDRESS, - platform="", - platform_version="", - uptime=1, - uuid=FIXTURE_UUID, - version="", - version_latest="", - version_newer_available=False, - users=[], +FIXTURE_DATA_RESPONSE = ModulesData( + system=FIXTURE_SYSTEM, ) -FIXTURE_DATA_RESPONSE = Response( - id="1234", - type=TYPE_DATA_UPDATE, - subtype=None, - message="Data received", - module=MODEL_SYSTEM, - data=asdict(FIXTURE_SYSTEM), -) -FIXTURE_DATA_RESPONSE_BAD = Response( - id="1234", - type=TYPE_DATA_UPDATE, - subtype=None, - message="Data received", - module=MODEL_SYSTEM, - data={}, -) +async def setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> bool: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) -FIXTURE_DATA_RESPONSE_BAD = Response( - id="1234", - type=TYPE_DATA_UPDATE, - subtype=None, - message="Data received", - module=MODEL_SYSTEM, - data={}, -) + setup_result = await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return setup_result async def mock_data_listener( - self, callback: Callable[[str, Any], Awaitable[None]] | None = None, _: bool = False, ): """Mock websocket data listener.""" if callback is not None: # Simulate data received from the websocket - await callback(MODEL_SYSTEM, FIXTURE_SYSTEM) + await callback(Module.BATTERY, FIXTURE_BATTERY) + await callback(Module.CPU, FIXTURE_CPU) + await callback(Module.DISKS, FIXTURE_DISKS) + await callback(Module.DISPLAYS, FIXTURE_DISPLAYS) + await callback(Module.GPUS, FIXTURE_GPUS) + await callback(Module.MEDIA, FIXTURE_MEDIA) + await 
callback(Module.MEMORY, FIXTURE_MEMORY) + await callback(Module.PROCESSES, FIXTURE_PROCESSES) + await callback(Module.SYSTEM, FIXTURE_SYSTEM) diff --git a/tests/components/system_bridge/conftest.py b/tests/components/system_bridge/conftest.py new file mode 100644 index 00000000000..2f1f87485e7 --- /dev/null +++ b/tests/components/system_bridge/conftest.py @@ -0,0 +1,195 @@ +"""Fixtures for System Bridge integration tests.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Final +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from systembridgeconnector.const import EventKey, EventType +from systembridgemodels.fixtures.modules.battery import FIXTURE_BATTERY +from systembridgemodels.fixtures.modules.cpu import FIXTURE_CPU +from systembridgemodels.fixtures.modules.disks import FIXTURE_DISKS +from systembridgemodels.fixtures.modules.displays import FIXTURE_DISPLAYS +from systembridgemodels.fixtures.modules.gpus import FIXTURE_GPUS +from systembridgemodels.fixtures.modules.media import FIXTURE_MEDIA +from systembridgemodels.fixtures.modules.memory import FIXTURE_MEMORY +from systembridgemodels.fixtures.modules.networks import FIXTURE_NETWORKS +from systembridgemodels.fixtures.modules.processes import FIXTURE_PROCESSES +from systembridgemodels.fixtures.modules.sensors import FIXTURE_SENSORS +from systembridgemodels.fixtures.modules.system import FIXTURE_SYSTEM +from systembridgemodels.media_directories import MediaDirectory +from systembridgemodels.media_files import MediaFile, MediaFiles +from systembridgemodels.modules import Module, ModulesData, RegisterDataListener +from systembridgemodels.response import Response + +from homeassistant.components.system_bridge.config_flow import SystemBridgeConfigFlow +from homeassistant.components.system_bridge.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN +from 
homeassistant.core import HomeAssistant + +from . import ( + FIXTURE_REQUEST_ID, + FIXTURE_TITLE, + FIXTURE_USER_INPUT, + FIXTURE_UUID, + mock_data_listener, + setup_integration, +) + +from tests.common import MockConfigEntry + +REGISTER_MODULES: Final[list[Module]] = [ + Module.SYSTEM, +] + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock ConfigEntry.""" + return MockConfigEntry( + title=FIXTURE_TITLE, + domain=DOMAIN, + unique_id=FIXTURE_UUID, + version=SystemBridgeConfigFlow.VERSION, + minor_version=SystemBridgeConfigFlow.MINOR_VERSION, + data={ + CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], + CONF_PORT: FIXTURE_USER_INPUT[CONF_PORT], + CONF_TOKEN: FIXTURE_USER_INPUT[CONF_TOKEN], + }, + ) + + +@pytest.fixture(autouse=True) +def mock_setup_notify_platform() -> Generator[AsyncMock]: + """Mock notify platform setup.""" + with patch( + "homeassistant.helpers.discovery.async_load_platform", + ) as mock_setup_notify_platform: + yield mock_setup_notify_platform + + +@pytest.fixture +def mock_version() -> Generator[AsyncMock]: + """Return a mocked Version class.""" + with patch( + "homeassistant.components.system_bridge.Version", + autospec=True, + ) as mock_version: + version = mock_version.return_value + version.check_supported.return_value = True + + yield version + + +@pytest.fixture +def mock_websocket_client( + register_data_listener_model: RegisterDataListener = RegisterDataListener( + modules=REGISTER_MODULES, + ), +) -> Generator[MagicMock]: + """Return a mocked WebSocketClient client.""" + + with ( + patch( + "homeassistant.components.system_bridge.coordinator.WebSocketClient", + autospec=True, + ) as mock_websocket_client, + patch( + "homeassistant.components.system_bridge.config_flow.WebSocketClient", + new=mock_websocket_client, + ), + ): + websocket_client = mock_websocket_client.return_value + websocket_client.connected = False + websocket_client.get_data.return_value = ModulesData( + battery=FIXTURE_BATTERY, + cpu=FIXTURE_CPU, + 
disks=FIXTURE_DISKS, + displays=FIXTURE_DISPLAYS, + gpus=FIXTURE_GPUS, + media=FIXTURE_MEDIA, + memory=FIXTURE_MEMORY, + networks=FIXTURE_NETWORKS, + processes=FIXTURE_PROCESSES, + sensors=FIXTURE_SENSORS, + system=FIXTURE_SYSTEM, + ) + websocket_client.register_data_listener.return_value = Response( + id=FIXTURE_REQUEST_ID, + type=EventType.DATA_LISTENER_REGISTERED, + message="Data listener registered", + data={EventKey.MODULES: register_data_listener_model.modules}, + ) + # Trigger callback when listener is registered + websocket_client.listen.side_effect = mock_data_listener + + websocket_client.get_directories.return_value = [ + MediaDirectory( + key="documents", + path="/home/user/documents", + ) + ] + websocket_client.get_files.return_value = MediaFiles( + files=[ + MediaFile( + name="testsubdirectory", + path="testsubdirectory", + fullpath="/home/user/documents/testsubdirectory", + size=100, + last_accessed=1630000000, + created=1630000000, + modified=1630000000, + is_directory=True, + is_file=False, + is_link=False, + ), + MediaFile( + name="testfile.txt", + path="testfile.txt", + fullpath="/home/user/documents/testfile.txt", + size=100, + last_accessed=1630000000, + created=1630000000, + modified=1630000000, + is_directory=False, + is_file=True, + is_link=False, + mime_type="text/plain", + ), + MediaFile( + name="testfile.jpg", + path="testfile.jpg", + fullpath="/home/user/documents/testimage.jpg", + size=100, + last_accessed=1630000000, + created=1630000000, + modified=1630000000, + is_directory=False, + is_file=True, + is_link=False, + mime_type="image/jpeg", + ), + ], + path="", + ) + + yield websocket_client + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_version: MagicMock, + mock_websocket_client: MagicMock, +) -> MockConfigEntry: + """Initialize the System Bridge integration.""" + assert await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is 
ConfigEntryState.LOADED + + return mock_config_entry diff --git a/tests/components/system_bridge/snapshots/test_media_source.ambr b/tests/components/system_bridge/snapshots/test_media_source.ambr new file mode 100644 index 00000000000..53e0e8416e9 --- /dev/null +++ b/tests/components/system_bridge/snapshots/test_media_source.ambr @@ -0,0 +1,61 @@ +# serializer version: 1 +# name: test_directory[system_bridge_media_source_directory] + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'TestSystem - documents', + }) +# --- +# name: test_entry[system_bridge_media_source_entry] + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'TestSystem', + }) +# --- +# name: test_file[system_bridge_media_source_file_image] + dict({ + 'mime_type': 'image/jpeg', + 'url': 'http://127.0.0.1:9170/api/media/file/data?token=abc-123-def-456-ghi&base=documents&path=testimage.jpg', + }) +# --- +# name: test_file[system_bridge_media_source_file_text] + dict({ + 'mime_type': 'text/plain', + 'url': 'http://127.0.0.1:9170/api/media/file/data?token=abc-123-def-456-ghi&base=documents&path=testfile.txt', + }) +# --- +# name: test_root[system_bridge_media_source_root] + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'System Bridge', + }) +# --- +# name: test_subdirectory[system_bridge_media_source_subdirectory] + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'TestSystem - documents/testsubdirectory', + }) +# --- diff --git a/tests/components/system_bridge/test_config_flow.py 
b/tests/components/system_bridge/test_config_flow.py index 16a6f5d0f56..727d93de893 100644 --- a/tests/components/system_bridge/test_config_flow.py +++ b/tests/components/system_bridge/test_config_flow.py @@ -69,7 +69,7 @@ async def test_user_flow(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "test-bridge" + assert result2["title"] == "127.0.0.1" assert result2["data"] == FIXTURE_USER_INPUT assert len(mock_setup_entry.mock_calls) == 1 @@ -441,7 +441,7 @@ async def test_zeroconf_flow(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "1.1.1.1" + assert result2["title"] == "127.0.0.1" assert result2["data"] == FIXTURE_ZEROCONF_INPUT assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/system_bridge/test_media_source.py b/tests/components/system_bridge/test_media_source.py new file mode 100644 index 00000000000..161d69569b6 --- /dev/null +++ b/tests/components/system_bridge/test_media_source.py @@ -0,0 +1,148 @@ +"""Test the System Bridge integration.""" + +import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import paths + +from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_source import ( + DOMAIN as MEDIA_SOURCE_DOMAIN, + URI_SCHEME, + async_browse_media, + async_resolve_media, +) +from homeassistant.components.system_bridge.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +async def setup_component(hass: HomeAssistant) -> None: + """Set up component.""" + assert await async_setup_component( + hass, + MEDIA_SOURCE_DOMAIN, + {}, + ) + + +async def test_root( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + 
init_integration: MockConfigEntry, +) -> None: + """Test root media browsing.""" + browse_media_root = await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}", + ) + + assert browse_media_root.as_dict() == snapshot( + name=f"{DOMAIN}_media_source_root", + exclude=paths("children", "media_content_id"), + ) + + +async def test_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + """Test browsing entry.""" + browse_media_entry = await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}", + ) + + assert browse_media_entry.as_dict() == snapshot( + name=f"{DOMAIN}_media_source_entry", + exclude=paths("children", "media_content_id"), + ) + + +async def test_directory( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + """Test browsing directory.""" + browse_media_directory = await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents", + ) + + assert browse_media_directory.as_dict() == snapshot( + name=f"{DOMAIN}_media_source_directory", + exclude=paths("children", "media_content_id"), + ) + + +async def test_subdirectory( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + """Test browsing directory.""" + browse_media_directory = await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testsubdirectory", + ) + + assert browse_media_directory.as_dict() == snapshot( + name=f"{DOMAIN}_media_source_subdirectory", + exclude=paths("children", "media_content_id"), + ) + + +async def test_file( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + """Test browsing file.""" + resolve_media_file = await async_resolve_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testfile.txt~~text/plain", + None, + ) + + assert 
resolve_media_file == snapshot( + name=f"{DOMAIN}_media_source_file_text", + ) + + resolve_media_file = await async_resolve_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testimage.jpg~~image/jpeg", + None, + ) + + assert resolve_media_file == snapshot( + name=f"{DOMAIN}_media_source_file_image", + ) + + +async def test_bad_entry( + hass: HomeAssistant, + init_integration: MockConfigEntry, +) -> None: + """Test invalid entry raises BrowseError.""" + with pytest.raises(BrowseError): + await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/badentryid", + ) + + with pytest.raises(BrowseError): + await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/badentryid~~baddirectory", + ) + + with pytest.raises(ValueError): + await async_resolve_media( + hass, + f"{URI_SCHEME}{DOMAIN}/badentryid~~baddirectory/badfile.txt~~text/plain", + None, + ) diff --git a/tests/components/system_health/test_init.py b/tests/components/system_health/test_init.py index e51ab8fab99..b93dccffb92 100644 --- a/tests/components/system_health/test_init.py +++ b/tests/components/system_health/test_init.py @@ -110,7 +110,7 @@ async def test_info_endpoint_register_callback_exc( """Test that the info endpoint requires auth.""" async def mock_info(hass): - raise Exception("TEST ERROR") # pylint: disable=broad-exception-raised + raise Exception("TEST ERROR") # noqa: TRY002 async_register_info(hass, "lovelace", mock_info) assert await async_setup_component(hass, "system_health", {}) diff --git a/tests/components/system_log/test_init.py b/tests/components/system_log/test_init.py index 918d995fab9..83adab8200b 100644 --- a/tests/components/system_log/test_init.py +++ b/tests/components/system_log/test_init.py @@ -10,10 +10,10 @@ import traceback from typing import Any from unittest.mock import MagicMock, patch -from homeassistant.bootstrap import async_setup_component from homeassistant.components import system_log from homeassistant.core import HomeAssistant, 
callback from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_setup_component from tests.common import async_capture_events from tests.typing import WebSocketGenerator @@ -36,7 +36,7 @@ async def get_error_log(hass_ws_client): def _generate_and_log_exception(exception, log): try: - raise Exception(exception) # pylint: disable=broad-exception-raised + raise Exception(exception) # noqa: TRY002, TRY301 except Exception: _LOGGER.exception(log) @@ -461,7 +461,7 @@ async def test__figure_out_source(hass: HomeAssistant) -> None: in a test because the test is not a component. """ try: - raise ValueError("test") + raise ValueError("test") # noqa: TRY301 except ValueError as ex: exc_info = (type(ex), ex, ex.__traceback__) mock_record = MagicMock( @@ -486,7 +486,7 @@ async def test__figure_out_source(hass: HomeAssistant) -> None: async def test_formatting_exception(hass: HomeAssistant) -> None: """Test that exceptions are formatted correctly.""" try: - raise ValueError("test") + raise ValueError("test") # noqa: TRY301 except ValueError as ex: exc_info = (type(ex), ex, ex.__traceback__) mock_record = MagicMock( diff --git a/tests/components/systemmonitor/conftest.py b/tests/components/systemmonitor/conftest.py index e16debdf263..5f0a7a5c76d 100644 --- a/tests/components/systemmonitor/conftest.py +++ b/tests/components/systemmonitor/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator import socket from unittest.mock import AsyncMock, Mock, NonCallableMock, patch from psutil import NoSuchProcess, Process from psutil._common import sdiskpart, sdiskusage, shwtemp, snetio, snicaddr, sswap import pytest -from typing_extensions import Generator from homeassistant.components.systemmonitor.const import DOMAIN from homeassistant.components.systemmonitor.coordinator import VirtualMemory @@ -174,11 +174,11 @@ def mock_psutil(mock_process: list[MockProcess]) -> Generator: "cpu0-thermal": 
[shwtemp("cpu0-thermal", 50.0, 60.0, 70.0)] } mock_psutil.disk_partitions.return_value = [ - sdiskpart("test", "/", "ext4", "", 1, 1), - sdiskpart("test2", "/media/share", "ext4", "", 1, 1), - sdiskpart("test3", "/incorrect", "", "", 1, 1), - sdiskpart("hosts", "/etc/hosts", "bind", "", 1, 1), - sdiskpart("proc", "/proc/run", "proc", "", 1, 1), + sdiskpart("test", "/", "ext4", ""), + sdiskpart("test2", "/media/share", "ext4", ""), + sdiskpart("test3", "/incorrect", "", ""), + sdiskpart("hosts", "/etc/hosts", "bind", ""), + sdiskpart("proc", "/proc/run", "proc", ""), ] mock_psutil.boot_time.return_value = 1708786800.0 mock_psutil.NoSuchProcess = NoSuchProcess diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index b50e051c816..328065f6098 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -35,7 +35,7 @@ }), 'disabled_by': None, 'domain': 'systemmonitor', - 'minor_version': 2, + 'minor_version': 3, 'options': dict({ 'binary_sensor': dict({ 'process': list([ diff --git a/tests/components/systemmonitor/snapshots/test_sensor.ambr b/tests/components/systemmonitor/snapshots/test_sensor.ambr index 3fe9ae7e809..1ee9067a528 100644 --- a/tests/components/systemmonitor/snapshots/test_sensor.ambr +++ b/tests/components/systemmonitor/snapshots/test_sensor.ambr @@ -300,24 +300,6 @@ # name: test_sensor[System Monitor Packets out eth1 - state] '150' # --- -# name: test_sensor[System Monitor Process pip - attributes] - ReadOnlyDict({ - 'friendly_name': 'System Monitor Process pip', - 'icon': 'mdi:cpu-64-bit', - }) -# --- -# name: test_sensor[System Monitor Process pip - state] - 'on' -# --- -# name: test_sensor[System Monitor Process python3 - attributes] - ReadOnlyDict({ - 'friendly_name': 'System Monitor Process python3', - 'icon': 'mdi:cpu-64-bit', - }) -# --- -# name: test_sensor[System Monitor 
Process python3 - state] - 'on' -# --- # name: test_sensor[System Monitor Processor temperature - attributes] ReadOnlyDict({ 'device_class': 'temperature', diff --git a/tests/components/systemmonitor/test_diagnostics.py b/tests/components/systemmonitor/test_diagnostics.py index 78128aad5f4..b0f4fca3d0c 100644 --- a/tests/components/systemmonitor/test_diagnostics.py +++ b/tests/components/systemmonitor/test_diagnostics.py @@ -23,4 +23,4 @@ async def test_diagnostics( """Test diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, mock_added_config_entry - ) == snapshot(exclude=props("last_update", "entry_id")) + ) == snapshot(exclude=props("last_update", "entry_id", "created_at", "modified_at")) diff --git a/tests/components/systemmonitor/test_init.py b/tests/components/systemmonitor/test_init.py index 97f4a41b96c..6c1e4e6316c 100644 --- a/tests/components/systemmonitor/test_init.py +++ b/tests/components/systemmonitor/test_init.py @@ -95,9 +95,49 @@ async def test_migrate_process_sensor_to_binary_sensors( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON process_sensor = hass.states.get("binary_sensor.system_monitor_process_python3") assert process_sensor is not None assert process_sensor.state == STATE_ON + + assert mock_config_entry.minor_version == 3 + assert mock_config_entry.options == { + "binary_sensor": {"process": ["python3", "pip"]}, + "resources": [ + "disk_use_percent_/", + "disk_use_percent_/home/notexist/", + "memory_free_", + "network_out_eth0", + "process_python3", + ], + } + + +async def test_migration_from_future_version( + hass: HomeAssistant, + mock_psutil: Mock, + mock_os: Mock, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test migration from future version.""" + 
mock_config_entry = MockConfigEntry( + title="System Monitor", + domain=DOMAIN, + version=2, + data={}, + options={ + "sensor": {"process": ["python3", "pip"]}, + "resources": [ + "disk_use_percent_/", + "disk_use_percent_/home/notexist/", + "memory_free_", + "network_out_eth0", + "process_python3", + ], + }, + ) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/systemmonitor/test_repairs.py b/tests/components/systemmonitor/test_repairs.py deleted file mode 100644 index 6c1ff9dfd16..00000000000 --- a/tests/components/systemmonitor/test_repairs.py +++ /dev/null @@ -1,199 +0,0 @@ -"""Test repairs for System Monitor.""" - -from __future__ import annotations - -from http import HTTPStatus -from unittest.mock import Mock - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) -from homeassistant.components.systemmonitor.const import DOMAIN -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import entity_registry as er, issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.common import ANY, MockConfigEntry -from tests.typing import ClientSessionGenerator, WebSocketGenerator - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_migrate_process_sensor( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_psutil: Mock, - mock_os: Mock, - hass_client: ClientSessionGenerator, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test migrating process sensor to binary sensor.""" - mock_config_entry = MockConfigEntry( 
- title="System Monitor", - domain=DOMAIN, - data={}, - options={ - "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, - "resources": [ - "disk_use_percent_/", - "disk_use_percent_/home/notexist/", - "memory_free_", - "network_out_eth0", - "process_python3", - ], - }, - ) - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert hass.config_entries.async_entries(DOMAIN) == snapshot( - name="before_migration" - ) - - assert await async_setup_component(hass, "repairs", {}) - await hass.async_block_till_done() - - entity = "sensor.system_monitor_process_python3" - state = hass.states.get(entity) - assert state - - assert entity_registry.async_get(entity) - - ws_client = await hass_ws_client(hass) - client = await hass_client() - - await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - assert len(msg["result"]["issues"]) > 0 - issue = None - for i in msg["result"]["issues"]: - if i["issue_id"] == "process_sensor": - issue = i - assert issue is not None - - url = RepairsFlowIndexView.url - resp = await client.post( - url, json={"handler": DOMAIN, "issue_id": "process_sensor"} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "migrate_process_sensor" - - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={}) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - # Cannot use identity `is` check here as the value is parsed from JSON - assert data["type"] == FlowResultType.CREATE_ENTRY.value - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.system_monitor_process_python3") - assert state - - await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() 
- - assert msg["success"] - issue = None - for i in msg["result"]["issues"]: - if i["issue_id"] == "migrate_process_sensor": - issue = i - assert not issue - - entity = "sensor.system_monitor_process_python3" - state = hass.states.get(entity) - assert not state - - assert not entity_registry.async_get(entity) - - assert hass.config_entries.async_entries(DOMAIN) == snapshot(name="after_migration") - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_other_fixable_issues( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_ws_client: WebSocketGenerator, - mock_added_config_entry: ConfigEntry, -) -> None: - """Test fixing other issues.""" - assert await async_setup_component(hass, "repairs", {}) - await hass.async_block_till_done() - - ws_client = await hass_ws_client(hass) - client = await hass_client() - - await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - - issue = { - "breaks_in_ha_version": "2022.9.0dev0", - "domain": DOMAIN, - "issue_id": "issue_1", - "is_fixable": True, - "learn_more_url": "", - "severity": "error", - "translation_key": "issue_1", - } - ir.async_create_issue( - hass, - issue["domain"], - issue["issue_id"], - breaks_in_ha_version=issue["breaks_in_ha_version"], - is_fixable=issue["is_fixable"], - is_persistent=False, - learn_more_url=None, - severity=issue["severity"], - translation_key=issue["translation_key"], - ) - - await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - results = msg["result"]["issues"] - assert { - "breaks_in_ha_version": "2022.9.0dev0", - "created": ANY, - "dismissed_version": None, - "domain": DOMAIN, - "is_fixable": True, - "issue_domain": None, - "issue_id": "issue_1", - "learn_more_url": None, - "severity": "error", - "translation_key": "issue_1", - "translation_placeholders": None, - "ignored": False, - } in 
results - - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "issue_1"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - # Cannot use identity `is` check here as the value is parsed from JSON - assert data["type"] == FlowResultType.CREATE_ENTRY.value - await hass.async_block_till_done() diff --git a/tests/components/systemmonitor/test_sensor.py b/tests/components/systemmonitor/test_sensor.py index ce15083da67..6d22c5354a4 100644 --- a/tests/components/systemmonitor/test_sensor.py +++ b/tests/components/systemmonitor/test_sensor.py @@ -14,12 +14,10 @@ from homeassistant.components.systemmonitor.const import DOMAIN from homeassistant.components.systemmonitor.coordinator import VirtualMemory from homeassistant.components.systemmonitor.sensor import get_cpu_icon from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockProcess - from tests.common import MockConfigEntry, async_fire_time_changed @@ -38,7 +36,6 @@ async def test_sensor( data={}, options={ "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, "resources": [ "disk_use_percent_/", "disk_use_percent_/home/notexist/", @@ -62,10 +59,6 @@ async def test_sensor( "friendly_name": "System Monitor Memory free", } - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - for entity in 
er.async_entries_for_config_entry( entity_registry, mock_config_entry.entry_id ): @@ -154,7 +147,6 @@ async def test_sensor_updating( data={}, options={ "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, "resources": [ "disk_use_percent_/", "disk_use_percent_/home/notexist/", @@ -172,10 +164,6 @@ async def test_sensor_updating( assert memory_sensor is not None assert memory_sensor.state == "40.0" - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - mock_psutil.virtual_memory.side_effect = Exception("Failed to update") freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) @@ -202,53 +190,6 @@ async def test_sensor_updating( assert memory_sensor.state == "25.0" -async def test_sensor_process_fails( - hass: HomeAssistant, - mock_psutil: Mock, - mock_os: Mock, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test process not exist failure.""" - mock_config_entry = MockConfigEntry( - title="System Monitor", - domain=DOMAIN, - data={}, - options={ - "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, - "resources": [ - "disk_use_percent_/", - "disk_use_percent_/home/notexist/", - "memory_free_", - "network_out_eth0", - "process_python3", - ], - }, - ) - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - - _process = MockProcess("python3", True) - - mock_psutil.process_iter.return_value = [_process] - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - process_sensor = 
hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_OFF - - assert "Failed to load process with ID: 1, old name: python3" in caplog.text - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_network_sensors( freezer: FrozenDateTimeFactory, diff --git a/tests/components/systemmonitor/test_util.py b/tests/components/systemmonitor/test_util.py index b35c7b2e96c..582707f3574 100644 --- a/tests/components/systemmonitor/test_util.py +++ b/tests/components/systemmonitor/test_util.py @@ -50,21 +50,19 @@ async def test_disk_util( """Test the disk failures.""" mock_psutil.psutil.disk_partitions.return_value = [ - sdiskpart("test", "/", "ext4", "", 1, 1), # Should be ok - sdiskpart("test2", "/media/share", "ext4", "", 1, 1), # Should be ok - sdiskpart("test3", "/incorrect", "", "", 1, 1), # Should be skipped as no type + sdiskpart("test", "/", "ext4", ""), # Should be ok + sdiskpart("test2", "/media/share", "ext4", ""), # Should be ok + sdiskpart("test3", "/incorrect", "", ""), # Should be skipped as no type sdiskpart( - "proc", "/proc/run", "proc", "", 1, 1 + "proc", "/proc/run", "proc", "" ), # Should be skipped as in skipped disk types sdiskpart( "test4", "/tmpfs/", # noqa: S108 "tmpfs", "", - 1, - 1, ), # Should be skipped as in skipped disk types - sdiskpart("test5", "E:", "cd", "cdrom", 1, 1), # Should be skipped as cdrom + sdiskpart("test5", "E:", "cd", "cdrom"), # Should be skipped as cdrom ] mock_config_entry.add_to_hass(hass) diff --git a/tests/components/tado/fixtures/smartac4.with_fanlevel.json b/tests/components/tado/fixtures/smartac4.with_fanlevel.json new file mode 100644 index 00000000000..ea1f9cbd8e5 --- /dev/null +++ b/tests/components/tado/fixtures/smartac4.with_fanlevel.json @@ -0,0 +1,88 @@ +{ + "tadoMode": "HOME", + "geolocationOverride": false, + "geolocationOverrideDisableTime": null, + "preparation": null, + "setting": { + "type": 
"AIR_CONDITIONING", + "power": "ON", + "mode": "HEAT", + "temperature": { + "celsius": 25.0, + "fahrenheit": 77.0 + }, + "fanLevel": "LEVEL3", + "verticalSwing": "ON", + "horizontalSwing": "ON" + }, + "overlayType": "MANUAL", + "overlay": { + "type": "MANUAL", + "setting": { + "type": "AIR_CONDITIONING", + "power": "ON", + "mode": "HEAT", + "temperature": { + "celsius": 25.0, + "fahrenheit": 77.0 + }, + "fanLevel": "LEVEL3", + "verticalSwing": "ON" + }, + "termination": { + "type": "MANUAL", + "typeSkillBasedApp": "MANUAL", + "projectedExpiry": null + } + }, + "openWindow": null, + "nextScheduleChange": { + "start": "2024-07-01T05: 45: 00Z", + "setting": { + "type": "AIR_CONDITIONING", + "power": "ON", + "mode": "HEAT", + "temperature": { + "celsius": 24.0, + "fahrenheit": 75.2 + }, + "fanLevel": "LEVEL3", + "verticalSwing": "ON", + "horizontalSwing": "ON" + } + }, + "nextTimeBlock": { + "start": "2024-07-01T05: 45: 00.000Z" + }, + "link": { + "state": "ONLINE" + }, + "runningOfflineSchedule": false, + "activityDataPoints": { + "acPower": { + "timestamp": "2022-07-13T18: 06: 58.183Z", + "type": "POWER", + "value": "ON" + } + }, + "sensorDataPoints": { + "insideTemperature": { + "celsius": 24.3, + "fahrenheit": 75.74, + "timestamp": "2024-06-28T22: 23: 15.679Z", + "type": "TEMPERATURE", + "precision": { + "celsius": 0.1, + "fahrenheit": 0.1 + } + }, + "humidity": { + "type": "PERCENTAGE", + "percentage": 70.9, + "timestamp": "2024-06-28T22: 23: 15.679Z" + } + }, + "terminationCondition": { + "type": "MANUAL" + } +} diff --git a/tests/components/tado/fixtures/zone_states.json b/tests/components/tado/fixtures/zone_states.json index 64d457f3b50..df1a99a80f3 100644 --- a/tests/components/tado/fixtures/zone_states.json +++ b/tests/components/tado/fixtures/zone_states.json @@ -287,6 +287,79 @@ "timestamp": "2020-03-28T02:09:27.830Z" } } + }, + "6": { + "tadoMode": "HOME", + "geolocationOverride": false, + "geolocationOverrideDisableTime": null, + "preparation": null, + 
"setting": { + "type": "AIR_CONDITIONING", + "power": "OFF" + }, + "overlayType": "MANUAL", + "overlay": { + "type": "MANUAL", + "setting": { + "type": "AIR_CONDITIONING", + "power": "OFF" + }, + "termination": { + "type": "MANUAL", + "typeSkillBasedApp": "MANUAL", + "projectedExpiry": null + } + }, + "openWindow": null, + "nextScheduleChange": { + "start": "2024-07-01T05: 45: 00Z", + "setting": { + "type": "AIR_CONDITIONING", + "power": "ON", + "mode": "HEAT", + "temperature": { + "celsius": 24.0, + "fahrenheit": 75.2 + }, + "fanLevel": "LEVEL3", + "verticalSwing": "ON", + "horizontalSwing": "ON" + } + }, + "nextTimeBlock": { + "start": "2024-07-01T05: 45: 00.000Z" + }, + "link": { + "state": "ONLINE" + }, + "runningOfflineSchedule": false, + "activityDataPoints": { + "acPower": { + "timestamp": "2022-07-13T18: 06: 58.183Z", + "type": "POWER", + "value": "OFF" + } + }, + "sensorDataPoints": { + "insideTemperature": { + "celsius": 24.21, + "fahrenheit": 75.58, + "timestamp": "2024-06-28T21: 43: 51.067Z", + "type": "TEMPERATURE", + "precision": { + "celsius": 0.1, + "fahrenheit": 0.1 + } + }, + "humidity": { + "type": "PERCENTAGE", + "percentage": 71.4, + "timestamp": "2024-06-28T21: 43: 51.067Z" + } + }, + "terminationCondition": { + "type": "MANUAL" + } } } } diff --git a/tests/components/tado/fixtures/zone_with_fanlevel_horizontal_vertical_swing.json b/tests/components/tado/fixtures/zone_with_fanlevel_horizontal_vertical_swing.json new file mode 100644 index 00000000000..51ba70b4065 --- /dev/null +++ b/tests/components/tado/fixtures/zone_with_fanlevel_horizontal_vertical_swing.json @@ -0,0 +1,130 @@ +{ + "type": "AIR_CONDITIONING", + "COOL": { + "temperatures": { + "celsius": { + "min": 16, + "max": 31, + "step": 1.0 + }, + "fahrenheit": { + "min": 61, + "max": 88, + "step": 1.0 + } + }, + "fanLevel": ["LEVEL3", "LEVEL2", "AUTO", "LEVEL1", "LEVEL4", "LEVEL5"], + "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], + "horizontalSwing": 
["OFF", "ON"], + "light": ["ON", "OFF"] + }, + "FAN": { + "temperatures": { + "celsius": { + "min": 16, + "max": 31, + "step": 1.0 + }, + "fahrenheit": { + "min": 61, + "max": 88, + "step": 1.0 + } + }, + "fanLevel": ["LEVEL3", "LEVEL2", "AUTO", "LEVEL1", "LEVEL4", "LEVEL5"], + "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], + "horizontalSwing": ["OFF", "ON"], + "light": ["ON", "OFF"] + }, + "AUTO": { + "fanLevel": ["LEVEL3", "LEVEL2", "AUTO", "LEVEL1", "LEVEL4", "LEVEL5"], + "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], + "horizontalSwing": ["OFF", "ON"], + "light": ["ON", "OFF"] + }, + "HEAT": { + "temperatures": { + "celsius": { + "min": 16, + "max": 31, + "step": 1.0 + }, + "fahrenheit": { + "min": 61, + "max": 88, + "step": 1.0 + } + }, + "fanLevel": ["LEVEL3", "LEVEL2", "AUTO", "LEVEL1", "LEVEL4", "LEVEL5"], + "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], + "horizontalSwing": ["OFF", "ON"], + "light": ["ON", "OFF"] + }, + "DRY": { + "temperatures": { + "celsius": { + "min": 16, + "max": 31, + "step": 1.0 + }, + "fahrenheit": { + "min": 61, + "max": 88, + "step": 1.0 + } + }, + "verticalSwing": ["MID_UP", "MID_DOWN", "ON", "OFF", "UP", "MID", "DOWN"], + "horizontalSwing": ["OFF", "ON"], + "light": ["ON", "OFF"] + }, + "initialStates": { + "mode": "COOL", + "modes": { + "COOL": { + "temperature": { + "celsius": 24, + "fahrenheit": 75 + }, + "fanLevel": "LEVEL3", + "verticalSwing": "OFF", + "horizontalSwing": "OFF", + "light": "ON" + }, + "HEAT": { + "temperature": { + "celsius": 24, + "fahrenheit": 75 + }, + "fanLevel": "LEVEL3", + "verticalSwing": "OFF", + "horizontalSwing": "OFF", + "light": "ON" + }, + "DRY": { + "temperature": { + "celsius": 24, + "fahrenheit": 75 + }, + "verticalSwing": "OFF", + "horizontalSwing": "OFF", + "light": "ON" + }, + "FAN": { + "temperature": { + "celsius": 24, + "fahrenheit": 75 + }, + "fanLevel": "LEVEL3", + "verticalSwing": "OFF", + 
"horizontalSwing": "OFF", + "light": "ON" + }, + "AUTO": { + "fanLevel": "LEVEL3", + "verticalSwing": "OFF", + "horizontalSwing": "OFF", + "light": "ON" + } + } + } +} diff --git a/tests/components/tado/fixtures/zones.json b/tests/components/tado/fixtures/zones.json index 5ef7374a660..e1d2ec759ba 100644 --- a/tests/components/tado/fixtures/zones.json +++ b/tests/components/tado/fixtures/zones.json @@ -178,5 +178,45 @@ "deviceTypes": ["WR02"], "reportAvailable": false, "type": "AIR_CONDITIONING" + }, + { + "id": 6, + "name": "Air Conditioning with fanlevel", + "type": "AIR_CONDITIONING", + "dateCreated": "2022-07-13T18: 06: 58.183Z", + "deviceTypes": ["WR02"], + "devices": [ + { + "deviceType": "WR02", + "serialNo": "WR5", + "shortSerialNo": "WR5", + "currentFwVersion": "118.7", + "connectionState": { + "value": true, + "timestamp": "2024-06-28T21: 04: 23.463Z" + }, + "characteristics": { + "capabilities": ["INSIDE_TEMPERATURE_MEASUREMENT", "IDENTIFY"] + }, + "accessPointWiFi": { + "ssid": "tado8480" + }, + "commandTableUploadState": "FINISHED", + "duties": ["ZONE_UI", "ZONE_DRIVER", "ZONE_LEADER"] + } + ], + "reportAvailable": false, + "showScheduleSetup": false, + "supportsDazzle": true, + "dazzleEnabled": true, + "dazzleMode": { + "supported": true, + "enabled": true + }, + "openWindowDetection": { + "supported": true, + "enabled": true, + "timeoutInSeconds": 900 + } } ] diff --git a/tests/components/tado/test_climate.py b/tests/components/tado/test_climate.py index 98fd2d753a4..5a43c728b6e 100644 --- a/tests/components/tado/test_climate.py +++ b/tests/components/tado/test_climate.py @@ -89,3 +89,35 @@ async def test_smartac_with_swing(hass: HomeAssistant) -> None: # Only test for a subset of attributes in case # HA changes the implementation and a new one appears assert all(item in state.attributes.items() for item in expected_attributes.items()) + + +async def test_smartac_with_fanlevel_vertical_and_horizontal_swing( + hass: HomeAssistant, +) -> None: + """Test 
creation of smart ac with swing climate.""" + + await async_init_integration(hass) + + state = hass.states.get("climate.air_conditioning_with_fanlevel") + assert state.state == "heat" + + expected_attributes = { + "current_humidity": 70.9, + "current_temperature": 24.3, + "fan_mode": "high", + "fan_modes": ["high", "medium", "auto", "low"], + "friendly_name": "Air Conditioning with fanlevel", + "hvac_action": "heating", + "hvac_modes": ["off", "auto", "heat", "cool", "heat_cool", "dry", "fan_only"], + "max_temp": 31.0, + "min_temp": 16.0, + "preset_mode": "auto", + "preset_modes": ["away", "home", "auto"], + "swing_modes": ["vertical", "horizontal", "both", "off"], + "supported_features": 441, + "target_temp_step": 1.0, + "temperature": 25.0, + } + # Only test for a subset of attributes in case + # HA changes the implementation and a new one appears + assert all(item in state.attributes.items() for item in expected_attributes.items()) diff --git a/tests/components/tado/test_config_flow.py b/tests/components/tado/test_config_flow.py index a8883f47fe2..4f5f4180fb5 100644 --- a/tests/components/tado/test_config_flow.py +++ b/tests/components/tado/test_config_flow.py @@ -271,147 +271,6 @@ async def test_form_homekit(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT -async def test_import_step(hass: HomeAssistant) -> None: - """Test import step.""" - mock_tado_api = _get_mock_tado_api(getMe={"homes": [{"id": 1, "name": "myhome"}]}) - - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is 
FlowResultType.CREATE_ENTRY - assert result["data"] == { - "username": "test-username", - "password": "test-password", - "home_id": "1", - } - assert mock_setup_entry.call_count == 1 - - -async def test_import_step_existing_entry(hass: HomeAssistant) -> None: - """Test import step with existing entry.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - ) - entry.add_to_hass(hass) - - with patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert mock_setup_entry.call_count == 0 - - -async def test_import_step_validation_failed(hass: HomeAssistant) -> None: - """Test import step with validation failed.""" - with patch( - "homeassistant.components.tado.config_flow.Tado", - side_effect=RuntimeError, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "import_failed" - - -async def test_import_step_device_authentication_failed(hass: HomeAssistant) -> None: - """Test import step with device tracker authentication failed.""" - with patch( - "homeassistant.components.tado.config_flow.Tado", - side_effect=PyTado.exceptions.TadoWrongCredentialsException, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - "username": "test-username", 
- "password": "test-password", - "home_id": 1, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "import_failed_invalid_auth" - - -async def test_import_step_unique_id_configured(hass: HomeAssistant) -> None: - """Test import step with unique ID already configured.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - unique_id="unique_id", - ) - entry.add_to_hass(hass) - - with patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert mock_setup_entry.call_count == 0 - - @pytest.mark.parametrize( ("exception", "error"), [ diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index dd7c108c984..de4fd515e5a 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -27,6 +27,12 @@ async def async_init_integration( # WR1 Device device_wr1_fixture = "tado/device_wr1.json" + # Smart AC with fanLevel, Vertical and Horizontal swings + zone_6_state_fixture = "tado/smartac4.with_fanlevel.json" + zone_6_capabilities_fixture = ( + "tado/zone_with_fanlevel_horizontal_vertical_swing.json" + ) + # Smart AC with Swing zone_5_state_fixture = "tado/smartac3.with_swing.json" zone_5_capabilities_fixture = "tado/zone_with_swing_capabilities.json" @@ -95,6 +101,10 @@ async def async_init_integration( "https://my.tado.com/api/v2/homes/1/zoneStates", text=load_fixture(zone_states_fixture), ) + m.get( + "https://my.tado.com/api/v2/homes/1/zones/6/capabilities", + 
text=load_fixture(zone_6_capabilities_fixture), + ) m.get( "https://my.tado.com/api/v2/homes/1/zones/5/capabilities", text=load_fixture(zone_5_capabilities_fixture), @@ -135,6 +145,14 @@ async def async_init_integration( "https://my.tado.com/api/v2/homes/1/zones/5/defaultOverlay", text=load_fixture(zone_def_overlay), ) + m.get( + "https://my.tado.com/api/v2/homes/1/zones/6/defaultOverlay", + text=load_fixture(zone_def_overlay), + ) + m.get( + "https://my.tado.com/api/v2/homes/1/zones/6/state", + text=load_fixture(zone_6_state_fixture), + ) m.get( "https://my.tado.com/api/v2/homes/1/zones/5/state", text=load_fixture(zone_5_state_fixture), diff --git a/tests/components/tag/test_trigger.py b/tests/components/tag/test_trigger.py index 60d45abb7b9..5c7e515d322 100644 --- a/tests/components/tag/test_trigger.py +++ b/tests/components/tag/test_trigger.py @@ -11,8 +11,6 @@ from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_mock_service - @pytest.fixture(autouse=True, name="stub_blueprint_populate") def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @@ -39,14 +37,8 @@ def tag_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_triggers( - hass: HomeAssistant, tag_setup, calls: list[ServiceCall] + hass: HomeAssistant, tag_setup, service_calls: list[ServiceCall] ) -> None: """Test tag triggers.""" assert await tag_setup() @@ -75,9 +67,9 @@ async def test_triggers( await async_scan_tag(hass, "abc123", None) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["message"] == "service called" - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert 
service_calls[0].data["message"] == "service called" + assert service_calls[0].data["id"] == 0 await hass.services.async_call( automation.DOMAIN, @@ -85,15 +77,16 @@ async def test_triggers( {ATTR_ENTITY_ID: "automation.test"}, blocking=True, ) + assert len(service_calls) == 2 await async_scan_tag(hass, "abc123", None) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_exception_bad_trigger( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test for exception on event triggers firing.""" @@ -117,7 +110,7 @@ async def test_exception_bad_trigger( async def test_multiple_tags_and_devices_trigger( - hass: HomeAssistant, tag_setup, calls: list[ServiceCall] + hass: HomeAssistant, tag_setup, service_calls: list[ServiceCall] ) -> None: """Test multiple tags and devices triggers.""" assert await tag_setup() @@ -158,8 +151,8 @@ async def test_multiple_tags_and_devices_trigger( await async_scan_tag(hass, "def456", device_id="jkl0123") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[0].data["message"] == "service called" - assert calls[1].data["message"] == "service called" - assert calls[2].data["message"] == "service called" - assert calls[3].data["message"] == "service called" + assert len(service_calls) == 4 + assert service_calls[0].data["message"] == "service called" + assert service_calls[1].data["message"] == "service called" + assert service_calls[2].data["message"] == "service called" + assert service_calls[3].data["message"] == "service called" diff --git a/tests/components/tailscale/conftest.py b/tests/components/tailscale/conftest.py index cb7419daf89..5514678f530 100644 --- a/tests/components/tailscale/conftest.py +++ b/tests/components/tailscale/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import 
AsyncMock, MagicMock, patch import pytest from tailscale.models import Devices -from typing_extensions import Generator from homeassistant.components.tailscale.const import CONF_TAILNET, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/tailwind/conftest.py b/tests/components/tailwind/conftest.py index f23463548bc..ea87c120308 100644 --- a/tests/components/tailwind/conftest.py +++ b/tests/components/tailwind/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from unittest.mock import AsyncMock, MagicMock, patch +from collections.abc import Generator +from unittest.mock import MagicMock, patch from gotailwind import TailwindDeviceStatus import pytest -from typing_extensions import Generator from homeassistant.components.tailwind.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_TOKEN @@ -36,7 +36,7 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[None]: """Mock setting up a config entry.""" with patch( "homeassistant.components.tailwind.async_setup_entry", return_value=True diff --git a/tests/components/tailwind/snapshots/test_binary_sensor.ambr b/tests/components/tailwind/snapshots/test_binary_sensor.ambr index ea2a539363d..064b391c43a 100644 --- a/tests/components/tailwind/snapshots/test_binary_sensor.ambr +++ b/tests/components/tailwind/snapshots/test_binary_sensor.ambr @@ -68,8 +68,10 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Door 1', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '10.10', @@ -145,8 +147,10 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Door 2', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '10.10', diff --git a/tests/components/tailwind/snapshots/test_button.ambr 
b/tests/components/tailwind/snapshots/test_button.ambr index 560d3fe692c..17b656ec5fd 100644 --- a/tests/components/tailwind/snapshots/test_button.ambr +++ b/tests/components/tailwind/snapshots/test_button.ambr @@ -72,8 +72,10 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Tailwind iQ3', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '10.10', diff --git a/tests/components/tailwind/snapshots/test_cover.ambr b/tests/components/tailwind/snapshots/test_cover.ambr index 0ecd172b2ca..b69bd9e6410 100644 --- a/tests/components/tailwind/snapshots/test_cover.ambr +++ b/tests/components/tailwind/snapshots/test_cover.ambr @@ -69,8 +69,10 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Door 1', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '10.10', @@ -147,8 +149,10 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Door 2', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '10.10', diff --git a/tests/components/tailwind/snapshots/test_number.ambr b/tests/components/tailwind/snapshots/test_number.ambr index cbd61d31a6c..3e2e0577ad5 100644 --- a/tests/components/tailwind/snapshots/test_number.ambr +++ b/tests/components/tailwind/snapshots/test_number.ambr @@ -81,8 +81,10 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Tailwind iQ3', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': '10.10', diff --git a/tests/components/tami4/conftest.py b/tests/components/tami4/conftest.py index 84b96c04735..2b4acac0b3f 100644 --- a/tests/components/tami4/conftest.py +++ b/tests/components/tami4/conftest.py @@ -1,5 +1,6 @@ """Common fixutres with default mocks as well as common test helper methods.""" +from collections.abc 
import Generator from datetime import datetime from unittest.mock import AsyncMock, MagicMock, patch @@ -7,7 +8,6 @@ import pytest from Tami4EdgeAPI.device import Device from Tami4EdgeAPI.device_metadata import DeviceMetadata from Tami4EdgeAPI.water_quality import UV, Filter, WaterQuality -from typing_extensions import Generator from homeassistant.components.tami4.const import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.core import HomeAssistant @@ -60,6 +60,31 @@ def mock__get_devices_metadata(request: pytest.FixtureRequest) -> Generator[None yield +@pytest.fixture +def mock__get_devices_metadata_no_name( + request: pytest.FixtureRequest, +) -> Generator[None]: + """Fixture to mock _get_devices which makes a call to the API.""" + + side_effect = getattr(request, "param", None) + + device_metadata = DeviceMetadata( + id=1, + name=None, + connected=True, + psn="psn", + type="type", + device_firmware="v1.1", + ) + + with patch( + "Tami4EdgeAPI.Tami4EdgeAPI.Tami4EdgeAPI._get_devices_metadata", + return_value=[device_metadata], + side_effect=side_effect, + ): + yield + + @pytest.fixture def mock_get_device( request: pytest.FixtureRequest, diff --git a/tests/components/tami4/test_config_flow.py b/tests/components/tami4/test_config_flow.py index 4210c391d70..4dfc27bba94 100644 --- a/tests/components/tami4/test_config_flow.py +++ b/tests/components/tami4/test_config_flow.py @@ -120,6 +120,39 @@ async def test_step_otp_valid( assert "refresh_token" in result["data"] +@pytest.mark.usefixtures( + "mock_setup_entry", + "mock_request_otp", + "mock_submit_otp", + "mock__get_devices_metadata_no_name", +) +async def test_step_otp_valid_device_no_name(hass: HomeAssistant) -> None: + """Test user step with valid phone number.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PHONE: "+972555555555"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"otp": "123456"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Tami4" + assert "refresh_token" in result["data"] + + @pytest.mark.parametrize( ("mock_submit_otp", "expected_error"), [ diff --git a/tests/components/tankerkoenig/conftest.py b/tests/components/tankerkoenig/conftest.py index 8f2e2c2fb53..1517c3d2060 100644 --- a/tests/components/tankerkoenig/conftest.py +++ b/tests/components/tankerkoenig/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Tankerkoenig integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.tankerkoenig import DOMAIN from homeassistant.const import CONF_SHOW_ON_MAP diff --git a/tests/components/tankerkoenig/test_diagnostics.py b/tests/components/tankerkoenig/test_diagnostics.py index 441268659f3..e7b479a0c32 100644 --- a/tests/components/tankerkoenig/test_diagnostics.py +++ b/tests/components/tankerkoenig/test_diagnostics.py @@ -4,6 +4,7 @@ from __future__ import annotations import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -21,4 +22,4 @@ async def test_entry_diagnostics( ) -> None: """Test config entry diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/tasmota/conftest.py b/tests/components/tasmota/conftest.py index 07ca8b31825..0de0788d7d9 100644 --- a/tests/components/tasmota/conftest.py +++ 
b/tests/components/tasmota/conftest.py @@ -10,35 +10,12 @@ from homeassistant.components.tasmota.const import ( DEFAULT_PREFIX, DOMAIN, ) -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant -from tests.common import ( - MockConfigEntry, - async_mock_service, - mock_device_registry, - mock_registry, -) +from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 -@pytest.fixture -def device_reg(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture -def entity_reg(hass): - """Return an empty, loaded, registry.""" - return mock_registry(hass) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) def disable_debounce(): """Set MQTT debounce timer to zero.""" @@ -79,6 +56,6 @@ async def setup_tasmota_helper(hass): @pytest.fixture -async def setup_tasmota(hass): +async def setup_tasmota(hass: HomeAssistant) -> None: """Set up Tasmota.""" await setup_tasmota_helper(hass) diff --git a/tests/components/tasmota/snapshots/test_sensor.ambr b/tests/components/tasmota/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..be011e595b9 --- /dev/null +++ b/tests/components/tasmota/snapshots/test_sensor.ambr @@ -0,0 +1,2012 @@ +# serializer version: 1 +# name: test_controlling_state_via_mqtt[sensor_config0-entity_ids0-messages0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota DHT11 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_dht11_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config0-entity_ids0-messages0].1 + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_dht11_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHT11 Temperature', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_DHT11_Temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config0-entity_ids0-messages0].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota DHT11 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_dht11_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.5', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config0-entity_ids0-messages0].3 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota DHT11 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_dht11_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config1-entity_ids1-messages1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota TX23 Speed Act', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_tx23_speed_act', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: 
test_controlling_state_via_mqtt[sensor_config1-entity_ids1-messages1].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_tx23_speed_act', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TX23 Speed Act', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_TX23_Speed_Act', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config1-entity_ids1-messages1].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota TX23 Dir Card', + }), + 'context': , + 'entity_id': 'sensor.tasmota_tx23_dir_card', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config1-entity_ids1-messages1].3 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_tx23_dir_card', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TX23 Dir Card', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_TX23_Dir_Card', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_controlling_state_via_mqtt[sensor_config1-entity_ids1-messages1].4 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota TX23 Speed Act', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_tx23_speed_act', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.3', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config1-entity_ids1-messages1].5 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota TX23 Dir Card', + }), + 'context': , + 'entity_id': 'sensor.tasmota_tx23_dir_card', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'WSW', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config1-entity_ids1-messages1].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota TX23 Speed Act', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_tx23_speed_act', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.4', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config1-entity_ids1-messages1].7 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota TX23 Dir Card', + }), + 'context': , + 'entity_id': 'sensor.tasmota_tx23_dir_card', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ESE', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY TotalTariff 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_totaltariff_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_totaltariff_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY TotalTariff 0', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_TotalTariff_0', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].10 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY ExportTariff 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_exporttariff_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.6', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].11 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY ExportTariff 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_exporttariff_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.8', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].12 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY TotalTariff 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_totaltariff_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.6', + }) +# --- +# name: 
test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].13 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY TotalTariff 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_totaltariff_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.8', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].14 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY ExportTariff 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_exporttariff_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].15 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY ExportTariff 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_exporttariff_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.4', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY TotalTariff 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_totaltariff_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].3 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_totaltariff_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY TotalTariff 1', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_TotalTariff_1', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].4 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY ExportTariff 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_exporttariff_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].5 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_exporttariff_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY ExportTariff 0', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_ExportTariff_0', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 
'friendly_name': 'Tasmota ENERGY ExportTariff 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_exporttariff_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].7 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_exporttariff_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY ExportTariff 1', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_ExportTariff_1', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].8 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY TotalTariff 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_totaltariff_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config2-entity_ids2-messages2].9 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY TotalTariff 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_totaltariff_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.4', + }) +# --- +# name: 
test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota DS18B20 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_ds18b20_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_ds18b20_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DS18B20 Temperature', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_DS18B20_Temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota DS18B20 Id', + }), + 'context': , + 'entity_id': 'sensor.tasmota_ds18b20_id', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3].3 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_ds18b20_id', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DS18B20 Id', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_DS18B20_Id', + 'unit_of_measurement': None, + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3].4 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota DS18B20 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_ds18b20_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.3', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3].5 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota DS18B20 Id', + }), + 'context': , + 'entity_id': 'sensor.tasmota_ds18b20_id', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '01191ED79190', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota DS18B20 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_ds18b20_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.4', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config3-entity_ids3-messages3].7 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota DS18B20 Id', + }), + 'context': , + 'entity_id': 'sensor.tasmota_ds18b20_id', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'meep', + }) +# --- +# name: 
test_controlling_state_via_mqtt[sensor_config4-entity_ids4-messages4] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config4-entity_ids4-messages4].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY Total', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_Total', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config4-entity_ids4-messages4].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config4-entity_ids4-messages4].3 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '5.6', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config5-entity_ids5-messages5] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config5-entity_ids5-messages5].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_total_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY Total 0', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_Total_0', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config5-entity_ids5-messages5].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config5-entity_ids5-messages5].3 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_total_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY Total 1', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_Total_1', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config5-entity_ids5-messages5].4 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config5-entity_ids5-messages5].5 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.4', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config5-entity_ids5-messages5].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total 0', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.6', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config5-entity_ids5-messages5].7 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total 1', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.8', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config6-entity_ids6-messages6] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total Phase1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_phase1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config6-entity_ids6-messages6].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_total_phase1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY Total Phase1', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_Total_Phase1', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config6-entity_ids6-messages6].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total Phase2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_phase2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config6-entity_ids6-messages6].3 + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_energy_total_phase2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ENERGY Total Phase2', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ENERGY_Total_Phase2', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config6-entity_ids6-messages6].4 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total Phase1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_phase1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config6-entity_ids6-messages6].5 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total Phase2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_phase2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.4', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config6-entity_ids6-messages6].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total Phase1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_phase1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.6', + }) +# --- +# name: 
test_controlling_state_via_mqtt[sensor_config6-entity_ids6-messages6].7 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ENERGY Total Phase2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_energy_total_phase2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.8', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota ANALOG Temperature1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_temperature1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_analog_temperature1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ANALOG Temperature1', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ANALOG_Temperature1', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].10 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota ANALOG Temperature2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.tasmota_analog_temperature2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.0', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].11 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 'friendly_name': 'Tasmota ANALOG Illuminance3', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_illuminance3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota ANALOG Temperature2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_temperature2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].3 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_analog_temperature2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ANALOG Temperature2', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ANALOG_Temperature2', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].4 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 
'friendly_name': 'Tasmota ANALOG Illuminance3', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_illuminance3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].5 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_analog_illuminance3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ANALOG Illuminance3', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ANALOG_Illuminance3', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota ANALOG Temperature1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_temperature1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].7 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota ANALOG Temperature2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_temperature2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.4', + }) +# --- +# name: 
test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].8 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 'friendly_name': 'Tasmota ANALOG Illuminance3', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_illuminance3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.6', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config7-entity_ids7-messages7].9 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Tasmota ANALOG Temperature1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_temperature1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.8', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_analog_ctenergy1_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ANALOG CTEnergy1 Energy', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ANALOG_CTEnergy1_Energy', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].10 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '230', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].11 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].12 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].13 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1150', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].14 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'voltage', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '230', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].15 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].3 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_analog_ctenergy1_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ANALOG CTEnergy1 Power', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ANALOG_CTEnergy1_Power', + 'unit_of_measurement': , + }) +# --- +# 
name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].4 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].5 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_analog_ctenergy1_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ANALOG CTEnergy1 Voltage', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ANALOG_CTEnergy1_Voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].7 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_analog_ctenergy1_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ANALOG CTEnergy1 Current', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_ANALOG_CTEnergy1_Current', + 'unit_of_measurement': , + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].8 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.5', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config8-entity_ids8-messages8].9 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Tasmota ANALOG CTEnergy1 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_analog_ctenergy1_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2300', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR1 Unknown', + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor1_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_sensor1_unknown', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SENSOR1 Unknown', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_SENSOR1_Unknown', + 'unit_of_measurement': None, + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].10 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR3 Unknown', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor3_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.5', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].11 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR4 Unknown', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor4_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.5', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].12 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR1 Unknown', + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor1_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].13 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR2 Unknown', + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor2_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '20', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].14 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR3 Unknown', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor3_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].15 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR4 Unknown', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor4_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR2 Unknown', + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor2_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].3 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_sensor2_unknown', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SENSOR2 Unknown', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_SENSOR2_Unknown', + 'unit_of_measurement': None, + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].4 + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR3 Unknown', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor3_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].5 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_sensor3_unknown', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SENSOR3 Unknown', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_SENSOR3_Unknown', + 'unit_of_measurement': None, + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].6 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR4 Unknown', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor4_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].7 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.tasmota_sensor4_unknown', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), 
+ 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SENSOR4 Unknown', + 'platform': 'tasmota', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00000049A3BC_sensor_sensor_SENSOR4_Unknown', + 'unit_of_measurement': None, + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].8 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR1 Unknown', + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor1_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.5', + }) +# --- +# name: test_controlling_state_via_mqtt[sensor_config9-entity_ids9-messages9].9 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Tasmota SENSOR2 Unknown', + }), + 'context': , + 'entity_id': 'sensor.tasmota_sensor2_unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.5', + }) +# --- diff --git a/tests/components/tasmota/test_device_trigger.py b/tests/components/tasmota/test_device_trigger.py index 450ad678ff6..bb474358006 100644 --- a/tests/components/tasmota/test_device_trigger.py +++ b/tests/components/tasmota/test_device_trigger.py @@ -30,8 +30,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: async def test_get_triggers_btn( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -46,7 +45,7 @@ async def test_get_triggers_btn( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) expected_triggers = [ @@ -77,8 +76,7 @@ async def test_get_triggers_btn( async def test_get_triggers_swc( hass: HomeAssistant, - device_reg, - entity_reg, + 
device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -90,7 +88,7 @@ async def test_get_triggers_swc( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) expected_triggers = [ @@ -112,8 +110,7 @@ async def test_get_triggers_swc( async def test_get_unknown_triggers( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -126,7 +123,7 @@ async def test_get_unknown_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -161,8 +158,7 @@ async def test_get_unknown_triggers( async def test_get_non_existing_triggers( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -175,7 +171,7 @@ async def test_get_non_existing_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await async_get_device_automations( @@ -187,8 +183,7 @@ async def test_get_non_existing_triggers( @pytest.mark.no_fail_on_log_exception async def test_discover_bad_triggers( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -207,7 +202,7 @@ async def test_discover_bad_triggers( ) await hass.async_block_till_done() - device_entry = 
device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await async_get_device_automations( @@ -243,7 +238,7 @@ async def test_discover_bad_triggers( ) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await async_get_device_automations( @@ -274,8 +269,7 @@ async def test_discover_bad_triggers( async def test_update_remove_triggers( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -296,7 +290,7 @@ async def test_update_remove_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -351,8 +345,8 @@ async def test_update_remove_triggers( async def test_if_fires_on_mqtt_message_btn( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -366,7 +360,7 @@ async def test_if_fires_on_mqtt_message_btn( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -412,22 +406,22 @@ async def test_if_fires_on_mqtt_message_btn( hass, "tasmota_49A3BC/stat/RESULT", '{"Button1":{"Action":"SINGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press_1" # 
Fake button 3 single press. async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"Button3":{"Action":"SINGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "short_press_3" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "short_press_3" async def test_if_fires_on_mqtt_message_swc( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -442,7 +436,7 @@ async def test_if_fires_on_mqtt_message_swc( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -502,30 +496,30 @@ async def test_if_fires_on_mqtt_message_swc( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press_1" # Fake switch 2 short press. async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch2":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "short_press_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "short_press_2" # Fake switch 3 long press. 
async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"custom_switch":{"Action":"HOLD"}}' ) await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "long_press_3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "long_press_3" async def test_if_fires_on_mqtt_message_late_discover( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -544,7 +538,7 @@ async def test_if_fires_on_mqtt_message_late_discover( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -593,22 +587,22 @@ async def test_if_fires_on_mqtt_message_late_discover( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press. 
async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"custom_switch":{"Action":"HOLD"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "double_press" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "double_press" async def test_if_fires_on_mqtt_message_after_update( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -624,7 +618,7 @@ async def test_if_fires_on_mqtt_message_after_update( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -656,7 +650,7 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Update the trigger with different topic async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) @@ -666,13 +660,13 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async_fire_mqtt_message( hass, "tasmota_49A3BC/status/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 # Update the trigger with same topic async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) @@ -682,17 +676,20 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert 
len(calls) == 2 + assert len(service_calls) == 2 async_fire_mqtt_message( hass, "tasmota_49A3BC/status/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 async def test_no_resubscribe_same_topic( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test subscription to topics without change.""" # Discover a device with device trigger @@ -705,7 +702,7 @@ async def test_no_resubscribe_same_topic( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -741,8 +738,8 @@ async def test_no_resubscribe_same_topic( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -757,7 +754,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -789,7 +786,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Remove the trigger config["swc"][0] = -1 @@ -800,7 +797,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", 
'{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Rediscover the trigger config["swc"][0] = 0 @@ -811,14 +808,14 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -834,7 +831,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -866,7 +863,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Remove the device await remove_device(hass, hass_ws_client, device_entry.id) @@ -876,11 +873,14 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attach_remove( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test attach and removal of trigger.""" # Discover a device with device trigger @@ -893,14 +893,14 @@ 
async def test_attach_remove( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - calls = [] + service_calls = [] def callback(trigger, context): - calls.append(trigger["trigger"]["description"]) + service_calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -925,8 +925,8 @@ async def test_attach_remove( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0] == "event 'tasmota_event'" + assert len(service_calls) == 1 + assert service_calls[0] == "event 'tasmota_event'" # Remove the trigger remove() @@ -937,11 +937,14 @@ async def test_attach_remove( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attach_remove_late( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test attach and removal of trigger.""" # Discover a device without device triggers @@ -956,14 +959,14 @@ async def test_attach_remove_late( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - calls = [] + service_calls = [] def callback(trigger, context): - calls.append(trigger["trigger"]["description"]) + service_calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -988,7 +991,7 @@ async def test_attach_remove_late( 
hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) await hass.async_block_till_done() @@ -998,8 +1001,8 @@ async def test_attach_remove_late( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0] == "event 'tasmota_event'" + assert len(service_calls) == 1 + assert service_calls[0] == "event 'tasmota_event'" # Remove the trigger remove() @@ -1010,11 +1013,14 @@ async def test_attach_remove_late( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attach_remove_late2( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test attach and removal of trigger.""" # Discover a device without device triggers @@ -1029,14 +1035,14 @@ async def test_attach_remove_late2( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - calls = [] + service_calls = [] def callback(trigger, context): - calls.append(trigger["trigger"]["description"]) + service_calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -1068,11 +1074,14 @@ async def test_attach_remove_late2( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_attach_remove_unknown1( - hass: 
HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test attach and removal of unknown trigger.""" # Discover a device without device triggers @@ -1083,7 +1092,7 @@ async def test_attach_remove_unknown1( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -1113,7 +1122,7 @@ async def test_attach_remove_unknown1( async def test_attach_unknown_remove_device_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - device_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -1136,7 +1145,7 @@ async def test_attach_unknown_remove_device_from_registry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -1164,7 +1173,10 @@ async def test_attach_unknown_remove_device_from_registry( async def test_attach_remove_config_entry( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test trigger cleanup when removing a Tasmota config entry.""" # Discover a device with device trigger @@ -1177,14 +1189,14 @@ async def test_attach_remove_config_entry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} 
) - calls = [] + service_calls = [] def callback(trigger, context): - calls.append(trigger["trigger"]["description"]) + service_calls.append(trigger["trigger"]["description"]) await async_initialize_triggers( hass, @@ -1209,8 +1221,8 @@ async def test_attach_remove_config_entry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0] == "event 'tasmota_event'" + assert len(service_calls) == 1 + assert service_calls[0] == "event 'tasmota_event'" # Remove the Tasmota config entry config_entries = hass.config_entries.async_entries("tasmota") @@ -1222,4 +1234,4 @@ async def test_attach_remove_config_entry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 diff --git a/tests/components/tasmota/test_discovery.py b/tests/components/tasmota/test_discovery.py index 5405e6c417d..35ea79f7749 100644 --- a/tests/components/tasmota/test_discovery.py +++ b/tests/components/tasmota/test_discovery.py @@ -124,9 +124,8 @@ async def test_invalid_mac( async def test_correct_config_discovery( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, setup_tasmota, ) -> None: """Test receiving valid discovery message.""" @@ -142,11 +141,11 @@ async def test_correct_config_discovery( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None - entity_entry = entity_reg.async_get("switch.tasmota_test") + entity_entry = entity_registry.async_get("switch.tasmota_test") assert entity_entry is not None state = hass.states.get("switch.tasmota_test") @@ -159,9 
+158,7 @@ async def test_correct_config_discovery( async def test_device_discover( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test setting up a device.""" @@ -176,7 +173,7 @@ async def test_device_discover( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -190,9 +187,7 @@ async def test_device_discover( async def test_device_discover_deprecated( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test setting up a device with deprecated discovery message.""" @@ -207,7 +202,7 @@ async def test_device_discover_deprecated( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -220,9 +215,7 @@ async def test_device_discover_deprecated( async def test_device_update( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test updating a device.""" @@ -240,7 +233,7 @@ async def test_device_update( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -258,7 +251,7 @@ async def test_device_update( await hass.async_block_till_done() # Verify device entry is updated - 
device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -270,9 +263,7 @@ async def test_device_update( async def test_device_remove( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -287,7 +278,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -300,7 +291,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -309,9 +300,7 @@ async def test_device_remove( async def test_device_remove_multiple_config_entries_1( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -321,7 +310,7 @@ async def test_device_remove_multiple_config_entries_1( mock_entry = MockConfigEntry(domain="test") mock_entry.add_to_hass(hass) - device_reg.async_get_or_create( + device_registry.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -336,11 +325,11 @@ async def test_device_remove_multiple_config_entries_1( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert 
device_entry is not None - assert device_entry.config_entries == [tasmota_entry.entry_id, mock_entry.entry_id] + assert device_entry.config_entries == {tasmota_entry.entry_id, mock_entry.entry_id} async_fire_mqtt_message( hass, @@ -350,19 +339,17 @@ async def test_device_remove_multiple_config_entries_1( await hass.async_block_till_done() # Verify device entry is not removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None - assert device_entry.config_entries == [mock_entry.entry_id] + assert device_entry.config_entries == {mock_entry.entry_id} async def test_device_remove_multiple_config_entries_2( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -372,12 +359,12 @@ async def test_device_remove_multiple_config_entries_2( mock_entry = MockConfigEntry(domain="test") mock_entry.add_to_hass(hass) - device_reg.async_get_or_create( + device_registry.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) - other_device_entry = device_reg.async_get_or_create( + other_device_entry = device_registry.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "other_device")}, ) @@ -392,29 +379,29 @@ async def test_device_remove_multiple_config_entries_2( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None - assert device_entry.config_entries == [tasmota_entry.entry_id, mock_entry.entry_id] + assert device_entry.config_entries == {tasmota_entry.entry_id, mock_entry.entry_id} assert 
other_device_entry.id != device_entry.id # Remove other config entry from the device - device_reg.async_update_device( + device_registry.async_update_device( device_entry.id, remove_config_entry_id=mock_entry.entry_id ) await hass.async_block_till_done() # Verify device entry is not removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None - assert device_entry.config_entries == [tasmota_entry.entry_id] + assert device_entry.config_entries == {tasmota_entry.entry_id} mqtt_mock.async_publish.assert_not_called() # Remove other config entry from the other device - Tasmota should not do any cleanup - device_reg.async_update_device( + device_registry.async_update_device( other_device_entry.id, remove_config_entry_id=mock_entry.entry_id ) await hass.async_block_till_done() @@ -425,8 +412,7 @@ async def test_device_remove_stale( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a stale (undiscovered) device does not throw.""" @@ -436,13 +422,13 @@ async def test_device_remove_stale( config_entry = hass.config_entries.async_entries("tasmota")[0] # Create a device - device_reg.async_get_or_create( + device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) # Verify device entry was created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -451,7 +437,7 @@ async def test_device_remove_stale( await remove_device(hass, hass_ws_client, device_entry.id) # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( 
connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -460,9 +446,7 @@ async def test_device_remove_stale( async def test_device_rediscover( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a device.""" @@ -477,7 +461,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is created - device_entry1 = device_reg.async_get_device( + device_entry1 = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry1 is not None @@ -490,7 +474,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -503,7 +487,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is created, and id is reused - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -576,9 +560,8 @@ async def test_entity_duplicate_removal( async def test_same_topic( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, setup_tasmota, issue_registry: ir.IssueRegistry, ) -> None: @@ -605,7 +588,7 @@ async def test_same_topic( # Verify device registry entries are created for both devices for config in configs[0:2]: - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) assert device_entry is not None @@ -616,14 +599,14 @@ async def 
test_same_topic( assert device_entry.sw_version == config["sw"] # Verify entities are created only for the first device - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[0]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 - device_entry = device_reg.async_get_device( + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[1]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 0 # Verify a repairs issue was created issue_id = "topic_duplicated_tasmota_49A3BC/cmnd/" @@ -639,7 +622,7 @@ async def test_same_topic( await hass.async_block_till_done() # Verify device registry entries was created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) assert device_entry is not None @@ -650,10 +633,10 @@ async def test_same_topic( assert device_entry.sw_version == configs[2]["sw"] # Verify no entities were created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 0 # Verify the repairs issue has been updated issue = issue_registry.async_get_issue("tasmota", issue_id) @@ -669,10 +652,10 @@ async def test_same_topic( await hass.async_block_till_done() # Verify entities are created also for the third device - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( 
connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 # Verify the repairs issue has been updated issue = issue_registry.async_get_issue("tasmota", issue_id) @@ -688,10 +671,10 @@ async def test_same_topic( await hass.async_block_till_done() # Verify entities are created also for the second device - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[1]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 # Verify the repairs issue has been removed assert issue_registry.async_get_issue("tasmota", issue_id) is None @@ -700,9 +683,8 @@ async def test_same_topic( async def test_topic_no_prefix( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, setup_tasmota, issue_registry: ir.IssueRegistry, ) -> None: @@ -719,7 +701,7 @@ async def test_topic_no_prefix( await hass.async_block_till_done() # Verify device registry entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) assert device_entry is not None @@ -730,10 +712,10 @@ async def test_topic_no_prefix( assert device_entry.sw_version == config["sw"] # Verify entities are not created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_registry, 
device_entry.id, True)) == 0 # Verify a repairs issue was created issue_id = "topic_no_prefix_00000049A3BC" @@ -749,10 +731,10 @@ async def test_topic_no_prefix( await hass.async_block_till_done() # Verify entities are created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 # Verify the repairs issue has been removed assert ("tasmota", issue_id) not in issue_registry.issues diff --git a/tests/components/tasmota/test_fan.py b/tests/components/tasmota/test_fan.py index 654b8c955d2..49d1d36ce20 100644 --- a/tests/components/tasmota/test_fan.py +++ b/tests/components/tasmota/test_fan.py @@ -61,7 +61,12 @@ async def test_controlling_state_via_mqtt( state = hass.states.get("fan.tasmota") assert state.state == STATE_OFF assert state.attributes["percentage"] is None - assert state.attributes["supported_features"] == fan.FanEntityFeature.SET_SPEED + assert ( + state.attributes["supported_features"] + == fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON + ) assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"FanSpeed":1}') diff --git a/tests/components/tasmota/test_init.py b/tests/components/tasmota/test_init.py index 0123421d5ae..125dba811e6 100644 --- a/tests/components/tasmota/test_init.py +++ b/tests/components/tasmota/test_init.py @@ -4,8 +4,6 @@ import copy import json from unittest.mock import call -import pytest - from homeassistant.components.tasmota.const import DEFAULT_PREFIX, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -26,9 +24,7 @@ async def test_device_remove( hass: HomeAssistant, hass_ws_client: 
WebSocketGenerator, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a discovered device through device registry.""" @@ -44,7 +40,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -53,7 +49,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -70,7 +66,7 @@ async def test_device_remove( async def test_device_remove_non_tasmota_device( hass: HomeAssistant, - device_reg, + device_registry: dr.DeviceRegistry, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -92,7 +88,7 @@ async def test_device_remove_non_tasmota_device( config_entry.add_to_hass(hass) mac = "12:34:56:AB:CD:EF" - device_entry = device_reg.async_get_or_create( + device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -102,7 +98,7 @@ async def test_device_remove_non_tasmota_device( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -113,7 +109,7 @@ async def test_device_remove_non_tasmota_device( async def test_device_remove_stale_tasmota_device( hass: HomeAssistant, - device_reg, + device_registry: dr.DeviceRegistry, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -123,7 +119,7 @@ 
async def test_device_remove_stale_tasmota_device( config_entry = hass.config_entries.async_entries("tasmota")[0] mac = "12:34:56:AB:CD:EF" - device_entry = device_reg.async_get_or_create( + device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -133,7 +129,7 @@ async def test_device_remove_stale_tasmota_device( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -144,8 +140,7 @@ async def test_device_remove_stale_tasmota_device( async def test_tasmota_ws_remove_discovered_device( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -159,7 +154,7 @@ async def test_tasmota_ws_remove_discovered_device( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -170,7 +165,7 @@ async def test_tasmota_ws_remove_discovered_device( ) # Verify device entry is cleared - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None diff --git a/tests/components/tasmota/test_sensor.py b/tests/components/tasmota/test_sensor.py index 2de80de4319..78235f7ebf5 100644 --- a/tests/components/tasmota/test_sensor.py +++ b/tests/components/tasmota/test_sensor.py @@ -13,9 +13,9 @@ from hatasmota.utils import ( get_topic_tele_will, ) import pytest +from syrupy import SnapshotAssertion from homeassistant import config_entries -from homeassistant.components.sensor import ATTR_STATE_CLASS, 
SensorStateClass from homeassistant.components.tasmota.const import DEFAULT_PREFIX from homeassistant.const import ATTR_ASSUMED_STATE, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant @@ -50,6 +50,17 @@ BAD_LIST_SENSOR_CONFIG_3 = { } } +# This configuration has sensors which type we can't guess +DEFAULT_SENSOR_CONFIG_UNKNOWN = { + "sn": { + "Time": "2020-09-25T12:47:15", + "SENSOR1": {"Unknown": None}, + "SENSOR2": {"Unknown": "123"}, + "SENSOR3": {"Unknown": 123}, + "SENSOR4": {"Unknown": 123.0}, + } +} + # This configuration has some sensors where values are lists # Home Assistant maps this to one sensor for each list item LIST_SENSOR_CONFIG = { @@ -175,7 +186,7 @@ TEMPERATURE_SENSOR_CONFIG = { @pytest.mark.parametrize( - ("sensor_config", "entity_ids", "messages", "states"), + ("sensor_config", "entity_ids", "messages"), [ ( DEFAULT_SENSOR_CONFIG, @@ -184,20 +195,6 @@ TEMPERATURE_SENSOR_CONFIG = { '{"DHT11":{"Temperature":20.5}}', '{"StatusSNS":{"DHT11":{"Temperature":20.0}}}', ), - ( - { - "sensor.tasmota_dht11_temperature": { - "state": "20.5", - "attributes": { - "device_class": "temperature", - "unit_of_measurement": "°C", - }, - }, - }, - { - "sensor.tasmota_dht11_temperature": {"state": "20.0"}, - }, - ), ), ( DICT_SENSOR_CONFIG_1, @@ -206,48 +203,18 @@ TEMPERATURE_SENSOR_CONFIG = { '{"TX23":{"Speed":{"Act":"12.3"},"Dir": {"Card": "WSW"}}}', '{"StatusSNS":{"TX23":{"Speed":{"Act":"23.4"},"Dir": {"Card": "ESE"}}}}', ), - ( - { - "sensor.tasmota_tx23_speed_act": { - "state": "12.3", - "attributes": { - "device_class": None, - "unit_of_measurement": "km/h", - }, - }, - "sensor.tasmota_tx23_dir_card": {"state": "WSW"}, - }, - { - "sensor.tasmota_tx23_speed_act": {"state": "23.4"}, - "sensor.tasmota_tx23_dir_card": {"state": "ESE"}, - }, - ), ), ( LIST_SENSOR_CONFIG, [ "sensor.tasmota_energy_totaltariff_0", "sensor.tasmota_energy_totaltariff_1", + "sensor.tasmota_energy_exporttariff_0", + "sensor.tasmota_energy_exporttariff_1", ], ( - 
'{"ENERGY":{"TotalTariff":[1.2,3.4]}}', - '{"StatusSNS":{"ENERGY":{"TotalTariff":[5.6,7.8]}}}', - ), - ( - { - "sensor.tasmota_energy_totaltariff_0": { - "state": "1.2", - "attributes": { - "device_class": None, - "unit_of_measurement": None, - }, - }, - "sensor.tasmota_energy_totaltariff_1": {"state": "3.4"}, - }, - { - "sensor.tasmota_energy_totaltariff_0": {"state": "5.6"}, - "sensor.tasmota_energy_totaltariff_1": {"state": "7.8"}, - }, + '{"ENERGY":{"ExportTariff":[5.6,7.8],"TotalTariff":[1.2,3.4]}}', + '{"StatusSNS":{"ENERGY":{"ExportTariff":[1.2,3.4],"TotalTariff":[5.6,7.8]}}}', ), ), ( @@ -257,22 +224,6 @@ TEMPERATURE_SENSOR_CONFIG = { '{"DS18B20":{"Id": "01191ED79190","Temperature": 12.3}}', '{"StatusSNS":{"DS18B20":{"Id": "meep","Temperature": 23.4}}}', ), - ( - { - "sensor.tasmota_ds18b20_temperature": { - "state": "12.3", - "attributes": { - "device_class": "temperature", - "unit_of_measurement": "°C", - }, - }, - "sensor.tasmota_ds18b20_id": {"state": "01191ED79190"}, - }, - { - "sensor.tasmota_ds18b20_temperature": {"state": "23.4"}, - "sensor.tasmota_ds18b20_id": {"state": "meep"}, - }, - ), ), # Test simple Total sensor ( @@ -282,21 +233,6 @@ TEMPERATURE_SENSOR_CONFIG = { '{"ENERGY":{"Total":1.2,"TotalStartTime":"2018-11-23T15:33:47"}}', '{"StatusSNS":{"ENERGY":{"Total":5.6,"TotalStartTime":"2018-11-23T16:33:47"}}}', ), - ( - { - "sensor.tasmota_energy_total": { - "state": "1.2", - "attributes": { - "device_class": "energy", - ATTR_STATE_CLASS: SensorStateClass.TOTAL, - "unit_of_measurement": "kWh", - }, - }, - }, - { - "sensor.tasmota_energy_total": {"state": "5.6"}, - }, - ), ), # Test list Total sensors ( @@ -306,30 +242,6 @@ TEMPERATURE_SENSOR_CONFIG = { '{"ENERGY":{"Total":[1.2, 3.4],"TotalStartTime":"2018-11-23T15:33:47"}}', '{"StatusSNS":{"ENERGY":{"Total":[5.6, 7.8],"TotalStartTime":"2018-11-23T16:33:47"}}}', ), - ( - { - "sensor.tasmota_energy_total_0": { - "state": "1.2", - "attributes": { - "device_class": "energy", - ATTR_STATE_CLASS: 
SensorStateClass.TOTAL, - "unit_of_measurement": "kWh", - }, - }, - "sensor.tasmota_energy_total_1": { - "state": "3.4", - "attributes": { - "device_class": "energy", - ATTR_STATE_CLASS: SensorStateClass.TOTAL, - "unit_of_measurement": "kWh", - }, - }, - }, - { - "sensor.tasmota_energy_total_0": {"state": "5.6"}, - "sensor.tasmota_energy_total_1": {"state": "7.8"}, - }, - ), ), # Test dict Total sensors ( @@ -342,30 +254,6 @@ TEMPERATURE_SENSOR_CONFIG = { '{"ENERGY":{"Total":{"Phase1":1.2, "Phase2":3.4},"TotalStartTime":"2018-11-23T15:33:47"}}', '{"StatusSNS":{"ENERGY":{"Total":{"Phase1":5.6, "Phase2":7.8},"TotalStartTime":"2018-11-23T15:33:47"}}}', ), - ( - { - "sensor.tasmota_energy_total_phase1": { - "state": "1.2", - "attributes": { - "device_class": "energy", - ATTR_STATE_CLASS: SensorStateClass.TOTAL, - "unit_of_measurement": "kWh", - }, - }, - "sensor.tasmota_energy_total_phase2": { - "state": "3.4", - "attributes": { - "device_class": "energy", - ATTR_STATE_CLASS: SensorStateClass.TOTAL, - "unit_of_measurement": "kWh", - }, - }, - }, - { - "sensor.tasmota_energy_total_phase1": {"state": "5.6"}, - "sensor.tasmota_energy_total_phase2": {"state": "7.8"}, - }, - ), ), ( NUMBERED_SENSOR_CONFIG, @@ -384,39 +272,6 @@ TEMPERATURE_SENSOR_CONFIG = { '"Illuminance3":1.2}}}' ), ), - ( - { - "sensor.tasmota_analog_temperature1": { - "state": "1.2", - "attributes": { - "device_class": "temperature", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - "unit_of_measurement": "°C", - }, - }, - "sensor.tasmota_analog_temperature2": { - "state": "3.4", - "attributes": { - "device_class": "temperature", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - "unit_of_measurement": "°C", - }, - }, - "sensor.tasmota_analog_illuminance3": { - "state": "5.6", - "attributes": { - "device_class": "illuminance", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - "unit_of_measurement": "lx", - }, - }, - }, - { - "sensor.tasmota_analog_temperature1": {"state": "7.8"}, - 
"sensor.tasmota_analog_temperature2": {"state": "9.0"}, - "sensor.tasmota_analog_illuminance3": {"state": "1.2"}, - }, - ), ), ( NUMBERED_SENSOR_CONFIG_2, @@ -436,47 +291,19 @@ TEMPERATURE_SENSOR_CONFIG = { '{"Energy":1.0,"Power":1150,"Voltage":230,"Current":5}}}}' ), ), + ), + # Test we automatically set state class to measurement on unknown numerical sensors + ( + DEFAULT_SENSOR_CONFIG_UNKNOWN, + [ + "sensor.tasmota_sensor1_unknown", + "sensor.tasmota_sensor2_unknown", + "sensor.tasmota_sensor3_unknown", + "sensor.tasmota_sensor4_unknown", + ], ( - { - "sensor.tasmota_analog_ctenergy1_energy": { - "state": "0.5", - "attributes": { - "device_class": "energy", - ATTR_STATE_CLASS: SensorStateClass.TOTAL, - "unit_of_measurement": "kWh", - }, - }, - "sensor.tasmota_analog_ctenergy1_power": { - "state": "2300", - "attributes": { - "device_class": "power", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - "unit_of_measurement": "W", - }, - }, - "sensor.tasmota_analog_ctenergy1_voltage": { - "state": "230", - "attributes": { - "device_class": "voltage", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - "unit_of_measurement": "V", - }, - }, - "sensor.tasmota_analog_ctenergy1_current": { - "state": "10", - "attributes": { - "device_class": "current", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - "unit_of_measurement": "A", - }, - }, - }, - { - "sensor.tasmota_analog_ctenergy1_energy": {"state": "1.0"}, - "sensor.tasmota_analog_ctenergy1_power": {"state": "1150"}, - "sensor.tasmota_analog_ctenergy1_voltage": {"state": "230"}, - "sensor.tasmota_analog_ctenergy1_current": {"state": "5"}, - }, + '{"SENSOR1":{"Unknown":20.5},"SENSOR2":{"Unknown":20.5},"SENSOR3":{"Unknown":20.5},"SENSOR4":{"Unknown":20.5}}', + '{"StatusSNS":{"SENSOR1":{"Unknown":20},"SENSOR2":{"Unknown":20},"SENSOR3":{"Unknown":20},"SENSOR4":{"Unknown":20}}}', ), ), ], @@ -485,11 +312,11 @@ async def test_controlling_state_via_mqtt( hass: HomeAssistant, entity_registry: er.EntityRegistry, mqtt_mock: 
MqttMockHAClient, + snapshot: SnapshotAssertion, setup_tasmota, sensor_config, entity_ids, messages, - states, ) -> None: """Test state update via MQTT.""" config = copy.deepcopy(DEFAULT_CONFIG) @@ -513,11 +340,13 @@ async def test_controlling_state_via_mqtt( state = hass.states.get(entity_id) assert state.state == "unavailable" assert not state.attributes.get(ATTR_ASSUMED_STATE) + assert state == snapshot entry = entity_registry.async_get(entity_id) assert entry.disabled is False assert entry.disabled_by is None assert entry.entity_category is None + assert entry == snapshot async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online") await hass.async_block_till_done() @@ -530,19 +359,13 @@ async def test_controlling_state_via_mqtt( async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", messages[0]) for entity_id in entity_ids: state = hass.states.get(entity_id) - expected_state = states[0][entity_id] - assert state.state == expected_state["state"] - for attribute, expected in expected_state.get("attributes", {}).items(): - assert state.attributes.get(attribute) == expected + assert state == snapshot # Test polled state update async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/STATUS10", messages[1]) for entity_id in entity_ids: state = hass.states.get(entity_id) - expected_state = states[1][entity_id] - assert state.state == expected_state["state"] - for attribute, expected in expected_state.get("attributes", {}).items(): - assert state.attributes.get(attribute) == expected + assert state == snapshot @pytest.mark.parametrize( diff --git a/tests/components/technove/conftest.py b/tests/components/technove/conftest.py index be34ebfefa5..a81575f1edf 100644 --- a/tests/components/technove/conftest.py +++ b/tests/components/technove/conftest.py @@ -1,10 +1,10 @@ """Fixtures for TechnoVE integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest from technove import Station as 
TechnoVEStation -from typing_extensions import Generator from homeassistant.components.technove.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/technove/fixtures/station_charging.json b/tests/components/technove/fixtures/station_charging.json index ea98dc0b071..63e68d0db0e 100644 --- a/tests/components/technove/fixtures/station_charging.json +++ b/tests/components/technove/fixtures/station_charging.json @@ -11,7 +11,7 @@ "normalPeriodActive": false, "maxChargePourcentage": 0.9, "isBatteryProtected": false, - "inSharingMode": true, + "inSharingMode": false, "energySession": 12.34, "energyTotal": 1234, "version": "1.82", diff --git a/tests/components/technove/snapshots/test_binary_sensor.ambr b/tests/components/technove/snapshots/test_binary_sensor.ambr index 140526b9391..cc2dcf4a04a 100644 --- a/tests/components/technove/snapshots/test_binary_sensor.ambr +++ b/tests/components/technove/snapshots/test_binary_sensor.ambr @@ -181,7 +181,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'on', + 'state': 'off', }) # --- # name: test_sensors[binary_sensor.technove_station_static_ip-entry] diff --git a/tests/components/technove/snapshots/test_diagnostics.ambr b/tests/components/technove/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..2e81f124ba5 --- /dev/null +++ b/tests/components/technove/snapshots/test_diagnostics.ambr @@ -0,0 +1,36 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'auto_charge': True, + 'conflict_in_sharing_config': False, + 'current': 23.75, + 'energy_session': 12.34, + 'energy_total': 1234, + 'high_charge_period_active': False, + 'in_sharing_mode': False, + 'is_battery_protected': False, + 'is_session_active': True, + 'is_static_ip': False, + 'is_up_to_date': True, + 'last_charge': ''' + 1701072080,0,17.39 + + ''', + 'mac_address': '**REDACTED**', + 'max_charge_percentage': 0.9, + 'max_current': 24, + 'max_station_current': 32, + 'name': 'TechnoVE 
Station', + 'network_ssid': 'Connecting...', + 'normal_period_active': False, + 'rssi': -82, + 'status': dict({ + '__type': "", + 'repr': "", + }), + 'time': 1701000000, + 'version': '1.82', + 'voltage_in': 238, + 'voltage_out': 238, + }) +# --- diff --git a/tests/components/technove/snapshots/test_number.ambr b/tests/components/technove/snapshots/test_number.ambr new file mode 100644 index 00000000000..622c04d542a --- /dev/null +++ b/tests/components/technove/snapshots/test_number.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_numbers[number.technove_station_maximum_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 32, + 'min': 8, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.technove_station_maximum_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Maximum current', + 'platform': 'technove', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_current', + 'unique_id': 'AA:AA:AA:AA:AA:BB_max_current', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[number.technove_station_maximum_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'TechnoVE Station Maximum current', + 'max': 32, + 'min': 8, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.technove_station_maximum_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- diff --git a/tests/components/technove/test_diagnostics.py b/tests/components/technove/test_diagnostics.py new file mode 100644 index 00000000000..878b084c0c3 --- /dev/null +++ 
b/tests/components/technove/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Tests for TechnoVE diagnostics.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) diff --git a/tests/components/technove/test_number.py b/tests/components/technove/test_number.py new file mode 100644 index 00000000000..c9f39cd9200 --- /dev/null +++ b/tests/components/technove/test_number.py @@ -0,0 +1,201 @@ +"""Tests for the TechnoVE number platform.""" + +from unittest.mock import MagicMock + +import pytest +from syrupy.assertion import SnapshotAssertion +from technove import TechnoVEConnectionError, TechnoVEError + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . 
import setup_with_selected_platforms + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "mock_technove") +async def test_numbers( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the creation and values of the TechnoVE numbers.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.NUMBER]) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "method", "called_with_value"), + [ + ( + "number.technove_station_maximum_current", + "set_max_current", + {"max_current": 10}, + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_number_expected_value( + hass: HomeAssistant, + mock_technove: MagicMock, + entity_id: str, + method: str, + called_with_value: dict[str, bool | int], +) -> None: + """Test set value services with valid values.""" + state = hass.states.get(entity_id) + method_mock = getattr(mock_technove, method) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: called_with_value["max_current"]}, + blocking=True, + ) + + assert method_mock.call_count == 1 + method_mock.assert_called_with(**called_with_value) + + +@pytest.mark.parametrize( + ("entity_id", "value"), + [ + ( + "number.technove_station_maximum_current", + 1, + ), + ( + "number.technove_station_maximum_current", + 1000, + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_number_out_of_bound( + hass: HomeAssistant, + entity_id: str, + value: float, +) -> None: + """Test set value services with out of bound values.""" + state = hass.states.get(entity_id) + + with pytest.raises(ServiceValidationError, match="is outside valid range"): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + 
{ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: value}, + blocking=True, + ) + + assert (state := hass.states.get(state.entity_id)) + assert state.state != STATE_UNAVAILABLE + + +@pytest.mark.usefixtures("init_integration") +async def test_set_max_current_sharing_mode( + hass: HomeAssistant, + mock_technove: MagicMock, +) -> None: + """Test failure to set the max current when the station is in sharing mode.""" + entity_id = "number.technove_station_maximum_current" + state = hass.states.get(entity_id) + + # Enable power sharing mode + device = mock_technove.update.return_value + device.info.in_sharing_mode = True + + with pytest.raises( + ServiceValidationError, + match="power sharing mode is enabled", + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + assert (state := hass.states.get(state.entity_id)) + assert state.state != STATE_UNAVAILABLE + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ( + "number.technove_station_maximum_current", + "set_max_current", + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_invalid_response( + hass: HomeAssistant, + mock_technove: MagicMock, + entity_id: str, + method: str, +) -> None: + """Test invalid response, not becoming unavailable.""" + state = hass.states.get(entity_id) + method_mock = getattr(mock_technove, method) + + method_mock.side_effect = TechnoVEError + with pytest.raises(HomeAssistantError, match="Invalid response from TechnoVE API"): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: 10}, + blocking=True, + ) + + assert method_mock.call_count == 1 + assert (state := hass.states.get(state.entity_id)) + assert state.state != STATE_UNAVAILABLE + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ( + "number.technove_station_maximum_current", + "set_max_current", + ), + ], +) 
+@pytest.mark.usefixtures("init_integration") +async def test_connection_error( + hass: HomeAssistant, + mock_technove: MagicMock, + entity_id: str, + method: str, +) -> None: + """Test connection error, leading to becoming unavailable.""" + state = hass.states.get(entity_id) + method_mock = getattr(mock_technove, method) + + method_mock.side_effect = TechnoVEConnectionError + with pytest.raises( + HomeAssistantError, match="Error communicating with TechnoVE API" + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: 10}, + blocking=True, + ) + + assert method_mock.call_count == 1 + assert (state := hass.states.get(state.entity_id)) + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 295e34fd541..68444de640c 100644 --- a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from pytedee_async.bridge import TedeeBridge from pytedee_async.lock import TedeeLock import pytest -from typing_extensions import Generator from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID diff --git a/tests/components/tedee/snapshots/test_binary_sensor.ambr b/tests/components/tedee/snapshots/test_binary_sensor.ambr index 8c9dca1bd12..385e4ac9bc1 100644 --- a/tests/components/tedee/snapshots/test_binary_sensor.ambr +++ b/tests/components/tedee/snapshots/test_binary_sensor.ambr @@ -32,6 +32,39 @@ 'unit_of_measurement': None, }) # --- +# name: test_binary_sensors[entry-lock_uncalibrated] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lock uncalibrated', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uncalibrated', + 'unique_id': '12345-uncalibrated', + 'unit_of_measurement': None, + }) +# --- # name: test_binary_sensors[entry-pullspring_enabled] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -112,6 +145,20 @@ 'state': 'off', }) # --- +# name: test_binary_sensors[state-lock_uncalibrated] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Lock-1A2B Lock uncalibrated', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensors[state-pullspring_enabled] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/tedee/snapshots/test_init.ambr b/tests/components/tedee/snapshots/test_init.ambr index 83ab032dfb4..20d6bfcdc2a 100644 --- a/tests/components/tedee/snapshots/test_init.ambr +++ b/tests/components/tedee/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'Tedee', 'model': 'Bridge', + 'model_id': None, 'name': 'Bridge-AB1C', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': '0000-0000', 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/tedee/snapshots/test_lock.ambr b/tests/components/tedee/snapshots/test_lock.ambr index 8e4fc464479..14913e32ba5 100644 --- a/tests/components/tedee/snapshots/test_lock.ambr +++ b/tests/components/tedee/snapshots/test_lock.ambr @@ -68,8 +68,10 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee PRO', + 'model_id': None, 'name': 'Lock-1A2B', 
'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -145,8 +147,10 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee GO', + 'model_id': None, 'name': 'Lock-2C3D', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index ee8c318d2dd..788d31c84d2 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -15,20 +15,17 @@ from tests.common import async_fire_time_changed pytestmark = pytest.mark.usefixtures("init_integration") -BINARY_SENSORS = ( - "charging", - "semi_locked", - "pullspring_enabled", -) +BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalibrated") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: - """Test tedee battery charging sensor.""" + """Test tedee binary sensor.""" for key in BINARY_SENSORS: state = hass.states.get(f"binary_sensor.lock_1a2b_{key}") assert state @@ -39,6 +36,7 @@ async def test_binary_sensors( assert entry == snapshot(name=f"entry-{key}") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_new_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index 588e63f693b..d5dc5d4efcf 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -10,7 +10,7 @@ from pytedee_async import ( import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries 
import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -143,3 +143,44 @@ async def test_reauth_flow( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" + + +async def test_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock +) -> None: + """Test that the reconfigure flow works.""" + + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data={ + CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, + CONF_HOST: "192.168.1.42", + }, + ) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + {CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_HOST: "192.168.1.43"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.title == "My Tedee" + assert entry.data == { + CONF_HOST: "192.168.1.43", + CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, + CONF_WEBHOOK_ID: WEBHOOK_ID, + } diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index ffc4a8c30d6..741bc3156cb 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -25,7 +25,7 @@ from homeassistant.components.lock import ( STATE_UNLOCKING, ) from homeassistant.components.webhook import async_generate_url -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.const 
import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -276,10 +276,21 @@ async def test_new_lock( assert state +@pytest.mark.parametrize( + ("lib_state", "expected_state"), + [ + (TedeeLockState.LOCKED, STATE_LOCKED), + (TedeeLockState.HALF_OPEN, STATE_UNKNOWN), + (TedeeLockState.UNKNOWN, STATE_UNKNOWN), + (TedeeLockState.UNCALIBRATED, STATE_UNAVAILABLE), + ], +) async def test_webhook_update( hass: HomeAssistant, mock_tedee: MagicMock, hass_client_no_auth: ClientSessionGenerator, + lib_state: TedeeLockState, + expected_state: str, ) -> None: """Test updated data set through webhook.""" @@ -287,10 +298,9 @@ async def test_webhook_update( assert state assert state.state == STATE_UNLOCKED - webhook_data = {"dummystate": 6} - mock_tedee.locks_dict[ - 12345 - ].state = TedeeLockState.LOCKED # is updated in the lib, so mock and assert in L296 + webhook_data = {"dummystate": lib_state.value} + # is updated in the lib, so mock and assert below + mock_tedee.locks_dict[12345].state = lib_state client = await hass_client_no_auth() webhook_url = async_generate_url(hass, WEBHOOK_ID) @@ -302,4 +312,4 @@ async def test_webhook_update( state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == STATE_LOCKED + assert state.state == expected_state diff --git a/tests/components/telegram_bot/conftest.py b/tests/components/telegram_bot/conftest.py index 6ea5d1446dd..1afe70dcb8a 100644 --- a/tests/components/telegram_bot/conftest.py +++ b/tests/components/telegram_bot/conftest.py @@ -1,6 +1,8 @@ """Tests for the telegram_bot integration.""" +from collections.abc import AsyncGenerator, Generator from datetime import datetime +from typing import Any from unittest.mock import patch import pytest @@ -18,11 +20,12 @@ from homeassistant.const import ( CONF_URL, EVENT_HOMEASSISTANT_START, ) 
+from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @pytest.fixture -def config_webhooks(): +def config_webhooks() -> dict[str, Any]: """Fixture for a webhooks platform configuration.""" return { DOMAIN: [ @@ -43,7 +46,7 @@ def config_webhooks(): @pytest.fixture -def config_polling(): +def config_polling() -> dict[str, Any]: """Fixture for a polling platform configuration.""" return { DOMAIN: [ @@ -62,7 +65,7 @@ def config_polling(): @pytest.fixture -def mock_register_webhook(): +def mock_register_webhook() -> Generator[None]: """Mock calls made by telegram_bot when (de)registering webhook.""" with ( patch( @@ -78,7 +81,7 @@ def mock_register_webhook(): @pytest.fixture -def mock_external_calls(): +def mock_external_calls() -> Generator[None]: """Mock calls that make calls to the live Telegram API.""" test_user = User(123456, "Testbot", True) message = Message( @@ -109,7 +112,7 @@ def mock_external_calls(): @pytest.fixture -def mock_generate_secret_token(): +def mock_generate_secret_token() -> Generator[str]: """Mock secret token generated for webhook.""" mock_secret_token = "DEADBEEF12345678DEADBEEF87654321" with patch( @@ -217,12 +220,12 @@ def update_callback_query(): @pytest.fixture async def webhook_platform( - hass, - config_webhooks, - mock_register_webhook, - mock_external_calls, - mock_generate_secret_token, -): + hass: HomeAssistant, + config_webhooks: dict[str, Any], + mock_register_webhook: None, + mock_external_calls: None, + mock_generate_secret_token: str, +) -> AsyncGenerator[None]: """Fixture for setting up the webhooks platform using appropriate config and mocks.""" await async_setup_component( hass, @@ -235,7 +238,9 @@ async def webhook_platform( @pytest.fixture -async def polling_platform(hass, config_polling, mock_external_calls): +async def polling_platform( + hass: HomeAssistant, config_polling: dict[str, Any], mock_external_calls: None +) -> None: """Fixture for setting up the polling platform 
using appropriate config and mocks.""" await async_setup_component( hass, diff --git a/tests/components/template/snapshots/test_button.ambr b/tests/components/template/snapshots/test_button.ambr new file mode 100644 index 00000000000..3d96ad66050 --- /dev/null +++ b/tests/components/template/snapshots/test_button.ambr @@ -0,0 +1,28 @@ +# serializer version: 1 +# name: test_setup_config_entry[config_entry_extra_options0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'button.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_config_entry[config_entry_extra_options1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'update', + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'button.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/template/snapshots/test_number.ambr b/tests/components/template/snapshots/test_number.ambr new file mode 100644 index 00000000000..d6f5b1e338d --- /dev/null +++ b/tests/components/template/snapshots/test_number.ambr @@ -0,0 +1,18 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 0.1, + }), + 'context': , + 'entity_id': 'number.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- \ No newline at end of file diff --git a/tests/components/template/snapshots/test_select.ambr b/tests/components/template/snapshots/test_select.ambr new file mode 100644 index 00000000000..e2142394cba --- /dev/null +++ b/tests/components/template/snapshots/test_select.ambr @@ -0,0 +1,19 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'My template', + 'options': Wrapper([ + 'off', + 'on', + 'auto', + ]), + }), + 'context': , + 'entity_id': 'select.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/template/snapshots/test_switch.ambr b/tests/components/template/snapshots/test_switch.ambr new file mode 100644 index 00000000000..c240a9436a0 --- /dev/null +++ b/tests/components/template/snapshots/test_switch.ambr @@ -0,0 +1,14 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'switch.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/template/snapshots/test_weather.ambr b/tests/components/template/snapshots/test_weather.ambr index 9b0cf2b9471..bdda5b44e94 100644 --- a/tests/components/template/snapshots/test_weather.ambr +++ b/tests/components/template/snapshots/test_weather.ambr @@ -1,87 +1,4 @@ # serializer version: 1 -# name: test_forecasts[config0-1-weather-forecast] - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].1 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].2 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].3 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 
'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 16.9, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast] - dict({ - 'forecast': list([ - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].1 - dict({ - 'forecast': list([ - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].3 - dict({ - 'forecast': list([ - ]), - }) -# --- # name: test_forecasts[config0-1-weather-get_forecasts] dict({ 'weather.forecast': dict({ @@ -120,51 +37,6 @@ }), }) # --- -# name: test_forecasts[config0-1-weather] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].3 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 16.9, - }), - ]), - }) -# --- # name: test_restore_weather_save_state dict({ 'last_apparent_temperature': None, @@ -180,92 +52,6 @@ 'last_wind_speed': None, }) # --- -# name: test_trigger_weather_services[config0-1-template-forecast] - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - 
]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-forecast].1 - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-forecast].2 - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- # name: test_trigger_weather_services[config0-1-template-get_forecasts] dict({ 'weather.test': dict({ @@ -312,43 +98,3 @@ }), }) # --- -# name: test_trigger_weather_services[config0-1-template] - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 
'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- diff --git a/tests/components/template/test_alarm_control_panel.py b/tests/components/template/test_alarm_control_panel.py index 6a2a95a64eb..ea63d7b9926 100644 --- a/tests/components/template/test_alarm_control_panel.py +++ b/tests/components/template/test_alarm_control_panel.py @@ -244,7 +244,7 @@ async def test_template_syntax_error( "platform": "template", "panels": { "test_template_panel": { - "name": "Template Alarm Panel", + "name": '{{ "Template Alarm Panel" }}', "value_template": "disarmed", **OPTIMISTIC_TEMPLATE_ALARM_CONFIG, } diff --git a/tests/components/template/test_binary_sensor.py b/tests/components/template/test_binary_sensor.py index 50cad5be9e1..eb51b3f53b4 100644 --- a/tests/components/template/test_binary_sensor.py +++ b/tests/components/template/test_binary_sensor.py @@ -1,5 +1,6 @@ """The tests for the Template Binary sensor platform.""" +from copy import deepcopy from datetime import UTC, datetime, timedelta import logging from unittest.mock import patch @@ -995,20 +996,32 @@ async def test_availability_icon_picture( ], ) @pytest.mark.parametrize( - ("extra_config", "restored_state", "initial_state"), + ("extra_config", "source_state", "restored_state", "initial_state"), [ - ({}, ON, OFF), - ({}, OFF, OFF), - ({}, STATE_UNAVAILABLE, OFF), - ({}, STATE_UNKNOWN, OFF), - ({"delay_off": 5}, ON, ON), - ({"delay_off": 5}, OFF, OFF), - ({"delay_off": 5}, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_off": 5}, STATE_UNKNOWN, STATE_UNKNOWN), - ({"delay_on": 5}, ON, ON), - ({"delay_on": 5}, OFF, OFF), - ({"delay_on": 5}, STATE_UNAVAILABLE, 
STATE_UNKNOWN), - ({"delay_on": 5}, STATE_UNKNOWN, STATE_UNKNOWN), + ({}, OFF, ON, OFF), + ({}, OFF, OFF, OFF), + ({}, OFF, STATE_UNAVAILABLE, OFF), + ({}, OFF, STATE_UNKNOWN, OFF), + ({"delay_off": 5}, OFF, ON, ON), + ({"delay_off": 5}, OFF, OFF, OFF), + ({"delay_off": 5}, OFF, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_off": 5}, OFF, STATE_UNKNOWN, STATE_UNKNOWN), + ({"delay_on": 5}, OFF, ON, OFF), + ({"delay_on": 5}, OFF, OFF, OFF), + ({"delay_on": 5}, OFF, STATE_UNAVAILABLE, OFF), + ({"delay_on": 5}, OFF, STATE_UNKNOWN, OFF), + ({}, ON, ON, ON), + ({}, ON, OFF, ON), + ({}, ON, STATE_UNAVAILABLE, ON), + ({}, ON, STATE_UNKNOWN, ON), + ({"delay_off": 5}, ON, ON, ON), + ({"delay_off": 5}, ON, OFF, ON), + ({"delay_off": 5}, ON, STATE_UNAVAILABLE, ON), + ({"delay_off": 5}, ON, STATE_UNKNOWN, ON), + ({"delay_on": 5}, ON, ON, ON), + ({"delay_on": 5}, ON, OFF, OFF), + ({"delay_on": 5}, ON, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_on": 5}, ON, STATE_UNKNOWN, STATE_UNKNOWN), ], ) async def test_restore_state( @@ -1017,18 +1030,20 @@ async def test_restore_state( domain, config, extra_config, + source_state, restored_state, initial_state, ) -> None: """Test restoring template binary sensor.""" + hass.states.async_set("sensor.test_state", source_state) fake_state = State( "binary_sensor.test", restored_state, {}, ) mock_restore_cache(hass, (fake_state,)) - config = dict(config) + config = deepcopy(config) config["template"]["binary_sensor"].update(**extra_config) with assert_setup_component(count, domain): assert await async_setup_component( diff --git a/tests/components/template/test_button.py b/tests/components/template/test_button.py index c861c7874d4..72c3d2351f5 100644 --- a/tests/components/template/test_button.py +++ b/tests/components/template/test_button.py @@ -3,9 +3,12 @@ import datetime as dt from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant import setup from 
homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.template import DOMAIN from homeassistant.components.template.button import DEFAULT_NAME from homeassistant.const import ( CONF_DEVICE_CLASS, @@ -15,14 +18,58 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import assert_setup_component +from tests.common import MockConfigEntry, assert_setup_component _TEST_BUTTON = "button.template_button" _TEST_OPTIONS_BUTTON = "button.test" +@pytest.mark.parametrize( + "config_entry_extra_options", + [ + {}, + { + "device_class": "update", + }, + ], +) +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + config_entry_extra_options: dict[str, str], +) -> None: + """Test the config flow.""" + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "button", + "press": [ + { + "service": "input_boolean.toggle", + "metadata": {}, + "data": {}, + "target": {"entity_id": "input_boolean.test"}, + } + ], + } + | config_entry_extra_options, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("button.my_template") + assert state is not None + assert state == snapshot + + async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -197,3 +244,49 @@ def _verify( state = hass.states.get(entity_id) assert state.state == expected_value assert state.attributes == attributes + + +async def test_device_id( + hass: HomeAssistant, + device_registry: 
dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for button template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "button", + "device_id": device_entry.id, + "press": [ + { + "service": "input_boolean.toggle", + "metadata": {}, + "data": {}, + "target": {"entity_id": "input_boolean.test"}, + } + ], + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("button.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index f277b918661..a62370f4261 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -31,7 +31,9 @@ from tests.typing import WebSocketGenerator [ ( "binary_sensor", - "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}", + { + "state": "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}" + }, "on", {"one": "on", "two": "off"}, {}, @@ -41,7 +43,9 @@ from tests.typing import WebSocketGenerator ), ( "sensor", - "{{ float(states('sensor.one')) + float(states('sensor.two')) }}", + { + "state": "{{ float(states('sensor.one')) + float(states('sensor.two')) }}" + }, "50.0", {"one": "30.0", "two": 
"20.0"}, {}, @@ -49,8 +53,85 @@ from tests.typing import WebSocketGenerator {}, {}, ), + ( + "button", + {}, + "unknown", + {"one": "30.0", "two": "20.0"}, + {}, + { + "device_class": "restart", + "press": [ + { + "service": "input_boolean.toggle", + "target": {"entity_id": "input_boolean.test"}, + "data": {}, + } + ], + }, + { + "device_class": "restart", + "press": [ + { + "service": "input_boolean.toggle", + "target": {"entity_id": "input_boolean.test"}, + "data": {}, + } + ], + }, + {}, + ), + ( + "image", + {"url": "{{ states('sensor.one') }}"}, + "2024-07-09T00:00:00+00:00", + {"one": "http://www.test.com", "two": ""}, + {}, + {"verify_ssl": True}, + {"verify_ssl": True}, + {}, + ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + "30.0", + {"one": "30.0", "two": "20.0"}, + {}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + {}, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + "on", + {"one": "on", "two": "off"}, + {}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {}, + ), + ( + "switch", + {"value_template": "{{ states('switch.one') }}"}, + "on", + {"one": "on", "two": "off"}, + {}, + {}, + {}, + {}, + ), ], ) +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_config_flow( hass: HomeAssistant, template_type, @@ -91,7 +172,7 @@ async def test_config_flow( result["flow_id"], { "name": "My template", - "state": state_template, + **state_template, **extra_input, }, ) @@ -102,8 +183,8 @@ async def test_config_flow( assert result["data"] == {} assert result["options"] == { "name": "My template", - "state": state_template, "template_type": template_type, + **state_template, **extra_options, } assert len(mock_setup_entry.mock_calls) == 1 @@ -112,8 +193,8 @@ async def test_config_flow( assert config_entry.data == {} assert config_entry.options == { "name": "My template", - 
"state": state_template, "template_type": template_type, + **state_template, **extra_options, } @@ -127,22 +208,70 @@ async def test_config_flow( ( "template_type", "state_template", + "extra_input", + "extra_options", ), [ ( "sensor", - "{{ 15 }}", + {"state": "{{ 15 }}"}, + {}, + {}, ), ( "binary_sensor", - "{{ false }}", + {"state": "{{ false }}"}, + {}, + {}, + ), + ( + "switch", + {"value_template": "{{ false }}"}, + {}, + {}, + ), + ( + "button", + {}, + {}, + {}, + ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + }, + {"verify_ssl": True}, + {"verify_ssl": True}, + ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, ), ], ) async def test_config_flow_device( hass: HomeAssistant, template_type: str, - state_template: str, + state_template: dict[str, Any], + extra_input: dict[str, Any], + extra_options: dict[str, Any], device_registry: dr.DeviceRegistry, ) -> None: """Test remove the device registry configuration entry when the device changes.""" @@ -180,8 +309,9 @@ async def test_config_flow_device( result["flow_id"], { "name": "My template", - "state": state_template, "device_id": device_id, + **state_template, + **extra_input, }, ) await hass.async_block_till_done() @@ -191,9 +321,10 @@ async def test_config_flow_device( assert result["data"] == {} assert result["options"] == { "name": "My template", - "state": state_template, "template_type": template_type, "device_id": device_id, + **state_template, + **extra_options, } assert len(mock_setup_entry.mock_calls) == 1 @@ -201,9 +332,10 @@ async def test_config_flow_device( assert config_entry.data == {} assert config_entry.options == { "name": "My template", - "state": state_template, 
"template_type": template_type, "device_id": device_id, + **state_template, + **extra_options, } @@ -214,8 +346,8 @@ def get_suggested(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] - # Wanted key absent from schema - raise KeyError("Wanted key absent from schema") + # If the desired key is missing from the schema, return None + return None @pytest.mark.parametrize( @@ -227,28 +359,122 @@ def get_suggested(schema, key): "input_states", "extra_options", "options_options", + "key_template", ), [ ( "binary_sensor", - "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}", - "{{ states('binary_sensor.one') == 'on' and states('binary_sensor.two') == 'on' }}", + { + "state": "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}" + }, + { + "state": "{{ states('binary_sensor.one') == 'on' and states('binary_sensor.two') == 'on' }}" + }, ["on", "off"], {"one": "on", "two": "off"}, {}, {}, + "state", ), ( "sensor", - "{{ float(states('sensor.one')) + float(states('sensor.two')) }}", - "{{ float(states('sensor.one')) - float(states('sensor.two')) }}", + { + "state": "{{ float(states('sensor.one')) + float(states('sensor.two')) }}" + }, + { + "state": "{{ float(states('sensor.one')) - float(states('sensor.two')) }}" + }, ["50.0", "10.0"], {"one": "30.0", "two": "20.0"}, {}, {}, + "state", + ), + ( + "button", + {}, + {}, + ["unknown", "unknown"], + {"one": "30.0", "two": "20.0"}, + { + "device_class": "restart", + "press": [ + { + "service": "input_boolean.toggle", + "target": {"entity_id": "input_boolean.test"}, + "data": {}, + } + ], + }, + { + "press": [ + { + "service": "input_boolean.toggle", + "target": {"entity_id": "input_boolean.test"}, + "data": {}, + } + ], + }, + "state", + ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + }, + { + "url": "{{ states('sensor.two') }}", + }, + ["2024-07-09T00:00:00+00:00", 
"2024-07-09T00:00:00+00:00"], + {"one": "http://www.test.com", "two": "http://www.test2.com"}, + {"verify_ssl": True}, + { + "url": "{{ states('sensor.two') }}", + "verify_ssl": True, + }, + "url", + ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + {"state": "{{ states('number.two') }}"}, + ["30.0", "20.0"], + {"one": "30.0", "two": "20.0"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + "state", + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"state": "{{ states('select.two') }}"}, + ["on", "off"], + {"one": "on", "two": "off"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + "state", + ), + ( + "switch", + {"value_template": "{{ states('switch.one') }}"}, + {"value_template": "{{ states('switch.two') }}"}, + ["on", "off"], + {"one": "on", "two": "off"}, + {}, + {}, + "value_template", ), ], ) +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_options( hass: HomeAssistant, template_type, @@ -258,6 +484,7 @@ async def test_options( input_states, extra_options, options_options, + key_template, ) -> None: """Test reconfiguring.""" input_entities = ["one", "two"] @@ -272,8 +499,8 @@ async def test_options( domain=DOMAIN, options={ "name": "My template", - "state": old_state_template, "template_type": template_type, + **old_state_template, **extra_options, }, title="My template", @@ -291,25 +518,30 @@ async def test_options( result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == template_type - assert get_suggested(result["data_schema"].schema, "state") == old_state_template + assert get_suggested( + result["data_schema"].schema, key_template + ) == old_state_template.get(key_template) assert "name" not in result["data_schema"].schema result = await 
hass.config_entries.options.async_configure( result["flow_id"], - user_input={"state": new_state_template, **options_options}, + user_input={ + **new_state_template, + **options_options, + }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "My template", - "state": new_state_template, "template_type": template_type, + **new_state_template, **extra_options, } assert config_entry.data == {} assert config_entry.options == { "name": "My template", - "state": new_state_template, "template_type": template_type, + **new_state_template, **extra_options, } assert config_entry.title == "My template" @@ -334,7 +566,7 @@ async def test_options( assert result["step_id"] == template_type assert get_suggested(result["data_schema"].schema, "name") is None - assert get_suggested(result["data_schema"].schema, "state") is None + assert get_suggested(result["data_schema"].schema, key_template) is None @pytest.mark.parametrize( @@ -943,22 +1175,71 @@ async def test_option_flow_sensor_preview_config_entry_removed( ( "template_type", "state_template", + "extra_input", + "extra_options", ), [ ( "sensor", - "{{ 15 }}", + {"state": "{{ 15 }}"}, + {}, + {}, ), ( "binary_sensor", - "{{ false }}", + {"state": "{{ false }}"}, + {}, + {}, + ), + ( + "button", + {}, + {}, + {}, + ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + "verify_ssl": True, + }, + {}, + {}, + ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + ), + ( + "switch", + {"value_template": "{{ false }}"}, + {}, + {}, ), ], ) async def test_options_flow_change_device( hass: HomeAssistant, template_type: str, - state_template: str, + state_template: dict[str, Any], + 
extra_input: dict[str, Any], + extra_options: dict[str, Any], device_registry: dr.DeviceRegistry, ) -> None: """Test remove the device registry configuration entry when the device changes.""" @@ -992,11 +1273,12 @@ async def test_options_flow_change_device( domain=DOMAIN, options={ "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id1, + **state_template, + **extra_options, }, - title="Sensor template", + title="Template", ) template_config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(template_config_entry.entry_id) @@ -1011,23 +1293,26 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - "state": state_template, "device_id": device_id2, + **state_template, + **extra_input, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id2, + **state_template, + **extra_input, } assert template_config_entry.data == {} assert template_config_entry.options == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id2, + **state_template, + **extra_options, } # Remove link with device @@ -1039,20 +1324,23 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - "state": state_template, + **state_template, + **extra_input, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", + **state_template, + **extra_input, } assert template_config_entry.data == {} assert template_config_entry.options == { "template_type": template_type, - "name": "Test", - "state": 
state_template, + "name": "My template", + **state_template, + **extra_options, } # Change to link to device 1 @@ -1064,21 +1352,24 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - "state": state_template, "device_id": device_id1, + **state_template, + **extra_input, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id1, + **state_template, + **extra_input, } assert template_config_entry.data == {} assert template_config_entry.options == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id1, + **state_template, + **extra_options, } diff --git a/tests/components/template/test_image.py b/tests/components/template/test_image.py index bda9e2530ca..101b475956a 100644 --- a/tests/components/template/test_image.py +++ b/tests/components/template/test_image.py @@ -8,6 +8,7 @@ import httpx from PIL import Image import pytest import respx +from syrupy.assertion import SnapshotAssertion from homeassistant import setup from homeassistant.components.input_text import ( @@ -15,12 +16,13 @@ from homeassistant.components.input_text import ( DOMAIN as INPUT_TEXT_DOMAIN, SERVICE_SET_VALUE as INPUT_TEXT_SERVICE_SET_VALUE, ) +from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ENTITY_PICTURE, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import assert_setup_component +from tests.common import MockConfigEntry, assert_setup_component from tests.typing import ClientSessionGenerator _DEFAULT = object() @@ 
-74,6 +76,39 @@ async def _assert_state( assert body == expected_image +@respx.mock +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + imgbytes_jpg, +) -> None: + """Test the config flow.""" + + respx.get("http://example.com").respond( + stream=imgbytes_jpg, content_type="image/jpeg" + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "image", + "url": "http://example.com", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("image.my_template") + assert state is not None + assert state.state == "2024-07-09T00:00:00+00:00" + + @respx.mock @pytest.mark.freeze_time("2023-04-01 00:00:00+00:00") async def test_platform_config( @@ -503,3 +538,47 @@ async def test_trigger_image_custom_entity_picture( imgbytes_jpg, expected_entity_picture="http://example2.com", ) + + +@respx.mock +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for image template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + respx.get("http://example.com").respond( + stream=imgbytes_jpg, content_type="image/jpeg" + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "image", + "url": "http://example.com", + "device_id": device_entry.id, 
+ }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("image.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index d13fd9035b0..2e5870217a2 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -1,4 +1,4 @@ -"""The test for the Template sensor platform.""" +"""Test for Template helper.""" from datetime import timedelta from unittest.mock import patch @@ -7,9 +7,9 @@ import pytest from homeassistant import config from homeassistant.components.template import DOMAIN +from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.reload import SERVICE_RELOAD from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -271,13 +271,103 @@ async def async_yaml_patch_helper(hass, filename): await hass.async_block_till_done() +@pytest.mark.parametrize( + ( + "config_entry_options", + "config_user_input", + ), + [ + ( + { + "name": "My template", + "state": "{{10}}", + "template_type": "sensor", + }, + { + "state": "{{12}}", + }, + ), + ( + { + "template_type": "binary_sensor", + "name": "My template", + "state": "{{1 == 1}}", + }, + { + "state": "{{1 == 2}}", + }, + ), + ( + { + "template_type": "image", + "name": "My template", + "url": "http://example.com", + }, + { + "url": "http://example.com", + }, + ), + ( + { + "template_type": "button", + "name": "My template", + }, + {}, + ), + ( + { + "template_type": "number", + "name": "My template", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + 
"state": "{{ 11 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + ), + ( + { + "template_type": "select", + "name": "My template", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + { + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + ), + ( + { + "template_type": "switch", + "name": "My template", + "value_template": "{{ true }}", + }, + { + "value_template": "{{ true }}", + }, + ), + ], +) async def test_change_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, + config_entry_options: dict[str, str], + config_user_input: dict[str, str], ) -> None: - """Test remove the device registry configuration entry when the device changes.""" + """Test the link between the device and the config entry. - # Configure a device registry + Test, for each platform, that the device was linked to the + config entry and the link was removed when the device is + changed in the integration options. + """ + + # Configure devices registry entry_device1 = MockConfigEntry() entry_device1.add_to_hass(hass) device1 = device_registry.async_get_or_create( @@ -300,60 +390,57 @@ async def test_change_device( device_id2 = device2.id assert device_id2 is not None - # Setup the config entry (binary_sensor) - sensor_config_entry = MockConfigEntry( + # Setup the config entry + template_config_entry = MockConfigEntry( data={}, domain=DOMAIN, - options={ - "template_type": "binary_sensor", - "name": "Teste", - "state": "{{15}}", - "device_id": device_id1, - }, - title="Binary sensor template", + options=config_entry_options | {"device_id": device_id1}, + title="Template", ) - sensor_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(sensor_config_entry.entry_id) + template_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(template_config_entry.entry_id) await hass.async_block_till_done() - # Confirm that the configuration entry has been added to the device 1 
registry (current) + # Confirm that the config entry has been added to the device 1 registry (current) current_device = device_registry.async_get(device_id=device_id1) - assert sensor_config_entry.entry_id in current_device.config_entries + assert template_config_entry.entry_id in current_device.config_entries - # Change configuration options to use device 2 and reload the integration - result = await hass.config_entries.options.async_init(sensor_config_entry.entry_id) + # Change config options to use device 2 and reload the integration + result = await hass.config_entries.options.async_init( + template_config_entry.entry_id + ) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={ - "state": "{{15}}", - "device_id": device_id2, - }, + user_input=config_user_input | {"device_id": device_id2}, ) await hass.async_block_till_done() - # Confirm that the configuration entry has been removed from the device 1 registry (previous) + # Confirm that the config entry has been removed from the device 1 registry previous_device = device_registry.async_get(device_id=device_id1) - assert sensor_config_entry.entry_id not in previous_device.config_entries + assert template_config_entry.entry_id not in previous_device.config_entries - # Confirm that the configuration entry has been added to the device 2 registry (current) + # Confirm that the config entry has been added to the device 2 registry (current) current_device = device_registry.async_get(device_id=device_id2) - assert sensor_config_entry.entry_id in current_device.config_entries + assert template_config_entry.entry_id in current_device.config_entries - result = await hass.config_entries.options.async_init(sensor_config_entry.entry_id) + # Change the config options to remove the device and reload the integration + result = await hass.config_entries.options.async_init( + template_config_entry.entry_id + ) result = await hass.config_entries.options.async_configure( result["flow_id"], - 
user_input={ - "state": "{{15}}", - }, + user_input=config_user_input, ) await hass.async_block_till_done() - # Confirm that the configuration entry has been removed from the device 2 registry (previous) + # Confirm that the config entry has been removed from the device 2 registry previous_device = device_registry.async_get(device_id=device_id2) - assert sensor_config_entry.entry_id not in previous_device.config_entries + assert template_config_entry.entry_id not in previous_device.config_entries - # Confirm that there is no device with the helper configuration entry + # Confirm that there is no device with the helper config entry assert ( - dr.async_entries_for_config_entry(device_registry, sensor_config_entry.entry_id) + dr.async_entries_for_config_entry( + device_registry, template_config_entry.entry_id + ) == [] ) diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index ad97146d0fb..065a1488dc9 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -1,5 +1,7 @@ """The tests for the Template light platform.""" +from typing import Any + import pytest from homeassistant.components import light @@ -152,7 +154,9 @@ OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG = { } -async def async_setup_light(hass, count, light_config): +async def async_setup_light( + hass: HomeAssistant, count: int, light_config: dict[str, Any] +) -> None: """Do setup of light integration.""" config = {"light": {"platform": "template", "lights": light_config}} @@ -169,7 +173,9 @@ async def async_setup_light(hass, count, light_config): @pytest.fixture -async def setup_light(hass, count, light_config): +async def setup_light( + hass: HomeAssistant, count: int, light_config: dict[str, Any] +) -> None: """Do setup of light integration.""" await async_setup_light(hass, count, light_config) diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index bf04151fd36..ca9fe2d7688 100644 
--- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -1,5 +1,7 @@ """The tests for the Template number platform.""" +from syrupy.assertion import SnapshotAssertion + from homeassistant import setup from homeassistant.components.input_number import ( ATTR_VALUE as INPUT_NUMBER_ATTR_VALUE, @@ -14,11 +16,12 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE as NUMBER_SERVICE_SET_VALUE, ) +from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import assert_setup_component, async_capture_events +from tests.common import MockConfigEntry, assert_setup_component, async_capture_events _TEST_NUMBER = "number.template_number" # Represent for number's value @@ -42,6 +45,35 @@ _VALUE_INPUT_NUMBER_CONFIG = { } +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "number", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("number.my_template") + assert state is not None + assert state == snapshot + + async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -460,3 +492,45 @@ async def test_icon_template_with_trigger(hass: HomeAssistant) -> None: 
state = hass.states.get(_TEST_NUMBER) assert float(state.state) == 51 assert state.attributes[ATTR_ICON] == "mdi:greater" + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for number template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "number", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("number.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_select.py b/tests/components/template/test_select.py index 4106abdd469..2268c0840aa 100644 --- a/tests/components/template/test_select.py +++ b/tests/components/template/test_select.py @@ -1,5 +1,7 @@ """The tests for the Template select platform.""" +from syrupy.assertion import SnapshotAssertion + from homeassistant import setup from homeassistant.components.input_select import ( ATTR_OPTION as INPUT_SELECT_ATTR_OPTION, @@ -14,17 +16,45 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION as SELECT_SERVICE_SELECT_OPTION, ) +from homeassistant.components.template import DOMAIN from 
homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import assert_setup_component, async_capture_events +from tests.common import MockConfigEntry, assert_setup_component, async_capture_events _TEST_SELECT = "select.template_select" # Represent for select's current_option _OPTION_INPUT_SELECT = "input_select.option" +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "select", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("select.my_template") + assert state is not None + assert state == snapshot + + async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -428,3 +458,43 @@ async def test_template_icon_with_trigger(hass: HomeAssistant) -> None: state = hass.states.get(_TEST_SELECT) assert state.state == "a" assert state.attributes[ATTR_ICON] == "mdi:greater" + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for select template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + 
connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "select", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("select.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_switch.py b/tests/components/template/test_switch.py index 68cca990ef1..2fc0f29acaf 100644 --- a/tests/components/template/test_switch.py +++ b/tests/components/template/test_switch.py @@ -1,8 +1,10 @@ """The tests for the Template switch platform.""" import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant import setup +from homeassistant.components import template from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -13,9 +15,15 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import CoreState, HomeAssistant, ServiceCall, State +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import assert_setup_component, mock_component, mock_restore_cache +from tests.common import ( + MockConfigEntry, + assert_setup_component, + mock_component, + mock_restore_cache, +) OPTIMISTIC_SWITCH_CONFIG = { "turn_on": { @@ -35,6 +43,38 @@ OPTIMISTIC_SWITCH_CONFIG = { } +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the 
config flow.""" + + hass.states.async_set( + "switch.one", + "on", + {}, + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=template.DOMAIN, + options={ + "name": "My template", + "value_template": "{{ states('switch.one') }}", + "template_type": SWITCH_DOMAIN, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("switch.my_template") + assert state is not None + assert state == snapshot + + async def test_template_state_text(hass: HomeAssistant) -> None: """Test the state text of a template.""" with assert_setup_component(1, "switch"): @@ -655,3 +695,42 @@ async def test_unique_id(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(hass.states.async_all("switch")) == 1 + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for Template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=template.DOMAIN, + options={ + "name": "My template", + "value_template": "{{ true }}", + "template_type": "switch", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("switch.my_template") + assert template_entity is not None + assert 
template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_template_entity.py b/tests/components/template/test_template_entity.py index dcceea95181..c09a09750fe 100644 --- a/tests/components/template/test_template_entity.py +++ b/tests/components/template/test_template_entity.py @@ -11,14 +11,14 @@ async def test_template_entity_requires_hass_set(hass: HomeAssistant) -> None: """Test template entity requires hass to be set before accepting templates.""" entity = template_entity.TemplateEntity(hass) - with pytest.raises(AssertionError): + with pytest.raises(ValueError, match="^hass cannot be None"): entity.add_template_attribute("_hello", template.Template("Hello")) entity.hass = object() - entity.add_template_attribute("_hello", template.Template("Hello", None)) + with pytest.raises(ValueError, match="^template.hass cannot be None"): + entity.add_template_attribute("_hello", template.Template("Hello", None)) tpl_with_hass = template.Template("Hello", entity.hass) entity.add_template_attribute("_hello", tpl_with_hass) - # Because hass is set in `add_template_attribute`, both templates match `tpl_with_hass` - assert len(entity._template_attrs.get(tpl_with_hass, [])) == 2 + assert len(entity._template_attrs.get(tpl_with_hass, [])) == 1 diff --git a/tests/components/tesla_fleet/__init__.py b/tests/components/tesla_fleet/__init__.py new file mode 100644 index 00000000000..78159402bff --- /dev/null +++ b/tests/components/tesla_fleet/__init__.py @@ -0,0 +1,74 @@ +"""Tests for the Tesla Fleet integration.""" + +from unittest.mock import patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.tesla_fleet.const import CLIENT_ID, DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.setup 
import async_setup_component + +from tests.common import MockConfigEntry + + +async def setup_platform( + hass: HomeAssistant, + config_entry: MockConfigEntry, + platforms: list[Platform] | None = None, +) -> None: + """Set up the Tesla Fleet platform.""" + + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential(CLIENT_ID, "", "Home Assistant"), + DOMAIN, + ) + + config_entry.add_to_hass(hass) + + if platforms is None: + await hass.config_entries.async_setup(config_entry.entry_id) + else: + with patch("homeassistant.components.tesla_fleet.PLATFORMS", platforms): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +def assert_entities( + hass: HomeAssistant, + entry_id: str, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that all entities match their snapshot.""" + + entity_entries = er.async_entries_for_config_entry(entity_registry, entry_id) + + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") + + +def assert_entities_alt( + hass: HomeAssistant, + entry_id: str, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that all entities match their alt snapshot.""" + entity_entries = er.async_entries_for_config_entry(entity_registry, entry_id) + + assert entity_entries + for entity_entry in entity_entries: + assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-statealt") diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py new file mode 100644 index 00000000000..49f0be9cca7 --- /dev/null +++ 
b/tests/components/tesla_fleet/conftest.py @@ -0,0 +1,133 @@ +"""Fixtures for Tessie.""" + +from __future__ import annotations + +from collections.abc import Generator +from copy import deepcopy +import time +from unittest.mock import AsyncMock, patch + +import jwt +import pytest + +from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES + +from .const import LIVE_STATUS, PRODUCTS, SITE_INFO, VEHICLE_DATA, VEHICLE_ONLINE + +from tests.common import MockConfigEntry + +UID = "abc-123" + + +@pytest.fixture(name="expires_at") +def mock_expires_at() -> int: + """Fixture to set the oauth token expiration time.""" + return time.time() + 3600 + + +@pytest.fixture(name="scopes") +def mock_scopes() -> list[str]: + """Fixture to set the scopes present in the OAuth token.""" + return SCOPES + + +@pytest.fixture +def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: + """Create Tesla Fleet entry in Home Assistant.""" + + access_token = jwt.encode( + { + "sub": UID, + "aud": [], + "scp": scopes, + "ou_code": "NA", + }, + key="", + algorithm="none", + ) + + return MockConfigEntry( + domain=DOMAIN, + title=UID, + unique_id=UID, + data={ + "auth_implementation": DOMAIN, + "token": { + "status": 0, + "userid": UID, + "access_token": access_token, + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": ",".join(scopes), + }, + }, + ) + + +@pytest.fixture(autouse=True) +def mock_products() -> Generator[AsyncMock]: + """Mock Tesla Fleet Api products method.""" + with patch( + "homeassistant.components.tesla_fleet.TeslaFleetApi.products", + return_value=PRODUCTS, + ) as mock_products: + yield mock_products + + +@pytest.fixture(autouse=True) +def mock_vehicle_state() -> Generator[AsyncMock]: + """Mock Tesla Fleet API Vehicle Specific vehicle method.""" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle", + return_value=VEHICLE_ONLINE, + ) as mock_vehicle: + yield mock_vehicle + + 
+@pytest.fixture(autouse=True) +def mock_vehicle_data() -> Generator[AsyncMock]: + """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle_data", + return_value=VEHICLE_DATA, + ) as mock_vehicle_data: + yield mock_vehicle_data + + +@pytest.fixture(autouse=True) +def mock_wake_up() -> Generator[AsyncMock]: + """Mock Tesla Fleet API Vehicle Specific wake_up method.""" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.wake_up", + return_value=VEHICLE_ONLINE, + ) as mock_wake_up: + yield mock_wake_up + + +@pytest.fixture(autouse=True) +def mock_live_status() -> Generator[AsyncMock]: + """Mock Tesla Fleet API Energy Specific live_status method.""" + with patch( + "homeassistant.components.tesla_fleet.EnergySpecific.live_status", + side_effect=lambda: deepcopy(LIVE_STATUS), + ) as mock_live_status: + yield mock_live_status + + +@pytest.fixture(autouse=True) +def mock_site_info() -> Generator[AsyncMock]: + """Mock Tesla Fleet API Energy Specific site_info method.""" + with patch( + "homeassistant.components.tesla_fleet.EnergySpecific.site_info", + side_effect=lambda: deepcopy(SITE_INFO), + ) as mock_live_status: + yield mock_live_status + + +@pytest.fixture(autouse=True) +def mock_find_server() -> Generator[AsyncMock]: + """Mock Tesla Fleet find server method.""" + with patch( + "homeassistant.components.tesla_fleet.TeslaFleetApi.find_server", + ) as mock_find_server: + yield mock_find_server diff --git a/tests/components/tesla_fleet/const.py b/tests/components/tesla_fleet/const.py new file mode 100644 index 00000000000..76b4ae20092 --- /dev/null +++ b/tests/components/tesla_fleet/const.py @@ -0,0 +1,28 @@ +"""Constants for the Tesla Fleet tests.""" + +from homeassistant.components.tesla_fleet.const import DOMAIN, TeslaFleetState + +from tests.common import load_json_object_fixture + +VEHICLE_ONLINE = {"response": {"state": TeslaFleetState.ONLINE}, "error": None} 
+VEHICLE_ASLEEP = {"response": {"state": TeslaFleetState.ASLEEP}, "error": None} + +PRODUCTS = load_json_object_fixture("products.json", DOMAIN) +VEHICLE_DATA = load_json_object_fixture("vehicle_data.json", DOMAIN) +VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) +LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) +SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) + +COMMAND_OK = {"response": {"result": True, "reason": ""}} +COMMAND_REASON = {"response": {"result": False, "reason": "already closed"}} +COMMAND_IGNORED_REASON = {"response": {"result": False, "reason": "already_set"}} +COMMAND_NOREASON = {"response": {"result": False}} # Unexpected +COMMAND_ERROR = { + "response": None, + "error": "vehicle unavailable: vehicle is offline or asleep", + "error_description": "", +} +COMMAND_NOERROR = {"answer": 42} +COMMAND_ERRORS = (COMMAND_REASON, COMMAND_NOREASON, COMMAND_ERROR, COMMAND_NOERROR) + +RESPONSE_OK = {"response": {}, "error": None} diff --git a/tests/components/tesla_fleet/fixtures/live_status.json b/tests/components/tesla_fleet/fixtures/live_status.json new file mode 100644 index 00000000000..486f9f4fadd --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/live_status.json @@ -0,0 +1,33 @@ +{ + "response": { + "solar_power": 1185, + "energy_left": 38896.47368421053, + "total_pack_energy": 40727, + "percentage_charged": 95.50537403739663, + "backup_capable": true, + "battery_power": 5060, + "load_power": 6245, + "grid_status": "Active", + "grid_services_active": false, + "grid_power": 0, + "grid_services_power": 0, + "generator_power": 0, + "island_status": "on_grid", + "storm_mode_active": false, + "timestamp": "2024-01-01T00:00:00+00:00", + "wall_connectors": [ + { + "din": "abd-123", + "wall_connector_state": 2, + "wall_connector_fault_state": 2, + "wall_connector_power": 0 + }, + { + "din": "bcd-234", + "wall_connector_state": 2, + "wall_connector_fault_state": 2, + "wall_connector_power": 
0 + } + ] + } +} diff --git a/tests/components/tesla_fleet/fixtures/products.json b/tests/components/tesla_fleet/fixtures/products.json new file mode 100644 index 00000000000..8da921a33f4 --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/products.json @@ -0,0 +1,131 @@ +{ + "response": [ + { + "id": 1234, + "user_id": 1234, + "vehicle_id": 1234, + "vin": "LRWXF7EK4KC700000", + "color": null, + "access_type": "OWNER", + "display_name": "Test", + "option_codes": null, + "cached_data": null, + "granular_access": { "hide_private": false }, + "tokens": ["abc", "def"], + "state": "asleep", + "in_service": false, + "id_s": "1234", + "calendar_enabled": true, + "api_version": 71, + "backseat_token": null, + "backseat_token_updated_at": null, + "ble_autopair_enrolled": false, + "vehicle_config": { + "aux_park_lamps": "Eu", + "badge_version": 1, + "can_accept_navigation_requests": true, + "can_actuate_trunks": true, + "car_special_type": "base", + "car_type": "model3", + "charge_port_type": "CCS", + "cop_user_set_temp_supported": false, + "dashcam_clip_save_supported": true, + "default_charge_to_max": false, + "driver_assist": "TeslaAP3", + "ece_restrictions": false, + "efficiency_package": "M32021", + "eu_vehicle": true, + "exterior_color": "DeepBlue", + "exterior_trim": "Black", + "exterior_trim_override": "", + "has_air_suspension": false, + "has_ludicrous_mode": false, + "has_seat_cooling": false, + "headlamp_type": "Global", + "interior_trim_type": "White2", + "key_version": 2, + "motorized_charge_port": true, + "paint_color_override": "0,9,25,0.7,0.04", + "performance_package": "Base", + "plg": true, + "pws": true, + "rear_drive_unit": "PM216MOSFET", + "rear_seat_heaters": 1, + "rear_seat_type": 0, + "rhd": true, + "roof_color": "RoofColorGlass", + "seat_type": null, + "spoiler_type": "None", + "sun_roof_installed": null, + "supports_qr_pairing": false, + "third_row_seats": "None", + "timestamp": 1705701487912, + "trim_badging": "74d", + "use_range_badging": 
true, + "utc_offset": 36000, + "webcam_selfie_supported": true, + "webcam_supported": true, + "wheel_type": "Pinwheel18CapKit" + }, + "command_signing": "allowed", + "release_notes_supported": true + }, + { + "energy_site_id": 123456, + "resource_type": "battery", + "site_name": "Energy Site", + "id": "ABC123", + "gateway_id": "ABC123", + "asset_site_id": "c0ffee", + "warp_site_number": "GA123456", + "energy_left": 23286.105263157893, + "total_pack_energy": 40804, + "percentage_charged": 57.068192488868476, + "battery_type": "ac_powerwall", + "backup_capable": true, + "battery_power": 14990, + "go_off_grid_test_banner_enabled": null, + "storm_mode_enabled": true, + "powerwall_onboarding_settings_set": true, + "powerwall_tesla_electric_interested_in": null, + "vpp_tour_enabled": null, + "sync_grid_alert_enabled": true, + "breaker_alert_enabled": true, + "components": { + "battery": true, + "battery_type": "ac_powerwall", + "solar": true, + "solar_type": "pv_panel", + "grid": true, + "load_meter": true, + "market_type": "residential", + "wall_connectors": [ + { + "device_id": "abc-123", + "din": "123-abc", + "is_active": true + }, + { + "device_id": "bcd-234", + "din": "234-bcd", + "is_active": true + } + ] + }, + "features": { + "rate_plan_manager_no_pricing_constraint": true + } + }, + { + "energy_site_id": 98765, + "components": { + "battery": false, + "solar": false, + "grid": false, + "load_meter": false, + "market_type": "residential" + } + } + ], + "count": 3 +} diff --git a/tests/components/tesla_fleet/fixtures/site_info.json b/tests/components/tesla_fleet/fixtures/site_info.json new file mode 100644 index 00000000000..60958bbabbb --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/site_info.json @@ -0,0 +1,127 @@ +{ + "response": { + "id": "1233-abcd", + "site_name": "Site", + "backup_reserve_percent": 0, + "default_real_mode": "self_consumption", + "installation_date": "2022-01-01T00:00:00+00:00", + "user_settings": { + 
"go_off_grid_test_banner_enabled": false, + "storm_mode_enabled": true, + "powerwall_onboarding_settings_set": true, + "powerwall_tesla_electric_interested_in": false, + "vpp_tour_enabled": true, + "sync_grid_alert_enabled": true, + "breaker_alert_enabled": false + }, + "components": { + "solar": true, + "solar_type": "pv_panel", + "battery": true, + "grid": true, + "backup": true, + "gateway": "teg", + "load_meter": true, + "tou_capable": true, + "storm_mode_capable": true, + "flex_energy_request_capable": false, + "car_charging_data_supported": false, + "off_grid_vehicle_charging_reserve_supported": true, + "vehicle_charging_performance_view_enabled": false, + "vehicle_charging_solar_offset_view_enabled": false, + "battery_solar_offset_view_enabled": true, + "solar_value_enabled": true, + "energy_value_header": "Energy Value", + "energy_value_subheader": "Estimated Value", + "energy_service_self_scheduling_enabled": true, + "show_grid_import_battery_source_cards": true, + "set_islanding_mode_enabled": true, + "wifi_commissioning_enabled": true, + "backup_time_remaining_enabled": true, + "battery_type": "ac_powerwall", + "configurable": true, + "grid_services_enabled": false, + "gateways": [ + { + "device_id": "gateway-id", + "din": "gateway-din", + "serial_number": "CN00000000J50D", + "part_number": "1152100-14-J", + "part_type": 10, + "part_name": "Tesla Backup Gateway 2", + "is_active": true, + "site_id": "1234-abcd", + "firmware_version": "24.4.0 0fe780c9", + "updated_datetime": "2024-05-14T00:00:00.000Z" + } + ], + "batteries": [ + { + "device_id": "battery-1-id", + "din": "battery-1-din", + "serial_number": "TG000000001DA5", + "part_number": "3012170-10-B", + "part_type": 2, + "part_name": "Powerwall 2", + "nameplate_max_charge_power": 5000, + "nameplate_max_discharge_power": 5000, + "nameplate_energy": 13500 + }, + { + "device_id": "battery-2-id", + "din": "battery-2-din", + "serial_number": "TG000000002DA5", + "part_number": "3012170-05-C", + "part_type": 
2, + "part_name": "Powerwall 2", + "nameplate_max_charge_power": 5000, + "nameplate_max_discharge_power": 5000, + "nameplate_energy": 13500 + } + ], + "wall_connectors": [ + { + "device_id": "123abc", + "din": "abd-123", + "part_name": "Gen 3 Wall Connector", + "is_active": true + }, + { + "device_id": "234bcd", + "din": "bcd-234", + "part_name": "Gen 3 Wall Connector", + "is_active": true + } + ], + "disallow_charge_from_grid_with_solar_installed": true, + "customer_preferred_export_rule": "pv_only", + "net_meter_mode": "battery_ok", + "system_alerts_enabled": true + }, + "version": "23.44.0 eb113390", + "battery_count": 2, + "tou_settings": { + "optimization_strategy": "economics", + "schedule": [ + { + "target": "off_peak", + "week_days": [1, 0], + "start_seconds": 0, + "end_seconds": 3600 + }, + { + "target": "peak", + "week_days": [1, 0], + "start_seconds": 3600, + "end_seconds": 0 + } + ] + }, + "nameplate_power": 15000, + "nameplate_energy": 40500, + "installation_time_zone": "", + "max_site_meter_power_ac": 1000000000, + "min_site_meter_power_ac": -1000000000, + "vpp_backup_reserve_percent": 0 + } +} diff --git a/tests/components/tesla_fleet/fixtures/vehicle_data.json b/tests/components/tesla_fleet/fixtures/vehicle_data.json new file mode 100644 index 00000000000..3845ae48559 --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/vehicle_data.json @@ -0,0 +1,282 @@ +{ + "response": { + "id": 1234, + "user_id": 1234, + "vehicle_id": 1234, + "vin": "LRWXF7EK4KC700000", + "color": null, + "access_type": "OWNER", + "granular_access": { + "hide_private": false + }, + "tokens": ["abc", "def"], + "state": "online", + "in_service": false, + "id_s": "1234", + "calendar_enabled": true, + "api_version": 71, + "backseat_token": null, + "backseat_token_updated_at": null, + "ble_autopair_enrolled": false, + "charge_state": { + "battery_heater_on": false, + "battery_level": 77, + "battery_range": 266.87, + "charge_amps": 16, + "charge_current_request": 16, + 
"charge_current_request_max": 16, + "charge_enable_request": true, + "charge_energy_added": 0, + "charge_limit_soc": 80, + "charge_limit_soc_max": 100, + "charge_limit_soc_min": 50, + "charge_limit_soc_std": 80, + "charge_miles_added_ideal": 0, + "charge_miles_added_rated": 0, + "charge_port_cold_weather_mode": false, + "charge_port_color": "", + "charge_port_door_open": true, + "charge_port_latch": "Engaged", + "charge_rate": 0, + "charger_actual_current": 0, + "charger_phases": null, + "charger_pilot_current": 16, + "charger_power": 0, + "charger_voltage": 2, + "charging_state": "Stopped", + "conn_charge_cable": "IEC", + "est_battery_range": 275.04, + "fast_charger_brand": "", + "fast_charger_present": false, + "fast_charger_type": "ACSingleWireCAN", + "ideal_battery_range": 266.87, + "max_range_charge_counter": 0, + "minutes_to_full_charge": 0, + "not_enough_power_to_heat": null, + "off_peak_charging_enabled": false, + "off_peak_charging_times": "all_week", + "off_peak_hours_end_time": 900, + "preconditioning_enabled": false, + "preconditioning_times": "all_week", + "scheduled_charging_mode": "Off", + "scheduled_charging_pending": false, + "scheduled_charging_start_time": null, + "scheduled_charging_start_time_app": 600, + "scheduled_departure_time": 1704837600, + "scheduled_departure_time_minutes": 480, + "supercharger_session_trip_planner": false, + "time_to_full_charge": 0, + "timestamp": 1705707520649, + "trip_charging": false, + "usable_battery_level": 77, + "user_charge_enable_request": null + }, + "climate_state": { + "allow_cabin_overheat_protection": true, + "auto_seat_climate_left": true, + "auto_seat_climate_right": true, + "auto_steering_wheel_heat": false, + "battery_heater": false, + "battery_heater_no_power": null, + "cabin_overheat_protection": "On", + "cabin_overheat_protection_actively_cooling": false, + "climate_keeper_mode": "keep", + "cop_activation_temperature": "High", + "defrost_mode": 0, + "driver_temp_setting": 22, + "fan_status": 0, + 
"hvac_auto_request": "On", + "inside_temp": 29.8, + "is_auto_conditioning_on": false, + "is_climate_on": true, + "is_front_defroster_on": false, + "is_preconditioning": false, + "is_rear_defroster_on": false, + "left_temp_direction": 251, + "max_avail_temp": 28, + "min_avail_temp": 15, + "outside_temp": 30, + "passenger_temp_setting": 22, + "remote_heater_control_enabled": false, + "right_temp_direction": 251, + "seat_heater_left": 0, + "seat_heater_rear_center": 0, + "seat_heater_rear_left": 0, + "seat_heater_rear_right": 0, + "seat_heater_right": 0, + "side_mirror_heaters": false, + "steering_wheel_heat_level": 0, + "steering_wheel_heater": false, + "supports_fan_only_cabin_overheat_protection": true, + "timestamp": 1705707520649, + "wiper_blade_heater": false + }, + "drive_state": { + "active_route_latitude": 30.2226265, + "active_route_longitude": -97.6236871, + "active_route_miles_to_arrival": 0.039491, + "active_route_minutes_to_arrival": 0.103577, + "active_route_traffic_minutes_delay": 0, + "gps_as_of": 1701129612, + "heading": 185, + "latitude": -30.222626, + "longitude": -97.6236871, + "native_latitude": -30.222626, + "native_location_supported": 1, + "native_longitude": -97.6236871, + "native_type": "wgs", + "power": -7, + "shift_state": null, + "speed": null, + "timestamp": 1705707520649 + }, + "gui_settings": { + "gui_24_hour_time": false, + "gui_charge_rate_units": "kW", + "gui_distance_units": "km/hr", + "gui_range_display": "Rated", + "gui_temperature_units": "C", + "gui_tirepressure_units": "Psi", + "show_range_units": false, + "timestamp": 1705707520649 + }, + "vehicle_config": { + "aux_park_lamps": "Eu", + "badge_version": 1, + "can_accept_navigation_requests": true, + "can_actuate_trunks": true, + "car_special_type": "base", + "car_type": "model3", + "charge_port_type": "CCS", + "cop_user_set_temp_supported": true, + "dashcam_clip_save_supported": true, + "default_charge_to_max": false, + "driver_assist": "TeslaAP3", + "ece_restrictions": false, 
+ "efficiency_package": "M32021", + "eu_vehicle": true, + "exterior_color": "DeepBlue", + "exterior_trim": "Black", + "exterior_trim_override": "", + "has_air_suspension": false, + "has_ludicrous_mode": false, + "has_seat_cooling": false, + "headlamp_type": "Global", + "interior_trim_type": "White2", + "key_version": 2, + "motorized_charge_port": true, + "paint_color_override": "0,9,25,0.7,0.04", + "performance_package": "Base", + "plg": true, + "pws": true, + "rear_drive_unit": "PM216MOSFET", + "rear_seat_heaters": 1, + "rear_seat_type": 0, + "rhd": true, + "roof_color": "RoofColorGlass", + "seat_type": null, + "spoiler_type": "None", + "sun_roof_installed": true, + "supports_qr_pairing": false, + "third_row_seats": "None", + "timestamp": 1705707520649, + "trim_badging": "74d", + "use_range_badging": true, + "utc_offset": 36000, + "webcam_selfie_supported": true, + "webcam_supported": true, + "wheel_type": "Pinwheel18CapKit" + }, + "vehicle_state": { + "api_version": 71, + "autopark_state_v2": "unavailable", + "calendar_supported": true, + "car_version": "2023.44.30.8 06f534d46010", + "center_display_state": 0, + "dashcam_clip_save_available": true, + "dashcam_state": "Recording", + "df": 0, + "dr": 0, + "fd_window": 0, + "feature_bitmask": "fbdffbff,187f", + "fp_window": 0, + "ft": 0, + "is_user_present": false, + "locked": false, + "media_info": { + "a2dp_source_name": "Pixel 8 Pro", + "audio_volume": 1.6667, + "audio_volume_increment": 0.333333, + "audio_volume_max": 10.333333, + "media_playback_status": "Playing", + "now_playing_album": "Elon Musk", + "now_playing_artist": "Walter Isaacson", + "now_playing_duration": 651000, + "now_playing_elapsed": 1000, + "now_playing_source": "Audible", + "now_playing_station": "Elon Musk", + "now_playing_title": "Chapter 51: Cybertruck: Tesla, 2018–2019" + }, + "media_state": { + "remote_control_enabled": true + }, + "notifications_supported": true, + "odometer": 6481.019282, + "parsed_calendar_supported": true, + "pf": 0, 
+ "pr": 0, + "rd_window": 0, + "remote_start": false, + "remote_start_enabled": true, + "remote_start_supported": true, + "rp_window": 0, + "rt": 0, + "santa_mode": 0, + "sentry_mode": false, + "sentry_mode_available": true, + "service_mode": false, + "service_mode_plus": false, + "software_update": { + "download_perc": 100, + "expected_duration_sec": 2700, + "install_perc": 1, + "status": "available", + "version": "2024.12.0.0" + }, + "speed_limit_mode": { + "active": false, + "current_limit_mph": 69, + "max_limit_mph": 120, + "min_limit_mph": 50, + "pin_code_set": true + }, + "sun_roof_state": "open", + "vehicle_state_sun_roof_percent_open": 20, + "timestamp": 1705707520649, + "tpms_hard_warning_fl": false, + "tpms_hard_warning_fr": false, + "tpms_hard_warning_rl": false, + "tpms_hard_warning_rr": false, + "tpms_last_seen_pressure_time_fl": 1705700812, + "tpms_last_seen_pressure_time_fr": 1705700793, + "tpms_last_seen_pressure_time_rl": 1705700794, + "tpms_last_seen_pressure_time_rr": 1705700823, + "tpms_pressure_fl": 2.775, + "tpms_pressure_fr": 2.8, + "tpms_pressure_rl": 2.775, + "tpms_pressure_rr": 2.775, + "tpms_rcp_front_value": 2.9, + "tpms_rcp_rear_value": 2.9, + "tpms_soft_warning_fl": false, + "tpms_soft_warning_fr": false, + "tpms_soft_warning_rl": false, + "tpms_soft_warning_rr": false, + "valet_mode": false, + "valet_pin_needed": false, + "vehicle_name": "Test", + "vehicle_self_test_progress": 0, + "vehicle_self_test_requested": false, + "webcam_available": true + } + } +} diff --git a/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json b/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json new file mode 100644 index 00000000000..76416982eba --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json @@ -0,0 +1,279 @@ +{ + "response": { + "id": 1234, + "user_id": 1234, + "vehicle_id": 1234, + "vin": "LRWXF7EK4KC700000", + "color": null, + "access_type": "OWNER", + "granular_access": { + "hide_private": false + }, + 
"tokens": ["abc", "def"], + "state": "online", + "in_service": false, + "id_s": "1234", + "calendar_enabled": true, + "api_version": 71, + "backseat_token": null, + "backseat_token_updated_at": null, + "ble_autopair_enrolled": false, + "charge_state": { + "battery_heater_on": true, + "battery_level": 77, + "battery_range": 266.87, + "charge_amps": 16, + "charge_current_request": 16, + "charge_current_request_max": 16, + "charge_enable_request": true, + "charge_energy_added": 0, + "charge_limit_soc": 80, + "charge_limit_soc_max": 100, + "charge_limit_soc_min": 50, + "charge_limit_soc_std": 80, + "charge_miles_added_ideal": 0, + "charge_miles_added_rated": 0, + "charge_port_cold_weather_mode": false, + "charge_port_color": "", + "charge_port_door_open": true, + "charge_port_latch": "Engaged", + "charge_rate": 0, + "charger_actual_current": 0, + "charger_phases": null, + "charger_pilot_current": 16, + "charger_power": 0, + "charger_voltage": 2, + "charging_state": "Stopped", + "conn_charge_cable": "IEC", + "est_battery_range": 275.04, + "fast_charger_brand": "", + "fast_charger_present": false, + "fast_charger_type": "ACSingleWireCAN", + "ideal_battery_range": 266.87, + "max_range_charge_counter": 0, + "minutes_to_full_charge": "bad value", + "not_enough_power_to_heat": null, + "off_peak_charging_enabled": false, + "off_peak_charging_times": "all_week", + "off_peak_hours_end_time": 900, + "preconditioning_enabled": false, + "preconditioning_times": "all_week", + "scheduled_charging_mode": "Off", + "scheduled_charging_pending": false, + "scheduled_charging_start_time": null, + "scheduled_charging_start_time_app": 600, + "scheduled_departure_time": 1704837600, + "scheduled_departure_time_minutes": 480, + "supercharger_session_trip_planner": false, + "time_to_full_charge": null, + "timestamp": null, + "trip_charging": false, + "usable_battery_level": 77, + "user_charge_enable_request": true + }, + "climate_state": { + "allow_cabin_overheat_protection": true, + 
"auto_seat_climate_left": false, + "auto_seat_climate_right": false, + "auto_steering_wheel_heat": false, + "battery_heater": true, + "battery_heater_no_power": null, + "cabin_overheat_protection": "Off", + "cabin_overheat_protection_actively_cooling": false, + "climate_keeper_mode": "off", + "cop_activation_temperature": "Low", + "defrost_mode": 0, + "driver_temp_setting": 22, + "fan_status": 0, + "hvac_auto_request": "On", + "inside_temp": 29.8, + "is_auto_conditioning_on": false, + "is_climate_on": false, + "is_front_defroster_on": false, + "is_preconditioning": false, + "is_rear_defroster_on": false, + "left_temp_direction": 251, + "max_avail_temp": 28, + "min_avail_temp": 15, + "outside_temp": 30, + "passenger_temp_setting": 22, + "remote_heater_control_enabled": false, + "right_temp_direction": 251, + "seat_heater_left": 0, + "seat_heater_rear_center": 0, + "seat_heater_rear_left": 0, + "seat_heater_rear_right": 0, + "seat_heater_right": 0, + "side_mirror_heaters": false, + "steering_wheel_heat_level": 0, + "steering_wheel_heater": false, + "supports_fan_only_cabin_overheat_protection": true, + "timestamp": 1705707520649, + "wiper_blade_heater": false + }, + "drive_state": { + "active_route_latitude": 30.2226265, + "active_route_longitude": -97.6236871, + "active_route_miles_to_arrival": 0, + "active_route_minutes_to_arrival": 0, + "active_route_traffic_minutes_delay": 0, + "gps_as_of": 1701129612, + "heading": 185, + "latitude": -30.222626, + "longitude": -97.6236871, + "native_latitude": -30.222626, + "native_location_supported": 1, + "native_longitude": -97.6236871, + "native_type": "wgs", + "power": -7, + "shift_state": null, + "speed": null, + "timestamp": 1705707520649 + }, + "gui_settings": { + "gui_24_hour_time": false, + "gui_charge_rate_units": "kW", + "gui_distance_units": "km/hr", + "gui_range_display": "Rated", + "gui_temperature_units": "C", + "gui_tirepressure_units": "Psi", + "show_range_units": false, + "timestamp": 1705707520649 + }, + 
"vehicle_config": { + "aux_park_lamps": "Eu", + "badge_version": 1, + "can_accept_navigation_requests": true, + "can_actuate_trunks": true, + "car_special_type": "base", + "car_type": "model3", + "charge_port_type": "CCS", + "cop_user_set_temp_supported": false, + "dashcam_clip_save_supported": true, + "default_charge_to_max": false, + "driver_assist": "TeslaAP3", + "ece_restrictions": false, + "efficiency_package": "M32021", + "eu_vehicle": true, + "exterior_color": "DeepBlue", + "exterior_trim": "Black", + "exterior_trim_override": "", + "has_air_suspension": false, + "has_ludicrous_mode": false, + "has_seat_cooling": false, + "headlamp_type": "Global", + "interior_trim_type": "White2", + "key_version": 2, + "motorized_charge_port": true, + "paint_color_override": "0,9,25,0.7,0.04", + "performance_package": "Base", + "plg": true, + "pws": true, + "rear_drive_unit": "PM216MOSFET", + "rear_seat_heaters": 1, + "rear_seat_type": 0, + "rhd": true, + "roof_color": "RoofColorGlass", + "seat_type": null, + "spoiler_type": "None", + "sun_roof_installed": null, + "supports_qr_pairing": false, + "third_row_seats": "None", + "timestamp": 1705707520649, + "trim_badging": "74d", + "use_range_badging": true, + "utc_offset": 36000, + "webcam_selfie_supported": true, + "webcam_supported": true, + "wheel_type": "Pinwheel18CapKit" + }, + "vehicle_state": { + "api_version": 71, + "autopark_state_v2": "unavailable", + "calendar_supported": true, + "car_version": "2023.44.30.8 06f534d46010", + "center_display_state": 0, + "dashcam_clip_save_available": true, + "dashcam_state": "Recording", + "df": 0, + "dr": 0, + "fd_window": 1, + "feature_bitmask": "fbdffbff,187f", + "fp_window": 1, + "ft": 1, + "is_user_present": true, + "locked": false, + "media_info": { + "audio_volume": 2.6667, + "audio_volume_increment": 0.333333, + "audio_volume_max": 10.333333, + "media_playback_status": "Stopped", + "now_playing_album": "", + "now_playing_artist": "", + "now_playing_duration": 0, + 
"now_playing_elapsed": 0, + "now_playing_source": "Spotify", + "now_playing_station": "", + "now_playing_title": "" + }, + "media_state": { + "remote_control_enabled": true + }, + "notifications_supported": true, + "odometer": 6481.019282, + "parsed_calendar_supported": true, + "pf": 0, + "pr": 0, + "rd_window": 1, + "remote_start": false, + "remote_start_enabled": true, + "remote_start_supported": true, + "rp_window": 1, + "rt": 1, + "santa_mode": 0, + "sentry_mode": false, + "sentry_mode_available": true, + "service_mode": false, + "service_mode_plus": false, + "software_update": { + "download_perc": 0, + "expected_duration_sec": 2700, + "install_perc": 1, + "status": "", + "version": " " + }, + "speed_limit_mode": { + "active": false, + "current_limit_mph": 69, + "max_limit_mph": 120, + "min_limit_mph": 50, + "pin_code_set": true + }, + "timestamp": 1705707520649, + "tpms_hard_warning_fl": false, + "tpms_hard_warning_fr": false, + "tpms_hard_warning_rl": false, + "tpms_hard_warning_rr": false, + "tpms_last_seen_pressure_time_fl": 1705700812, + "tpms_last_seen_pressure_time_fr": 1705700793, + "tpms_last_seen_pressure_time_rl": 1705700794, + "tpms_last_seen_pressure_time_rr": 1705700823, + "tpms_pressure_fl": 2.775, + "tpms_pressure_fr": 2.8, + "tpms_pressure_rl": 2.775, + "tpms_pressure_rr": 2.775, + "tpms_rcp_front_value": 2.9, + "tpms_rcp_rear_value": 2.9, + "tpms_soft_warning_fl": false, + "tpms_soft_warning_fr": false, + "tpms_soft_warning_rl": false, + "tpms_soft_warning_rr": false, + "valet_mode": false, + "valet_pin_needed": false, + "vehicle_name": "Test", + "vehicle_self_test_progress": 0, + "vehicle_self_test_requested": false, + "webcam_available": true + } + } +} diff --git a/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr b/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr new file mode 100644 index 00000000000..05ef4879de6 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr @@ -0,0 
+1,1571 @@ +# serializer version: 1 +# name: test_binary_sensor[binary_sensor.energy_site_backup_capable-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Backup capable', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_capable', + 'unique_id': '123456-backup_capable', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_backup_capable-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Backup capable', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_grid_services_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid services active', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_active', + 'unique_id': 
'123456-grid_services_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_grid_services_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_grid_services_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid services enabled', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_grid_services_enabled', + 'unique_id': '123456-components_grid_services_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_grid_services_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_battery_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 
'entity_id': 'binary_sensor.test_battery_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery heater', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_battery_heater_on', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_heater_on', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_battery_heater-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Battery heater', + }), + 'context': , + 'entity_id': 'binary_sensor.test_battery_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_cabin_overheat_protection_actively_cooling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cabin overheat protection actively cooling', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_cabin_overheat_protection_actively_cooling', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection_actively_cooling', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_cabin_overheat_protection_actively_cooling-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Cabin overheat protection actively cooling', + }), + 'context': , + 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_charge_cable-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_charge_cable', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge cable', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_conn_charge_cable', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_charge_cable-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test Charge cable', + }), + 'context': , + 'entity_id': 'binary_sensor.test_charge_cable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_charger_has_multiple_phases-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charger has multiple phases', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charger_phases', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_phases', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_charger_has_multiple_phases-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charger has multiple phases', + }), + 'context': , + 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_dashcam-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_dashcam', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Dashcam', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_dashcam_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dashcam_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_dashcam-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Dashcam', + }), + 'context': , + 'entity_id': 'binary_sensor.test_dashcam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_driver_door-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_front_driver_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front driver door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_df', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_df', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_driver_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Front driver door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_driver_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_driver_window-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_front_driver_window', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front driver window', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_fd_window', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fd_window', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_driver_window-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Front driver window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_driver_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_passenger_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_front_passenger_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front passenger door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_pf', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pf', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_passenger_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Front passenger door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_passenger_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_passenger_window-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_front_passenger_window', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front passenger window', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_fp_window', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fp_window', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_passenger_window-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Front passenger window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_passenger_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_preconditioning-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_preconditioning', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Preconditioning', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_is_preconditioning', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_is_preconditioning', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_preconditioning-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Preconditioning', + }), + 'context': , + 'entity_id': 'binary_sensor.test_preconditioning', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_preconditioning_enabled-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_preconditioning_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Preconditioning enabled', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_preconditioning_enabled', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_preconditioning_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_preconditioning_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Preconditioning enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.test_preconditioning_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_driver_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_rear_driver_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear driver door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_dr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dr', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_sensor[binary_sensor.test_rear_driver_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Rear driver door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_driver_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_driver_window-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_rear_driver_window', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear driver window', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_rd_window', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rd_window', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_driver_window-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Rear driver window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_driver_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_passenger_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_rear_passenger_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear passenger door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_pr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pr', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_passenger_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Rear passenger door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_passenger_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_passenger_window-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_rear_passenger_window', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear passenger window', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_rp_window', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rp_window', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_passenger_window-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Rear passenger window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_passenger_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_binary_sensor[binary_sensor.test_scheduled_charging_pending-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_scheduled_charging_pending', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Scheduled charging pending', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_scheduled_charging_pending', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_scheduled_charging_pending', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_scheduled_charging_pending-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Scheduled charging pending', + }), + 'context': , + 'entity_id': 'binary_sensor.test_scheduled_charging_pending', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state', + 'unique_id': 'LRWXF7EK4KC700000-state', + 
'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test Status', + }), + 'context': , + 'entity_id': 'binary_sensor.test_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure warning front left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_soft_warning_fl', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fl', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning front left', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 
'entity_category': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure warning front right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_soft_warning_fr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fr', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning front right', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure warning rear left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_soft_warning_rl', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rl', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_left-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning rear left', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure warning rear right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_soft_warning_rr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rr', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning rear right', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_trip_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_trip_charging', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Trip charging', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_trip_charging', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_trip_charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_trip_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Trip charging', + }), + 'context': , + 'entity_id': 'binary_sensor.test_trip_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_user_present-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_user_present', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'User present', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_is_user_present', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_is_user_present', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_user_present-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'presence', + 'friendly_name': 'Test User present', + }), + 'context': , + 'entity_id': 'binary_sensor.test_user_present', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_binary_sensor_refresh[binary_sensor.energy_site_backup_capable-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Backup capable', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.energy_site_grid_services_active-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.energy_site_grid_services_enabled-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_battery_heater-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Battery heater', + }), + 'context': , + 'entity_id': 'binary_sensor.test_battery_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_cabin_overheat_protection_actively_cooling-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Cabin overheat protection actively cooling', + }), + 'context': , + 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_charge_cable-statealt] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test Charge cable', + }), + 'context': , + 'entity_id': 'binary_sensor.test_charge_cable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_charger_has_multiple_phases-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charger has multiple phases', + }), + 'context': , + 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_dashcam-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Dashcam', + }), + 'context': , + 'entity_id': 'binary_sensor.test_dashcam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_front_driver_door-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Front driver door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_driver_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_front_driver_window-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Front driver window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_driver_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_front_passenger_door-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Front passenger door', + }), + 'context': , + 'entity_id': 
'binary_sensor.test_front_passenger_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_front_passenger_window-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Front passenger window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_passenger_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_preconditioning-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Preconditioning', + }), + 'context': , + 'entity_id': 'binary_sensor.test_preconditioning', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_preconditioning_enabled-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Preconditioning enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.test_preconditioning_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_rear_driver_door-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Rear driver door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_driver_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_rear_driver_window-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Rear driver window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_driver_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensor_refresh[binary_sensor.test_rear_passenger_door-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Rear passenger door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_passenger_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_rear_passenger_window-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Rear passenger window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_passenger_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_scheduled_charging_pending-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Scheduled charging pending', + }), + 'context': , + 'entity_id': 'binary_sensor.test_scheduled_charging_pending', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_status-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test Status', + }), + 'context': , + 'entity_id': 'binary_sensor.test_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_front_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning front left', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_front_right-statealt] + StateSnapshot({ 
+ 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning front right', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_rear_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning rear left', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_rear_right-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning rear right', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_trip_charging-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Trip charging', + }), + 'context': , + 'entity_id': 'binary_sensor.test_trip_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_user_present-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'presence', + 'friendly_name': 'Test User present', + }), + 'context': , + 'entity_id': 'binary_sensor.test_user_present', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr new file mode 100644 index 
00000000000..194eda6fcff --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr @@ -0,0 +1,101 @@ +# serializer version: 1 +# name: test_device_tracker[device_tracker.test_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.test_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Location', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'location', + 'unique_id': 'LRWXF7EK4KC700000-location', + 'unit_of_measurement': None, + }) +# --- +# name: test_device_tracker[device_tracker.test_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Location', + 'gps_accuracy': 0, + 'latitude': -30.222626, + 'longitude': -97.6236871, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- +# name: test_device_tracker[device_tracker.test_route-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.test_route', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Route', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'route', + 'unique_id': 'LRWXF7EK4KC700000-route', + 'unit_of_measurement': None, + }) +# --- +# name: test_device_tracker[device_tracker.test_route-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Route', + 'gps_accuracy': 0, + 'latitude': 30.2226265, + 'longitude': -97.6236871, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_route', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..902c7af131e --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr @@ -0,0 +1,436 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'energysites': list([ + dict({ + 'info': dict({ + 'backup_reserve_percent': 0, + 'battery_count': 2, + 'components_backup': True, + 'components_backup_time_remaining_enabled': True, + 'components_batteries': list([ + dict({ + 'device_id': 'battery-1-id', + 'din': 'battery-1-din', + 'nameplate_energy': 13500, + 'nameplate_max_charge_power': 5000, + 'nameplate_max_discharge_power': 5000, + 'part_name': 'Powerwall 2', + 'part_number': '3012170-10-B', + 'part_type': 2, + 'serial_number': '**REDACTED**', + }), + dict({ + 'device_id': 'battery-2-id', + 'din': 'battery-2-din', + 'nameplate_energy': 13500, + 'nameplate_max_charge_power': 5000, + 'nameplate_max_discharge_power': 5000, + 'part_name': 'Powerwall 2', + 'part_number': '3012170-05-C', + 'part_type': 2, + 'serial_number': '**REDACTED**', + }), + ]), + 'components_battery': True, + 'components_battery_solar_offset_view_enabled': True, + 'components_battery_type': 'ac_powerwall', + 'components_car_charging_data_supported': False, + 'components_configurable': True, + 'components_customer_preferred_export_rule': 'pv_only', + 
'components_disallow_charge_from_grid_with_solar_installed': True, + 'components_energy_service_self_scheduling_enabled': True, + 'components_energy_value_header': 'Energy Value', + 'components_energy_value_subheader': 'Estimated Value', + 'components_flex_energy_request_capable': False, + 'components_gateway': 'teg', + 'components_gateways': list([ + dict({ + 'device_id': 'gateway-id', + 'din': 'gateway-din', + 'firmware_version': '24.4.0 0fe780c9', + 'is_active': True, + 'part_name': 'Tesla Backup Gateway 2', + 'part_number': '1152100-14-J', + 'part_type': 10, + 'serial_number': '**REDACTED**', + 'site_id': '1234-abcd', + 'updated_datetime': '2024-05-14T00:00:00.000Z', + }), + ]), + 'components_grid': True, + 'components_grid_services_enabled': False, + 'components_load_meter': True, + 'components_net_meter_mode': 'battery_ok', + 'components_off_grid_vehicle_charging_reserve_supported': True, + 'components_set_islanding_mode_enabled': True, + 'components_show_grid_import_battery_source_cards': True, + 'components_solar': True, + 'components_solar_type': 'pv_panel', + 'components_solar_value_enabled': True, + 'components_storm_mode_capable': True, + 'components_system_alerts_enabled': True, + 'components_tou_capable': True, + 'components_vehicle_charging_performance_view_enabled': False, + 'components_vehicle_charging_solar_offset_view_enabled': False, + 'components_wall_connectors': list([ + dict({ + 'device_id': '123abc', + 'din': 'abd-123', + 'is_active': True, + 'part_name': 'Gen 3 Wall Connector', + }), + dict({ + 'device_id': '234bcd', + 'din': 'bcd-234', + 'is_active': True, + 'part_name': 'Gen 3 Wall Connector', + }), + ]), + 'components_wifi_commissioning_enabled': True, + 'default_real_mode': 'self_consumption', + 'id': '1233-abcd', + 'installation_date': '**REDACTED**', + 'installation_time_zone': '', + 'max_site_meter_power_ac': 1000000000, + 'min_site_meter_power_ac': -1000000000, + 'nameplate_energy': 40500, + 'nameplate_power': 15000, + 'site_name': 
'Site', + 'tou_settings_optimization_strategy': 'economics', + 'tou_settings_schedule': list([ + dict({ + 'end_seconds': 3600, + 'start_seconds': 0, + 'target': 'off_peak', + 'week_days': list([ + 1, + 0, + ]), + }), + dict({ + 'end_seconds': 0, + 'start_seconds': 3600, + 'target': 'peak', + 'week_days': list([ + 1, + 0, + ]), + }), + ]), + 'user_settings_breaker_alert_enabled': False, + 'user_settings_go_off_grid_test_banner_enabled': False, + 'user_settings_powerwall_onboarding_settings_set': True, + 'user_settings_powerwall_tesla_electric_interested_in': False, + 'user_settings_storm_mode_enabled': True, + 'user_settings_sync_grid_alert_enabled': True, + 'user_settings_vpp_tour_enabled': True, + 'version': '23.44.0 eb113390', + 'vpp_backup_reserve_percent': 0, + }), + 'live': dict({ + 'backup_capable': True, + 'battery_power': 5060, + 'energy_left': 38896.47368421053, + 'generator_power': 0, + 'grid_power': 0, + 'grid_services_active': False, + 'grid_services_power': 0, + 'grid_status': 'Active', + 'island_status': 'on_grid', + 'load_power': 6245, + 'percentage_charged': 95.50537403739663, + 'solar_power': 1185, + 'storm_mode_active': False, + 'timestamp': '2024-01-01T00:00:00+00:00', + 'total_pack_energy': 40727, + 'wall_connectors': dict({ + 'abd-123': dict({ + 'din': 'abd-123', + 'wall_connector_fault_state': 2, + 'wall_connector_power': 0, + 'wall_connector_state': 2, + }), + 'bcd-234': dict({ + 'din': 'bcd-234', + 'wall_connector_fault_state': 2, + 'wall_connector_power': 0, + 'wall_connector_state': 2, + }), + }), + }), + }), + ]), + 'scopes': list([ + 'openid', + 'offline_access', + 'vehicle_device_data', + 'vehicle_cmds', + 'vehicle_charging_cmds', + 'energy_device_data', + 'energy_cmds', + ]), + 'vehicles': list([ + dict({ + 'data': dict({ + 'access_type': 'OWNER', + 'api_version': 71, + 'backseat_token': None, + 'backseat_token_updated_at': None, + 'ble_autopair_enrolled': False, + 'calendar_enabled': True, + 'charge_state_battery_heater_on': False, + 
'charge_state_battery_level': 77, + 'charge_state_battery_range': 266.87, + 'charge_state_charge_amps': 16, + 'charge_state_charge_current_request': 16, + 'charge_state_charge_current_request_max': 16, + 'charge_state_charge_enable_request': True, + 'charge_state_charge_energy_added': 0, + 'charge_state_charge_limit_soc': 80, + 'charge_state_charge_limit_soc_max': 100, + 'charge_state_charge_limit_soc_min': 50, + 'charge_state_charge_limit_soc_std': 80, + 'charge_state_charge_miles_added_ideal': 0, + 'charge_state_charge_miles_added_rated': 0, + 'charge_state_charge_port_cold_weather_mode': False, + 'charge_state_charge_port_color': '', + 'charge_state_charge_port_door_open': True, + 'charge_state_charge_port_latch': 'Engaged', + 'charge_state_charge_rate': 0, + 'charge_state_charger_actual_current': 0, + 'charge_state_charger_phases': None, + 'charge_state_charger_pilot_current': 16, + 'charge_state_charger_power': 0, + 'charge_state_charger_voltage': 2, + 'charge_state_charging_state': 'Stopped', + 'charge_state_conn_charge_cable': 'IEC', + 'charge_state_est_battery_range': 275.04, + 'charge_state_fast_charger_brand': '', + 'charge_state_fast_charger_present': False, + 'charge_state_fast_charger_type': 'ACSingleWireCAN', + 'charge_state_ideal_battery_range': 266.87, + 'charge_state_max_range_charge_counter': 0, + 'charge_state_minutes_to_full_charge': 0, + 'charge_state_not_enough_power_to_heat': None, + 'charge_state_off_peak_charging_enabled': False, + 'charge_state_off_peak_charging_times': 'all_week', + 'charge_state_off_peak_hours_end_time': 900, + 'charge_state_preconditioning_enabled': False, + 'charge_state_preconditioning_times': 'all_week', + 'charge_state_scheduled_charging_mode': 'Off', + 'charge_state_scheduled_charging_pending': False, + 'charge_state_scheduled_charging_start_time': None, + 'charge_state_scheduled_charging_start_time_app': 600, + 'charge_state_scheduled_departure_time': 1704837600, + 'charge_state_scheduled_departure_time_minutes': 
480, + 'charge_state_supercharger_session_trip_planner': False, + 'charge_state_time_to_full_charge': 0, + 'charge_state_timestamp': 1705707520649, + 'charge_state_trip_charging': False, + 'charge_state_usable_battery_level': 77, + 'charge_state_user_charge_enable_request': None, + 'climate_state_allow_cabin_overheat_protection': True, + 'climate_state_auto_seat_climate_left': True, + 'climate_state_auto_seat_climate_right': True, + 'climate_state_auto_steering_wheel_heat': False, + 'climate_state_battery_heater': False, + 'climate_state_battery_heater_no_power': None, + 'climate_state_cabin_overheat_protection': 'On', + 'climate_state_cabin_overheat_protection_actively_cooling': False, + 'climate_state_climate_keeper_mode': 'keep', + 'climate_state_cop_activation_temperature': 'High', + 'climate_state_defrost_mode': 0, + 'climate_state_driver_temp_setting': 22, + 'climate_state_fan_status': 0, + 'climate_state_hvac_auto_request': 'On', + 'climate_state_inside_temp': 29.8, + 'climate_state_is_auto_conditioning_on': False, + 'climate_state_is_climate_on': True, + 'climate_state_is_front_defroster_on': False, + 'climate_state_is_preconditioning': False, + 'climate_state_is_rear_defroster_on': False, + 'climate_state_left_temp_direction': 251, + 'climate_state_max_avail_temp': 28, + 'climate_state_min_avail_temp': 15, + 'climate_state_outside_temp': 30, + 'climate_state_passenger_temp_setting': 22, + 'climate_state_remote_heater_control_enabled': False, + 'climate_state_right_temp_direction': 251, + 'climate_state_seat_heater_left': 0, + 'climate_state_seat_heater_rear_center': 0, + 'climate_state_seat_heater_rear_left': 0, + 'climate_state_seat_heater_rear_right': 0, + 'climate_state_seat_heater_right': 0, + 'climate_state_side_mirror_heaters': False, + 'climate_state_steering_wheel_heat_level': 0, + 'climate_state_steering_wheel_heater': False, + 'climate_state_supports_fan_only_cabin_overheat_protection': True, + 'climate_state_timestamp': 1705707520649, + 
'climate_state_wiper_blade_heater': False, + 'color': None, + 'drive_state_active_route_latitude': '**REDACTED**', + 'drive_state_active_route_longitude': '**REDACTED**', + 'drive_state_active_route_miles_to_arrival': 0.039491, + 'drive_state_active_route_minutes_to_arrival': 0.103577, + 'drive_state_active_route_traffic_minutes_delay': 0, + 'drive_state_gps_as_of': 1701129612, + 'drive_state_heading': 185, + 'drive_state_latitude': '**REDACTED**', + 'drive_state_longitude': '**REDACTED**', + 'drive_state_native_latitude': '**REDACTED**', + 'drive_state_native_location_supported': 1, + 'drive_state_native_longitude': '**REDACTED**', + 'drive_state_native_type': 'wgs', + 'drive_state_power': -7, + 'drive_state_shift_state': None, + 'drive_state_speed': None, + 'drive_state_timestamp': 1705707520649, + 'granular_access_hide_private': False, + 'gui_settings_gui_24_hour_time': False, + 'gui_settings_gui_charge_rate_units': 'kW', + 'gui_settings_gui_distance_units': 'km/hr', + 'gui_settings_gui_range_display': 'Rated', + 'gui_settings_gui_temperature_units': 'C', + 'gui_settings_gui_tirepressure_units': 'Psi', + 'gui_settings_show_range_units': False, + 'gui_settings_timestamp': 1705707520649, + 'id': '**REDACTED**', + 'id_s': '**REDACTED**', + 'in_service': False, + 'state': 'online', + 'tokens': '**REDACTED**', + 'user_id': '**REDACTED**', + 'vehicle_config_aux_park_lamps': 'Eu', + 'vehicle_config_badge_version': 1, + 'vehicle_config_can_accept_navigation_requests': True, + 'vehicle_config_can_actuate_trunks': True, + 'vehicle_config_car_special_type': 'base', + 'vehicle_config_car_type': 'model3', + 'vehicle_config_charge_port_type': 'CCS', + 'vehicle_config_cop_user_set_temp_supported': True, + 'vehicle_config_dashcam_clip_save_supported': True, + 'vehicle_config_default_charge_to_max': False, + 'vehicle_config_driver_assist': 'TeslaAP3', + 'vehicle_config_ece_restrictions': False, + 'vehicle_config_efficiency_package': 'M32021', + 'vehicle_config_eu_vehicle': True, 
+ 'vehicle_config_exterior_color': 'DeepBlue', + 'vehicle_config_exterior_trim': 'Black', + 'vehicle_config_exterior_trim_override': '', + 'vehicle_config_has_air_suspension': False, + 'vehicle_config_has_ludicrous_mode': False, + 'vehicle_config_has_seat_cooling': False, + 'vehicle_config_headlamp_type': 'Global', + 'vehicle_config_interior_trim_type': 'White2', + 'vehicle_config_key_version': 2, + 'vehicle_config_motorized_charge_port': True, + 'vehicle_config_paint_color_override': '0,9,25,0.7,0.04', + 'vehicle_config_performance_package': 'Base', + 'vehicle_config_plg': True, + 'vehicle_config_pws': True, + 'vehicle_config_rear_drive_unit': 'PM216MOSFET', + 'vehicle_config_rear_seat_heaters': 1, + 'vehicle_config_rear_seat_type': 0, + 'vehicle_config_rhd': True, + 'vehicle_config_roof_color': 'RoofColorGlass', + 'vehicle_config_seat_type': None, + 'vehicle_config_spoiler_type': 'None', + 'vehicle_config_sun_roof_installed': True, + 'vehicle_config_supports_qr_pairing': False, + 'vehicle_config_third_row_seats': 'None', + 'vehicle_config_timestamp': 1705707520649, + 'vehicle_config_trim_badging': '74d', + 'vehicle_config_use_range_badging': True, + 'vehicle_config_utc_offset': 36000, + 'vehicle_config_webcam_selfie_supported': True, + 'vehicle_config_webcam_supported': True, + 'vehicle_config_wheel_type': 'Pinwheel18CapKit', + 'vehicle_id': '**REDACTED**', + 'vehicle_state_api_version': 71, + 'vehicle_state_autopark_state_v2': 'unavailable', + 'vehicle_state_calendar_supported': True, + 'vehicle_state_car_version': '2023.44.30.8 06f534d46010', + 'vehicle_state_center_display_state': 0, + 'vehicle_state_dashcam_clip_save_available': True, + 'vehicle_state_dashcam_state': 'Recording', + 'vehicle_state_df': 0, + 'vehicle_state_dr': 0, + 'vehicle_state_fd_window': 0, + 'vehicle_state_feature_bitmask': 'fbdffbff,187f', + 'vehicle_state_fp_window': 0, + 'vehicle_state_ft': 0, + 'vehicle_state_is_user_present': False, + 'vehicle_state_locked': False, + 
'vehicle_state_media_info_a2dp_source_name': 'Pixel 8 Pro', + 'vehicle_state_media_info_audio_volume': 1.6667, + 'vehicle_state_media_info_audio_volume_increment': 0.333333, + 'vehicle_state_media_info_audio_volume_max': 10.333333, + 'vehicle_state_media_info_media_playback_status': 'Playing', + 'vehicle_state_media_info_now_playing_album': 'Elon Musk', + 'vehicle_state_media_info_now_playing_artist': 'Walter Isaacson', + 'vehicle_state_media_info_now_playing_duration': 651000, + 'vehicle_state_media_info_now_playing_elapsed': 1000, + 'vehicle_state_media_info_now_playing_source': 'Audible', + 'vehicle_state_media_info_now_playing_station': 'Elon Musk', + 'vehicle_state_media_info_now_playing_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', + 'vehicle_state_media_state_remote_control_enabled': True, + 'vehicle_state_notifications_supported': True, + 'vehicle_state_odometer': 6481.019282, + 'vehicle_state_parsed_calendar_supported': True, + 'vehicle_state_pf': 0, + 'vehicle_state_pr': 0, + 'vehicle_state_rd_window': 0, + 'vehicle_state_remote_start': False, + 'vehicle_state_remote_start_enabled': True, + 'vehicle_state_remote_start_supported': True, + 'vehicle_state_rp_window': 0, + 'vehicle_state_rt': 0, + 'vehicle_state_santa_mode': 0, + 'vehicle_state_sentry_mode': False, + 'vehicle_state_sentry_mode_available': True, + 'vehicle_state_service_mode': False, + 'vehicle_state_service_mode_plus': False, + 'vehicle_state_software_update_download_perc': 100, + 'vehicle_state_software_update_expected_duration_sec': 2700, + 'vehicle_state_software_update_install_perc': 1, + 'vehicle_state_software_update_status': 'available', + 'vehicle_state_software_update_version': '2024.12.0.0', + 'vehicle_state_speed_limit_mode_active': False, + 'vehicle_state_speed_limit_mode_current_limit_mph': 69, + 'vehicle_state_speed_limit_mode_max_limit_mph': 120, + 'vehicle_state_speed_limit_mode_min_limit_mph': 50, + 'vehicle_state_speed_limit_mode_pin_code_set': True, + 
'vehicle_state_sun_roof_state': 'open', + 'vehicle_state_timestamp': 1705707520649, + 'vehicle_state_tpms_hard_warning_fl': False, + 'vehicle_state_tpms_hard_warning_fr': False, + 'vehicle_state_tpms_hard_warning_rl': False, + 'vehicle_state_tpms_hard_warning_rr': False, + 'vehicle_state_tpms_last_seen_pressure_time_fl': 1705700812, + 'vehicle_state_tpms_last_seen_pressure_time_fr': 1705700793, + 'vehicle_state_tpms_last_seen_pressure_time_rl': 1705700794, + 'vehicle_state_tpms_last_seen_pressure_time_rr': 1705700823, + 'vehicle_state_tpms_pressure_fl': 2.775, + 'vehicle_state_tpms_pressure_fr': 2.8, + 'vehicle_state_tpms_pressure_rl': 2.775, + 'vehicle_state_tpms_pressure_rr': 2.775, + 'vehicle_state_tpms_rcp_front_value': 2.9, + 'vehicle_state_tpms_rcp_rear_value': 2.9, + 'vehicle_state_tpms_soft_warning_fl': False, + 'vehicle_state_tpms_soft_warning_fr': False, + 'vehicle_state_tpms_soft_warning_rl': False, + 'vehicle_state_tpms_soft_warning_rr': False, + 'vehicle_state_valet_mode': False, + 'vehicle_state_valet_pin_needed': False, + 'vehicle_state_vehicle_name': 'Test', + 'vehicle_state_vehicle_self_test_progress': 0, + 'vehicle_state_vehicle_self_test_requested': False, + 'vehicle_state_vehicle_state_sun_roof_percent_open': 20, + 'vehicle_state_webcam_available': True, + 'vin': '**REDACTED**', + }), + }), + ]), + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_init.ambr b/tests/components/tesla_fleet/snapshots/test_init.ambr new file mode 100644 index 00000000000..e9828db9f1b --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_init.ambr @@ -0,0 +1,129 @@ +# serializer version: 1 +# name: test_devices[{('tesla_fleet', '123456')}] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tesla_fleet', + '123456', + ), + }), + 'is_new': False, + 
'labels': set({ + }), + 'manufacturer': 'Tesla', + 'model': 'Powerwall 2, Tesla Backup Gateway 2', + 'model_id': None, + 'name': 'Energy Site', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '123456', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices[{('tesla_fleet', 'LRWXF7EK4KC700000')}] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tesla_fleet', + 'LRWXF7EK4KC700000', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tesla', + 'model': 'Model X', + 'model_id': None, + 'name': 'Test', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': 'LRWXF7EK4KC700000', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices[{('tesla_fleet', 'abd-123')}] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tesla_fleet', + 'abd-123', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tesla', + 'model': 'Gen 3 Wall Connector', + 'model_id': None, + 'name': 'Wall Connector', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '123', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- +# name: test_devices[{('tesla_fleet', 'bcd-234')}] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tesla_fleet', + 'bcd-234', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 
'manufacturer': 'Tesla', + 'model': 'Gen 3 Wall Connector', + 'model_id': None, + 'name': 'Wall Connector', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '234', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_sensor.ambr b/tests/components/tesla_fleet/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c6a4860056a --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_sensor.ambr @@ -0,0 +1,3363 @@ +# serializer version: 1 +# name: test_sensors[sensor.energy_site_battery_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_battery_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_power', + 'unique_id': '123456-battery_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_battery_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Battery power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.06', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_power-statealt] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Battery power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.06', + }) +# --- +# name: test_sensors[sensor.energy_site_energy_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.energy_site_energy_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_left', + 'unique_id': '123456-energy_left', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_energy_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Energy left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_energy_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.8964736842105', + }) +# --- +# name: test_sensors[sensor.energy_site_energy_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Energy left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_energy_left', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '38.8964736842105', + }) +# --- +# name: test_sensors[sensor.energy_site_generator_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_generator_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Generator power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'generator_power', + 'unique_id': '123456-generator_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_generator_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Generator power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_generator_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_generator_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Generator power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_generator_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), 
+ 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_power', + 'unique_id': '123456-grid_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_services_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid services power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_power', + 'unique_id': '123456-grid_services_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid services power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid services power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_load_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_load_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 
None, + 'original_name': 'Load power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'load_power', + 'unique_id': '123456-load_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_load_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Load power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_load_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.245', + }) +# --- +# name: test_sensors[sensor.energy_site_load_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Load power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_load_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.245', + }) +# --- +# name: test_sensors[sensor.energy_site_percentage_charged-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_percentage_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Percentage charged', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'percentage_charged', + 'unique_id': '123456-percentage_charged', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.energy_site_percentage_charged-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Percentage charged', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_percentage_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95.5053740373966', + }) +# --- +# name: test_sensors[sensor.energy_site_percentage_charged-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Percentage charged', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_percentage_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95.5053740373966', + }) +# --- +# name: test_sensors[sensor.energy_site_solar_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_solar_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Solar power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'solar_power', + 'unique_id': '123456-solar_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_solar_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Solar power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_power', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.185', + }) +# --- +# name: test_sensors[sensor.energy_site_solar_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Solar power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.185', + }) +# --- +# name: test_sensors[sensor.energy_site_total_pack_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.energy_site_total_pack_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total pack energy', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_pack_energy', + 'unique_id': '123456-total_pack_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_total_pack_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Total pack energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_total_pack_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.727', + }) +# --- +# name: test_sensors[sensor.energy_site_total_pack_energy-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'energy_storage', + 'friendly_name': 'Energy Site Total pack energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_total_pack_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.727', + }) +# --- +# name: test_sensors[sensor.energy_site_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'version', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'version', + 'unique_id': '123456-version', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.energy_site_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site version', + }), + 'context': , + 'entity_id': 'sensor.energy_site_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.44.0 eb113390', + }) +# --- +# name: test_sensors[sensor.energy_site_version-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site version', + }), + 'context': , + 'entity_id': 'sensor.energy_site_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.44.0 eb113390', + }) +# --- +# name: test_sensors[sensor.energy_site_vpp_backup_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.energy_site_vpp_backup_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VPP backup reserve', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vpp_backup_reserve_percent', + 'unique_id': '123456-vpp_backup_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.energy_site_vpp_backup_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site VPP backup reserve', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_vpp_backup_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.energy_site_vpp_backup_reserve-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site VPP backup reserve', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_vpp_backup_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery level', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'charge_state_battery_level', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_sensors[sensor.test_battery_level-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_sensors[sensor.test_battery_range-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_battery_range', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery range', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_battery_range', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_range', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_battery_range-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 
'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '429.48563328', + }) +# --- +# name: test_sensors[sensor.test_battery_range-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '429.48563328', + }) +# --- +# name: test_sensors[sensor.test_charge_cable-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_charge_cable', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charge cable', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_conn_charge_cable', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_charge_cable-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charge cable', + }), + 'context': , + 'entity_id': 'sensor.test_charge_cable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'IEC', + }) +# --- +# name: test_sensors[sensor.test_charge_cable-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charge cable', + }), + 'context': , + 'entity_id': 'sensor.test_charge_cable', + 'last_changed': , + 'last_reported': 
, + 'last_updated': , + 'state': 'IEC', + }) +# --- +# name: test_sensors[sensor.test_charge_energy_added-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_charge_energy_added', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge energy added', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charge_energy_added', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_energy_added', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charge_energy_added-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charge_energy_added', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charge_energy_added-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charge_energy_added', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charge_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': 
, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_charge_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge rate', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charge_rate', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_rate', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charge_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charge_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charge_rate-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charge_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_charger_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charger current', + 'platform': 
'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charger_actual_current', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_actual_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charger_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_current-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_charger_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charger power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charger_power', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charger_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Charger 
power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Charger power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_charger_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charger voltage', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charger_voltage', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charger_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.test_charger_voltage-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Test Charger voltage', + 'state_class': , 
+ 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.test_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charging', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charging_state', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charging_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: test_sensors[sensor.test_charging-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: 
test_sensors[sensor.test_distance_to_arrival-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_distance_to_arrival', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Distance to arrival', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_active_route_miles_to_arrival', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_miles_to_arrival', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_distance_to_arrival-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_distance_to_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.063555', + }) +# --- +# name: test_sensors[sensor.test_distance_to_arrival-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_distance_to_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_driver_temperature_setting-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_driver_temperature_setting', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Driver temperature setting', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_driver_temp_setting', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_driver_temp_setting', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_driver_temperature_setting-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_driver_temperature_setting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_sensors[sensor.test_driver_temperature_setting-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_driver_temperature_setting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_sensors[sensor.test_estimate_battery_range-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_estimate_battery_range', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Estimate battery range', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_est_battery_range', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_est_battery_range', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_estimate_battery_range-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_estimate_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '442.63397376', + }) +# --- +# name: test_sensors[sensor.test_estimate_battery_range-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_estimate_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '442.63397376', + }) +# --- +# name: test_sensors[sensor.test_fast_charger_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_fast_charger_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fast charger type', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'charge_state_fast_charger_type', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_fast_charger_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_fast_charger_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Fast charger type', + }), + 'context': , + 'entity_id': 'sensor.test_fast_charger_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ACSingleWireCAN', + }) +# --- +# name: test_sensors[sensor.test_fast_charger_type-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Fast charger type', + }), + 'context': , + 'entity_id': 'sensor.test_fast_charger_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ACSingleWireCAN', + }) +# --- +# name: test_sensors[sensor.test_ideal_battery_range-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ideal_battery_range', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ideal battery range', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_ideal_battery_range', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_ideal_battery_range', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_ideal_battery_range-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test 
Ideal battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_ideal_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '429.48563328', + }) +# --- +# name: test_sensors[sensor.test_ideal_battery_range-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_ideal_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '429.48563328', + }) +# --- +# name: test_sensors[sensor.test_inside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_inside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inside temperature', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_inside_temp', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_inside_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_inside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_inside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '29.8', + }) +# --- +# name: 
test_sensors[sensor.test_inside_temperature-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_inside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '29.8', + }) +# --- +# name: test_sensors[sensor.test_odometer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_odometer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Odometer', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_odometer', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_odometer', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_odometer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_odometer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10430.189495371', + }) +# --- +# name: test_sensors[sensor.test_odometer-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_odometer', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '10430.189495371', + }) +# --- +# name: test_sensors[sensor.test_outside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_outside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outside temperature', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_outside_temp', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_outside_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_outside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensors[sensor.test_outside_temperature-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensors[sensor.test_passenger_temperature_setting-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_passenger_temperature_setting', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Passenger temperature setting', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_passenger_temp_setting', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_passenger_temp_setting', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_passenger_temperature_setting-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_passenger_temperature_setting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_sensors[sensor.test_passenger_temperature_setting-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_passenger_temperature_setting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_sensors[sensor.test_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_power', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_power', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-7', + }) +# --- +# name: test_sensors[sensor.test_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-7', + }) +# --- +# name: test_sensors[sensor.test_shift_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_shift_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Shift state', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_shift_state', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_shift_state', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensors[sensor.test_shift_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_shift_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'p', + }) +# --- +# name: test_sensors[sensor.test_shift_state-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_shift_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'p', + }) +# --- +# name: test_sensors[sensor.test_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_speed', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_speed', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_speed-statealt] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_state_of_charge_at_arrival-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_state_of_charge_at_arrival', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State of charge at arrival', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_active_route_energy_at_arrival', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_energy_at_arrival', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_state_of_charge_at_arrival-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_state_of_charge_at_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.test_state_of_charge_at_arrival-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_state_of_charge_at_arrival', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.test_time_to_arrival-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_time_to_arrival', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Time to arrival', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_active_route_minutes_to_arrival', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_minutes_to_arrival', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_time_to_arrival-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Time to arrival', + }), + 'context': , + 'entity_id': 'sensor.test_time_to_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:06+00:00', + }) +# --- +# name: test_sensors[sensor.test_time_to_arrival-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Time to arrival', + }), + 'context': , + 'entity_id': 'sensor.test_time_to_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.test_time_to_full_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_time_to_full_charge', + 'has_entity_name': True, + 'hidden_by': None, 
+ 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Time to full charge', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_minutes_to_full_charge', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_minutes_to_full_charge', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_time_to_full_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Time to full charge', + }), + 'context': , + 'entity_id': 'sensor.test_time_to_full_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.test_time_to_full_charge-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Time to full charge', + }), + 'context': , + 'entity_id': 'sensor.test_time_to_full_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_tire_pressure_front_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure front left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_pressure_fl', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fl', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_tire_pressure_front_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure front right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_pressure_fr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fr', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.6105682912393', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_right-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.6105682912393', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_tire_pressure_rear_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure rear left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_pressure_rl', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rl', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_left-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_tire_pressure_rear_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure rear right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_pressure_rr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rr', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.test_tire_pressure_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_right-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_traffic_delay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_traffic_delay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Traffic delay', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_active_route_traffic_minutes_delay', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_traffic_minutes_delay', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_traffic_delay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_traffic_delay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_traffic_delay-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 
'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_traffic_delay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_usable_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_usable_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Usable battery level', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_usable_battery_level', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_usable_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_usable_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_usable_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_sensors[sensor.test_usable_battery_level-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_usable_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_fault_state_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fault state code', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_fault_state', + 'unique_id': '123456-abd-123-wall_connector_fault_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Fault state code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_fault_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Fault state code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_fault_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_fault_state_code_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fault 
state code', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_fault_state', + 'unique_id': '123456-bcd-234-wall_connector_fault_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Fault state code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_fault_state_code_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code_2-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Fault state code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_fault_state_code_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_power', + 'unique_id': '123456-abd-123-wall_connector_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.wall_connector_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_power_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_power_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_power', + 'unique_id': '123456-bcd-234-wall_connector_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.wall_connector_power_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: 
test_sensors[sensor.wall_connector_power_2-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_state_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'State code', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_state', + 'unique_id': '123456-abd-123-wall_connector_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector State code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector State code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_state_code_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'State code', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_state', + 'unique_id': '123456-bcd-234-wall_connector_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector State code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_code_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code_2-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector State code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_code_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_vehicle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Vehicle', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vin', + 'unique_id': 
'123456-abd-123-vin', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_vehicle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_vehicle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_vehicle_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Vehicle', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vin', + 'unique_id': '123456-bcd-234-vin', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_vehicle_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle_2-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 
'sensor.wall_connector_vehicle_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/tesla_fleet/test_binary_sensors.py b/tests/components/tesla_fleet/test_binary_sensors.py new file mode 100644 index 00000000000..a759e5ced70 --- /dev/null +++ b/tests/components/tesla_fleet/test_binary_sensors.py @@ -0,0 +1,66 @@ +"""Test the Tesla Fleet binary sensor platform.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, assert_entities_alt, setup_platform +from .const import VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensor( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the binary sensor entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensor_refresh( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + freezer: FrozenDateTimeFactory, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the binary sensor entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR]) + + # Refresh + 
mock_vehicle_data.return_value = VEHICLE_DATA_ALT + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_binary_sensor_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the binary sensor entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR]) + state = hass.states.get("binary_sensor.test_status") + assert state.state == STATE_UNKNOWN diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py new file mode 100644 index 00000000000..81ba92f1e9c --- /dev/null +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -0,0 +1,309 @@ +"""Test the Tesla Fleet config flow.""" + +from unittest.mock import patch +from urllib.parse import parse_qs, urlparse + +import pytest + +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.tesla_fleet.const import ( + AUTHORIZE_URL, + CLIENT_ID, + DOMAIN, + SCOPES, + TOKEN_URL, +) +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + +REDIRECT = "https://example.com/auth/external/callback" +UNIQUE_ID = "uid" + + +@pytest.fixture +async def access_token(hass: HomeAssistant) -> str: + """Return a valid access token.""" + return config_entry_oauth2_flow._encode_jwt( + hass, 
+ { + "sub": UNIQUE_ID, + "aud": [], + "scp": [ + "vehicle_device_data", + "vehicle_cmds", + "vehicle_charging_cmds", + "energy_device_data", + "energy_cmds", + "offline_access", + "openid", + ], + "ou_code": "NA", + }, + ) + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token: str, +) -> None: + """Check full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + + assert result["type"] is FlowResultType.EXTERNAL_STEP + + assert result["url"].startswith(AUTHORIZE_URL) + parsed_url = urlparse(result["url"]) + parsed_query = parse_qs(parsed_url.query) + assert parsed_query["response_type"][0] == "code" + assert parsed_query["client_id"][0] == CLIENT_ID + assert parsed_query["redirect_uri"][0] == REDIRECT + assert parsed_query["state"][0] == state + assert parsed_query["scope"][0] == " ".join(SCOPES) + assert parsed_query["code_challenge"][0] is not None + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": 60, + }, + ) + with patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] 
== UNIQUE_ID + assert "result" in result + assert result["result"].unique_id == UNIQUE_ID + assert "token" in result["result"].data + assert result["result"].data["token"]["access_token"] == access_token + assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow_user_cred( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token: str, +) -> None: + """Check full flow.""" + + # Create user application credential + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential("user_client_id", "user_client_secret"), + "user_cred", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"implementation": "user_cred"} + ) + assert result["type"] is FlowResultType.EXTERNAL_STEP + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + + assert result["url"].startswith(AUTHORIZE_URL) + parsed_url = urlparse(result["url"]) + parsed_query = parse_qs(parsed_url.query) + assert parsed_query["response_type"][0] == "code" + assert parsed_query["client_id"][0] == "user_client_id" + assert parsed_query["redirect_uri"][0] == REDIRECT + assert parsed_query["state"][0] == state + assert parsed_query["scope"][0] == " ".join(SCOPES) + assert "code_challenge" not in parsed_query # Ensure not a PKCE flow + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + 
TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": 60, + }, + ) + with patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == UNIQUE_ID + assert "result" in result + assert result["result"].unique_id == UNIQUE_ID + assert "token" in result["result"].data + assert result["result"].data["token"]["access_token"] == access_token + assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauthentication( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token: str, +) -> None: + """Test Tesla Fleet reauthentication.""" + old_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=UNIQUE_ID, + version=1, + data={}, + ) + old_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + client = await hass_client_no_auth() + await client.get(f"/auth/external/callback?code=abcd&state={state}") + + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": 60, + }, + ) + + with 
patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth_account_mismatch( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token: str, +) -> None: + """Test Tesla Fleet reauthentication with different account.""" + old_entry = MockConfigEntry(domain=DOMAIN, unique_id="baduid", version=1, data={}) + old_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) + + flows = hass.config_entries.flow.async_progress() + result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + client = await hass_client_no_auth() + await client.get(f"/auth/external/callback?code=abcd&state={state}") + + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_account_mismatch" diff --git a/tests/components/tesla_fleet/test_device_tracker.py b/tests/components/tesla_fleet/test_device_tracker.py new file mode 100644 index 00000000000..e6f483d7953 --- /dev/null +++ b/tests/components/tesla_fleet/test_device_tracker.py @@ -0,0 +1,39 @@ 
+"""Test the Tesla Fleet device tracker platform.""" + +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, setup_platform + +from tests.common import MockConfigEntry + + +async def test_device_tracker( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the device tracker entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.DEVICE_TRACKER]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_device_tracker_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the device tracker entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.DEVICE_TRACKER]) + state = hass.states.get("device_tracker.test_location") + assert state.state == STATE_UNKNOWN diff --git a/tests/components/tesla_fleet/test_diagnostics.py b/tests/components/tesla_fleet/test_diagnostics.py new file mode 100644 index 00000000000..e0ef24097bb --- /dev/null +++ b/tests/components/tesla_fleet/test_diagnostics.py @@ -0,0 +1,27 @@ +"""Test the Tesla Fleet Diagnostics.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_platform + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + normal_config_entry: MockConfigEntry, +) -> None: + """Test diagnostics.""" + + await setup_platform(hass, normal_config_entry) + + diag = await get_diagnostics_for_config_entry( + hass, hass_client, normal_config_entry + ) + assert diag == snapshot diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py new file mode 100644 index 00000000000..b5eb21d1cdd --- /dev/null +++ b/tests/components/tesla_fleet/test_init.py @@ -0,0 +1,359 @@ +"""Test the Tesla Fleet init.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion +from tesla_fleet_api.exceptions import ( + InvalidRegion, + InvalidToken, + LibraryError, + LoginRequired, + OAuthExpired, + RateLimited, + TeslaFleetError, + VehicleOffline, +) + +from homeassistant.components.tesla_fleet.coordinator import ( + ENERGY_INTERVAL, + ENERGY_INTERVAL_SECONDS, + VEHICLE_INTERVAL, + VEHICLE_INTERVAL_SECONDS, + VEHICLE_WAIT, +) +from homeassistant.components.tesla_fleet.models import TeslaFleetData +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_platform +from .const import VEHICLE_ASLEEP, VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry, async_fire_time_changed + +ERRORS = [ + (InvalidToken, ConfigEntryState.SETUP_ERROR), + (OAuthExpired, ConfigEntryState.SETUP_ERROR), + (LoginRequired, ConfigEntryState.SETUP_ERROR), + (TeslaFleetError, ConfigEntryState.SETUP_RETRY), +] + + +async def test_load_unload( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, +) -> None: + """Test load and unload.""" + + await setup_platform(hass, normal_config_entry) + + assert normal_config_entry.state is ConfigEntryState.LOADED + assert isinstance(normal_config_entry.runtime_data, TeslaFleetData) + assert await hass.config_entries.async_unload(normal_config_entry.entry_id) + await hass.async_block_till_done() + assert normal_config_entry.state is ConfigEntryState.NOT_LOADED + assert not hasattr(normal_config_entry, "runtime_data") + + +@pytest.mark.parametrize(("side_effect", "state"), ERRORS) +async def test_init_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, +) -> None: + """Test init with errors.""" + + mock_products.side_effect = side_effect + await setup_platform(hass, normal_config_entry) + assert normal_config_entry.state is state + + +# Test devices +async def test_devices( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test device registry.""" + await setup_platform(hass, normal_config_entry) + devices = dr.async_entries_for_config_entry( + device_registry, normal_config_entry.entry_id + ) + + for device in devices: + assert device == snapshot(name=f"{device.identifiers}") + + +# Vehicle Coordinator +async def test_vehicle_refresh_offline( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_vehicle_state: AsyncMock, + mock_vehicle_data: AsyncMock, + 
freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh with an error.""" + await setup_platform(hass, normal_config_entry) + assert normal_config_entry.state is ConfigEntryState.LOADED + + mock_vehicle_state.assert_called_once() + mock_vehicle_data.assert_called_once() + mock_vehicle_state.reset_mock() + mock_vehicle_data.reset_mock() + + # Then the vehicle goes offline + mock_vehicle_data.side_effect = VehicleOffline + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_vehicle_state.assert_not_called() + mock_vehicle_data.assert_called_once() + mock_vehicle_data.reset_mock() + + # And stays offline + mock_vehicle_state.return_value = VEHICLE_ASLEEP + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_vehicle_state.assert_called_once() + mock_vehicle_data.assert_not_called() + + +@pytest.mark.parametrize(("side_effect"), ERRORS) +async def test_vehicle_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_vehicle_data: AsyncMock, + side_effect: TeslaFleetError, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh makes entity unavailable.""" + + await setup_platform(hass, normal_config_entry) + + mock_vehicle_data.side_effect = side_effect + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (state := hass.states.get("sensor.test_battery_level")) + assert state.state == "unavailable" + + +async def test_vehicle_refresh_ratelimited( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_vehicle_data: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh handles 429.""" + + mock_vehicle_data.side_effect = RateLimited( + {"after": VEHICLE_INTERVAL_SECONDS + 10} + ) + await setup_platform(hass, normal_config_entry) + + assert (state := 
hass.states.get("sensor.test_battery_level")) + assert state.state == "unknown" + assert mock_vehicle_data.call_count == 1 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should not call for another 10 seconds + assert mock_vehicle_data.call_count == 1 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_vehicle_data.call_count == 2 + + +async def test_vehicle_sleep( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_vehicle_data: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh with an error.""" + await setup_platform(hass, normal_config_entry) + assert mock_vehicle_data.call_count == 1 + + freezer.tick(VEHICLE_WAIT + VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # Let vehicle sleep, no updates for 15 minutes + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 2 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # No polling, call_count should not increase + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 2 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # No polling, call_count should not increase + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 2 + + freezer.tick(VEHICLE_WAIT) + async_fire_time_changed(hass) + # Vehicle didn't sleep, go back to normal + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 3 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # Regular polling + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 4 + + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # Vehicle active + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 5 + + freezer.tick(VEHICLE_WAIT) + 
async_fire_time_changed(hass) + # Dont let sleep when active + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 6 + + freezer.tick(VEHICLE_WAIT) + async_fire_time_changed(hass) + # Dont let sleep when active + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 7 + + +# Test Energy Live Coordinator +@pytest.mark.parametrize(("side_effect", "state"), ERRORS) +async def test_energy_live_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_live_status: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, +) -> None: + """Test coordinator refresh with an error.""" + mock_live_status.side_effect = side_effect + await setup_platform(hass, normal_config_entry) + assert normal_config_entry.state is state + + +# Test Energy Site Coordinator +@pytest.mark.parametrize(("side_effect", "state"), ERRORS) +async def test_energy_site_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_site_info: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, +) -> None: + """Test coordinator refresh with an error.""" + mock_site_info.side_effect = side_effect + await setup_platform(hass, normal_config_entry) + assert normal_config_entry.state is state + + +async def test_energy_live_refresh_ratelimited( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_live_status, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh handles 429.""" + + await setup_platform(hass, normal_config_entry) + + mock_live_status.side_effect = RateLimited({"after": ENERGY_INTERVAL_SECONDS + 10}) + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_live_status.call_count == 2 + + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should not call for another 10 seconds + assert mock_live_status.call_count == 2 + + 
freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_live_status.call_count == 3 + + +async def test_energy_info_refresh_ratelimited( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_site_info: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh handles 429.""" + + await setup_platform(hass, normal_config_entry) + + mock_site_info.side_effect = RateLimited({"after": ENERGY_INTERVAL_SECONDS + 10}) + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_site_info.call_count == 2 + + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should not call for another 10 seconds + assert mock_site_info.call_count == 2 + + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_site_info.call_count == 3 + + +async def test_init_region_issue( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, + mock_find_server: AsyncMock, +) -> None: + """Test init with region issue.""" + + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + mock_find_server.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_init_region_issue_failed( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, + mock_find_server: AsyncMock, +) -> None: + """Test init with unresolvable region issue.""" + + mock_products.side_effect = InvalidRegion + mock_find_server.side_effect = LibraryError + await setup_platform(hass, normal_config_entry) + mock_find_server.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR diff --git a/tests/components/tesla_fleet/test_sensor.py b/tests/components/tesla_fleet/test_sensor.py new file 
mode 100644 index 00000000000..377179ca26a --- /dev/null +++ b/tests/components/tesla_fleet/test_sensor.py @@ -0,0 +1,43 @@ +"""Test the Tesla Fleet sensor platform.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, assert_entities_alt, setup_platform +from .const import VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + normal_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_vehicle_data: AsyncMock, +) -> None: + """Tests that the sensor entities are correct.""" + + freezer.move_to("2024-01-01 00:00:00+00:00") + + await setup_platform(hass, normal_config_entry, [Platform.SENSOR]) + + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + # Coordinator refresh + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index 410eaa62b69..03b9e2c6eb6 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -2,8 +2,9 @@ from __future__ import annotations +from collections.abc import Generator from copy import deepcopy -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -37,7 +38,7 @@ 
def mock_products(): @pytest.fixture(autouse=True) -def mock_vehicle_data(): +def mock_vehicle_data() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( "homeassistant.components.teslemetry.VehicleSpecific.vehicle_data", @@ -57,7 +58,7 @@ def mock_wake_up(): @pytest.fixture(autouse=True) -def mock_vehicle(): +def mock_vehicle() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle method.""" with patch( "homeassistant.components.teslemetry.VehicleSpecific.vehicle", diff --git a/tests/components/teslemetry/fixtures/products.json b/tests/components/teslemetry/fixtures/products.json index e1b76e4cefb..8da921a33f4 100644 --- a/tests/components/teslemetry/fixtures/products.json +++ b/tests/components/teslemetry/fixtures/products.json @@ -115,7 +115,17 @@ "features": { "rate_plan_manager_no_pricing_constraint": true } + }, + { + "energy_site_id": 98765, + "components": { + "battery": false, + "solar": false, + "grid": false, + "load_meter": false, + "market_type": "residential" + } } ], - "count": 2 + "count": 3 } diff --git a/tests/components/teslemetry/fixtures/vehicle_data.json b/tests/components/teslemetry/fixtures/vehicle_data.json index 6c787df4897..3845ae48559 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data.json +++ b/tests/components/teslemetry/fixtures/vehicle_data.json @@ -176,7 +176,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": null, + "sun_roof_installed": true, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1705707520649, @@ -250,6 +250,8 @@ "min_limit_mph": 50, "pin_code_set": true }, + "sun_roof_state": "open", + "vehicle_state_sun_roof_percent_open": 20, "timestamp": 1705707520649, "tpms_hard_warning_fl": false, "tpms_hard_warning_fr": false, diff --git a/tests/components/teslemetry/snapshots/test_cover.ambr b/tests/components/teslemetry/snapshots/test_cover.ambr index 
7689a08a373..7ffb9c4a1f9 100644 --- a/tests/components/teslemetry/snapshots/test_cover.ambr +++ b/tests/components/teslemetry/snapshots/test_cover.ambr @@ -95,6 +95,54 @@ 'state': 'closed', }) # --- +# name: test_cover[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- # name: test_cover[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -287,6 +335,54 @@ 'state': 'open', }) # --- +# name: test_cover_alt[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Sunroof', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_alt[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_cover_alt[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -479,6 +575,54 @@ 'state': 'closed', }) # --- +# name: test_cover_noscope[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_noscope[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- # name: test_cover_noscope[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git 
a/tests/components/teslemetry/snapshots/test_diagnostics.ambr b/tests/components/teslemetry/snapshots/test_diagnostics.ambr index 4a942daa508..11f8a91c1aa 100644 --- a/tests/components/teslemetry/snapshots/test_diagnostics.ambr +++ b/tests/components/teslemetry/snapshots/test_diagnostics.ambr @@ -337,7 +337,7 @@ 'vehicle_config_roof_color': 'RoofColorGlass', 'vehicle_config_seat_type': None, 'vehicle_config_spoiler_type': 'None', - 'vehicle_config_sun_roof_installed': None, + 'vehicle_config_sun_roof_installed': True, 'vehicle_config_supports_qr_pairing': False, 'vehicle_config_third_row_seats': 'None', 'vehicle_config_timestamp': 1705707520649, @@ -402,6 +402,7 @@ 'vehicle_state_speed_limit_mode_max_limit_mph': 120, 'vehicle_state_speed_limit_mode_min_limit_mph': 50, 'vehicle_state_speed_limit_mode_pin_code_set': True, + 'vehicle_state_sun_roof_state': 'open', 'vehicle_state_timestamp': 1705707520649, 'vehicle_state_tpms_hard_warning_fl': False, 'vehicle_state_tpms_hard_warning_fr': False, @@ -426,6 +427,7 @@ 'vehicle_state_vehicle_name': 'Test', 'vehicle_state_vehicle_self_test_progress': 0, 'vehicle_state_vehicle_self_test_requested': False, + 'vehicle_state_vehicle_state_sun_roof_percent_open': 20, 'vehicle_state_webcam_available': True, 'vin': '**REDACTED**', }), diff --git a/tests/components/teslemetry/snapshots/test_init.ambr b/tests/components/teslemetry/snapshots/test_init.ambr index 951e4557bdd..e07f075b7d8 100644 --- a/tests/components/teslemetry/snapshots/test_init.ambr +++ b/tests/components/teslemetry/snapshots/test_init.ambr @@ -21,8 +21,10 @@ }), 'manufacturer': 'Tesla', 'model': 'Powerwall 2, Tesla Backup Gateway 2', + 'model_id': None, 'name': 'Energy Site', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': '123456', 'suggested_area': None, 'sw_version': None, @@ -51,8 +53,10 @@ }), 'manufacturer': 'Tesla', 'model': 'Model X', + 'model_id': None, 'name': 'Test', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 
'LRWXF7EK4KC700000', 'suggested_area': None, 'sw_version': None, @@ -81,8 +85,10 @@ }), 'manufacturer': 'Tesla', 'model': 'Gen 3 Wall Connector', + 'model_id': None, 'name': 'Wall Connector', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': '123', 'suggested_area': None, 'sw_version': None, @@ -111,8 +117,10 @@ }), 'manufacturer': 'Tesla', 'model': 'Gen 3 Wall Connector', + 'model_id': None, 'name': 'Wall Connector', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': '234', 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 250413396c1..31a39f1f21a 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -199,7 +199,7 @@ async def test_climate( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 34}, blocking=True, ) diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py index 5f99a5d9c79..8d4493ab25f 100644 --- a/tests/components/teslemetry/test_cover.py +++ b/tests/components/teslemetry/test_cover.py @@ -2,6 +2,7 @@ from unittest.mock import patch +import pytest from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline @@ -9,6 +10,7 @@ from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, + SERVICE_STOP_COVER, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -24,6 +26,7 @@ from . 
import assert_entities, setup_platform from .const import COMMAND_OK, METADATA_NOSCOPE, VEHICLE_DATA_ALT +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -35,19 +38,21 @@ async def test_cover( assert_entities(hass, entry.entry_id, entity_registry, snapshot) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data, ) -> None: - """Tests that the cover entities are correct without scopes.""" + """Tests that the cover entities are correct with alternate values.""" mock_vehicle_data.return_value = VEHICLE_DATA_ALT entry = await setup_platform(hass, [Platform.COVER]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -73,6 +78,7 @@ async def test_cover_offline( assert state.state == STATE_UNKNOWN +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_services( hass: HomeAssistant, ) -> None: @@ -186,3 +192,44 @@ async def test_cover_services( state = hass.states.get(entity_id) assert state assert state.state is STATE_CLOSED + + # Sunroof + entity_id = "cover.test_sunroof" + with patch( + "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state is STATE_OPEN + + call.reset_mock() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + 
assert state.state is STATE_OPEN + + call.reset_mock() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state is STATE_CLOSED diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 31b4202b521..5520a5549bd 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -1,5 +1,7 @@ """Test the Teslemetry init.""" +from unittest.mock import AsyncMock + from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion @@ -21,7 +23,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from . import setup_platform -from .const import VEHICLE_DATA_ALT +from .const import VEHICLE_DATA_ALT, WAKE_UP_ASLEEP from tests.common import async_fire_time_changed @@ -68,6 +70,21 @@ async def test_devices( # Vehicle Coordinator +async def test_vehicle_refresh_asleep( + hass: HomeAssistant, + mock_vehicle: AsyncMock, + mock_vehicle_data: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh with an error.""" + + mock_vehicle.return_value = WAKE_UP_ASLEEP + entry = await setup_platform(hass, [Platform.CLIMATE]) + assert entry.state is ConfigEntryState.LOADED + mock_vehicle.assert_called_once() + mock_vehicle_data.assert_not_called() + + async def test_vehicle_refresh_offline( hass: HomeAssistant, mock_vehicle_data, freezer: FrozenDateTimeFactory ) -> None: diff --git a/tests/components/teslemetry/test_services.py b/tests/components/teslemetry/test_services.py new file mode 100644 index 00000000000..a5b55f5dcc5 --- /dev/null +++ b/tests/components/teslemetry/test_services.py @@ -0,0 +1,238 @@ +"""Test the Teslemetry services.""" + +from unittest.mock import patch + +import pytest + +from 
homeassistant.components.teslemetry.const import DOMAIN +from homeassistant.components.teslemetry.services import ( + ATTR_DEPARTURE_TIME, + ATTR_ENABLE, + ATTR_END_OFF_PEAK_TIME, + ATTR_GPS, + ATTR_OFF_PEAK_CHARGING_ENABLED, + ATTR_OFF_PEAK_CHARGING_WEEKDAYS, + ATTR_PIN, + ATTR_PRECONDITIONING_ENABLED, + ATTR_PRECONDITIONING_WEEKDAYS, + ATTR_TIME, + ATTR_TOU_SETTINGS, + SERVICE_NAVIGATE_ATTR_GPS_REQUEST, + SERVICE_SET_SCHEDULED_CHARGING, + SERVICE_SET_SCHEDULED_DEPARTURE, + SERVICE_SPEED_LIMIT, + SERVICE_TIME_OF_USE, + SERVICE_VALET_MODE, +) +from homeassistant.const import CONF_DEVICE_ID, CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . import setup_platform +from .const import COMMAND_ERROR, COMMAND_OK + +lat = -27.9699373 +lon = 153.3726526 + + +async def test_services( + hass: HomeAssistant, +) -> None: + """Tests that the custom services are correct.""" + + await setup_platform(hass) + entity_registry = er.async_get(hass) + + # Get a vehicle device ID + vehicle_device = entity_registry.async_get("sensor.test_charging").device_id + energy_device = entity_registry.async_get( + "sensor.energy_site_battery_power" + ).device_id + + with patch( + "homeassistant.components.teslemetry.VehicleSpecific.navigation_gps_request", + return_value=COMMAND_OK, + ) as navigation_gps_request: + await hass.services.async_call( + DOMAIN, + SERVICE_NAVIGATE_ATTR_GPS_REQUEST, + { + CONF_DEVICE_ID: vehicle_device, + ATTR_GPS: {CONF_LATITUDE: lat, CONF_LONGITUDE: lon}, + }, + blocking=True, + ) + navigation_gps_request.assert_called_once() + + with patch( + "homeassistant.components.teslemetry.VehicleSpecific.set_scheduled_charging", + return_value=COMMAND_OK, + ) as set_scheduled_charging: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SCHEDULED_CHARGING, + { + CONF_DEVICE_ID: vehicle_device, + 
ATTR_ENABLE: True, + ATTR_TIME: "6:00", + }, + blocking=True, + ) + set_scheduled_charging.assert_called_once() + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SCHEDULED_CHARGING, + { + CONF_DEVICE_ID: vehicle_device, + ATTR_ENABLE: True, + }, + blocking=True, + ) + + with patch( + "homeassistant.components.teslemetry.VehicleSpecific.set_scheduled_departure", + return_value=COMMAND_OK, + ) as set_scheduled_departure: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SCHEDULED_DEPARTURE, + { + CONF_DEVICE_ID: vehicle_device, + ATTR_ENABLE: True, + ATTR_PRECONDITIONING_ENABLED: True, + ATTR_PRECONDITIONING_WEEKDAYS: False, + ATTR_DEPARTURE_TIME: "6:00", + ATTR_OFF_PEAK_CHARGING_ENABLED: True, + ATTR_OFF_PEAK_CHARGING_WEEKDAYS: False, + ATTR_END_OFF_PEAK_TIME: "5:00", + }, + blocking=True, + ) + set_scheduled_departure.assert_called_once() + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SCHEDULED_DEPARTURE, + { + CONF_DEVICE_ID: vehicle_device, + ATTR_ENABLE: True, + ATTR_PRECONDITIONING_ENABLED: True, + }, + blocking=True, + ) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SCHEDULED_DEPARTURE, + { + CONF_DEVICE_ID: vehicle_device, + ATTR_ENABLE: True, + ATTR_OFF_PEAK_CHARGING_ENABLED: True, + }, + blocking=True, + ) + + with patch( + "homeassistant.components.teslemetry.VehicleSpecific.set_valet_mode", + return_value=COMMAND_OK, + ) as set_valet_mode: + await hass.services.async_call( + DOMAIN, + SERVICE_VALET_MODE, + { + CONF_DEVICE_ID: vehicle_device, + ATTR_ENABLE: True, + ATTR_PIN: 1234, + }, + blocking=True, + ) + set_valet_mode.assert_called_once() + + with patch( + "homeassistant.components.teslemetry.VehicleSpecific.speed_limit_activate", + return_value=COMMAND_OK, + ) as speed_limit_activate: + await hass.services.async_call( + DOMAIN, + SERVICE_SPEED_LIMIT, + { + 
CONF_DEVICE_ID: vehicle_device, + ATTR_ENABLE: True, + ATTR_PIN: 1234, + }, + blocking=True, + ) + speed_limit_activate.assert_called_once() + + with patch( + "homeassistant.components.teslemetry.VehicleSpecific.speed_limit_deactivate", + return_value=COMMAND_OK, + ) as speed_limit_deactivate: + await hass.services.async_call( + DOMAIN, + SERVICE_SPEED_LIMIT, + { + CONF_DEVICE_ID: vehicle_device, + ATTR_ENABLE: False, + ATTR_PIN: 1234, + }, + blocking=True, + ) + speed_limit_deactivate.assert_called_once() + + with patch( + "homeassistant.components.teslemetry.EnergySpecific.time_of_use_settings", + return_value=COMMAND_OK, + ) as set_time_of_use: + await hass.services.async_call( + DOMAIN, + SERVICE_TIME_OF_USE, + { + CONF_DEVICE_ID: energy_device, + ATTR_TOU_SETTINGS: {}, + }, + blocking=True, + ) + set_time_of_use.assert_called_once() + + with ( + patch( + "homeassistant.components.teslemetry.EnergySpecific.time_of_use_settings", + return_value=COMMAND_ERROR, + ) as set_time_of_use, + pytest.raises(HomeAssistantError), + ): + await hass.services.async_call( + DOMAIN, + SERVICE_TIME_OF_USE, + { + CONF_DEVICE_ID: energy_device, + ATTR_TOU_SETTINGS: {}, + }, + blocking=True, + ) + + +async def test_service_validation_errors( + hass: HomeAssistant, +) -> None: + """Tests that the custom services handle bad data.""" + + await setup_platform(hass) + + # Bad device ID + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_NAVIGATE_ATTR_GPS_REQUEST, + { + CONF_DEVICE_ID: "nope", + ATTR_GPS: {CONF_LATITUDE: lat, CONF_LONGITUDE: lon}, + }, + blocking=True, + ) diff --git a/tests/components/tessie/common.py b/tests/components/tessie/common.py index c19f6f65201..37a38fffaa4 100644 --- a/tests/components/tessie/common.py +++ b/tests/components/tessie/common.py @@ -16,6 +16,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType from tests.common import MockConfigEntry, load_json_object_fixture +# Tessie library 
TEST_STATE_OF_ALL_VEHICLES = load_json_object_fixture("vehicles.json", DOMAIN) TEST_VEHICLE_STATE_ONLINE = load_json_object_fixture("online.json", DOMAIN) TEST_VEHICLE_STATUS_AWAKE = {"status": TessieStatus.AWAKE} @@ -47,6 +48,24 @@ ERROR_VIRTUAL_KEY = ClientResponseError( ) ERROR_CONNECTION = ClientConnectionError() +# Fleet API library +PRODUCTS = load_json_object_fixture("products.json", DOMAIN) +LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) +SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) +RESPONSE_OK = {"response": {}, "error": None} +COMMAND_OK = {"response": {"result": True, "reason": ""}} +SCOPES = [ + "user_data", + "vehicle_device_data", + "vehicle_cmds", + "vehicle_charging_cmds", + "energy_device_data", + "energy_cmds", + "offline_access", + "openid", +] +NO_SCOPES = ["user_data", "offline_access", "openid"] + async def setup_platform( hass: HomeAssistant, platforms: list[Platform] | UndefinedType = UNDEFINED diff --git a/tests/components/tessie/conftest.py b/tests/components/tessie/conftest.py index 77d1e3fd3e2..e0aba73af17 100644 --- a/tests/components/tessie/conftest.py +++ b/tests/components/tessie/conftest.py @@ -2,16 +2,24 @@ from __future__ import annotations +from copy import deepcopy from unittest.mock import patch import pytest from .common import ( + COMMAND_OK, + LIVE_STATUS, + PRODUCTS, + SCOPES, + SITE_INFO, TEST_STATE_OF_ALL_VEHICLES, TEST_VEHICLE_STATE_ONLINE, TEST_VEHICLE_STATUS_AWAKE, ) +# Tessie + @pytest.fixture(autouse=True) def mock_get_state(): @@ -41,3 +49,53 @@ def mock_get_state_of_all_vehicles(): return_value=TEST_STATE_OF_ALL_VEHICLES, ) as mock_get_state_of_all_vehicles: yield mock_get_state_of_all_vehicles + + +# Fleet API +@pytest.fixture(autouse=True) +def mock_scopes(): + """Mock scopes function.""" + with patch( + "homeassistant.components.tessie.Tessie.scopes", + return_value=SCOPES, + ) as mock_scopes: + yield mock_scopes + + +@pytest.fixture(autouse=True) +def mock_products(): + 
"""Mock Tesla Fleet Api products method.""" + with patch( + "homeassistant.components.tessie.Tessie.products", return_value=PRODUCTS + ) as mock_products: + yield mock_products + + +@pytest.fixture(autouse=True) +def mock_request(): + """Mock Tesla Fleet API request method.""" + with patch( + "homeassistant.components.tessie.Tessie._request", + return_value=COMMAND_OK, + ) as mock_request: + yield mock_request + + +@pytest.fixture(autouse=True) +def mock_live_status(): + """Mock Tesla Fleet API EnergySpecific live_status method.""" + with patch( + "homeassistant.components.tessie.EnergySpecific.live_status", + side_effect=lambda: deepcopy(LIVE_STATUS), + ) as mock_live_status: + yield mock_live_status + + +@pytest.fixture(autouse=True) +def mock_site_info(): + """Mock Tesla Fleet API EnergySpecific site_info method.""" + with patch( + "homeassistant.components.tessie.EnergySpecific.site_info", + side_effect=lambda: deepcopy(SITE_INFO), + ) as mock_live_status: + yield mock_live_status diff --git a/tests/components/tessie/fixtures/live_status.json b/tests/components/tessie/fixtures/live_status.json new file mode 100644 index 00000000000..486f9f4fadd --- /dev/null +++ b/tests/components/tessie/fixtures/live_status.json @@ -0,0 +1,33 @@ +{ + "response": { + "solar_power": 1185, + "energy_left": 38896.47368421053, + "total_pack_energy": 40727, + "percentage_charged": 95.50537403739663, + "backup_capable": true, + "battery_power": 5060, + "load_power": 6245, + "grid_status": "Active", + "grid_services_active": false, + "grid_power": 0, + "grid_services_power": 0, + "generator_power": 0, + "island_status": "on_grid", + "storm_mode_active": false, + "timestamp": "2024-01-01T00:00:00+00:00", + "wall_connectors": [ + { + "din": "abd-123", + "wall_connector_state": 2, + "wall_connector_fault_state": 2, + "wall_connector_power": 0 + }, + { + "din": "bcd-234", + "wall_connector_state": 2, + "wall_connector_fault_state": 2, + "wall_connector_power": 0 + } + ] + } +} diff --git 
a/tests/components/tessie/fixtures/online.json b/tests/components/tessie/fixtures/online.json index ed49b4bfd75..38b904cdffb 100644 --- a/tests/components/tessie/fixtures/online.json +++ b/tests/components/tessie/fixtures/online.json @@ -98,6 +98,8 @@ "passenger_temp_setting": 22.5, "remote_heater_control_enabled": false, "right_temp_direction": 234, + "seat_fan_front_left": 0, + "seat_fan_front_right": 0, "seat_heater_left": 0, "seat_heater_rear_center": 0, "seat_heater_rear_left": 0, @@ -157,7 +159,7 @@ "exterior_trim_override": "", "has_air_suspension": false, "has_ludicrous_mode": false, - "has_seat_cooling": false, + "has_seat_cooling": true, "headlamp_type": "Global", "interior_trim_type": "White2", "key_version": 2, @@ -173,7 +175,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": null, + "sun_roof_installed": true, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1701139037461, diff --git a/tests/components/tessie/fixtures/products.json b/tests/components/tessie/fixtures/products.json new file mode 100644 index 00000000000..8da921a33f4 --- /dev/null +++ b/tests/components/tessie/fixtures/products.json @@ -0,0 +1,131 @@ +{ + "response": [ + { + "id": 1234, + "user_id": 1234, + "vehicle_id": 1234, + "vin": "LRWXF7EK4KC700000", + "color": null, + "access_type": "OWNER", + "display_name": "Test", + "option_codes": null, + "cached_data": null, + "granular_access": { "hide_private": false }, + "tokens": ["abc", "def"], + "state": "asleep", + "in_service": false, + "id_s": "1234", + "calendar_enabled": true, + "api_version": 71, + "backseat_token": null, + "backseat_token_updated_at": null, + "ble_autopair_enrolled": false, + "vehicle_config": { + "aux_park_lamps": "Eu", + "badge_version": 1, + "can_accept_navigation_requests": true, + "can_actuate_trunks": true, + "car_special_type": "base", + "car_type": "model3", + "charge_port_type": "CCS", + "cop_user_set_temp_supported": false, + 
"dashcam_clip_save_supported": true, + "default_charge_to_max": false, + "driver_assist": "TeslaAP3", + "ece_restrictions": false, + "efficiency_package": "M32021", + "eu_vehicle": true, + "exterior_color": "DeepBlue", + "exterior_trim": "Black", + "exterior_trim_override": "", + "has_air_suspension": false, + "has_ludicrous_mode": false, + "has_seat_cooling": false, + "headlamp_type": "Global", + "interior_trim_type": "White2", + "key_version": 2, + "motorized_charge_port": true, + "paint_color_override": "0,9,25,0.7,0.04", + "performance_package": "Base", + "plg": true, + "pws": true, + "rear_drive_unit": "PM216MOSFET", + "rear_seat_heaters": 1, + "rear_seat_type": 0, + "rhd": true, + "roof_color": "RoofColorGlass", + "seat_type": null, + "spoiler_type": "None", + "sun_roof_installed": null, + "supports_qr_pairing": false, + "third_row_seats": "None", + "timestamp": 1705701487912, + "trim_badging": "74d", + "use_range_badging": true, + "utc_offset": 36000, + "webcam_selfie_supported": true, + "webcam_supported": true, + "wheel_type": "Pinwheel18CapKit" + }, + "command_signing": "allowed", + "release_notes_supported": true + }, + { + "energy_site_id": 123456, + "resource_type": "battery", + "site_name": "Energy Site", + "id": "ABC123", + "gateway_id": "ABC123", + "asset_site_id": "c0ffee", + "warp_site_number": "GA123456", + "energy_left": 23286.105263157893, + "total_pack_energy": 40804, + "percentage_charged": 57.068192488868476, + "battery_type": "ac_powerwall", + "backup_capable": true, + "battery_power": 14990, + "go_off_grid_test_banner_enabled": null, + "storm_mode_enabled": true, + "powerwall_onboarding_settings_set": true, + "powerwall_tesla_electric_interested_in": null, + "vpp_tour_enabled": null, + "sync_grid_alert_enabled": true, + "breaker_alert_enabled": true, + "components": { + "battery": true, + "battery_type": "ac_powerwall", + "solar": true, + "solar_type": "pv_panel", + "grid": true, + "load_meter": true, + "market_type": "residential", + 
"wall_connectors": [ + { + "device_id": "abc-123", + "din": "123-abc", + "is_active": true + }, + { + "device_id": "bcd-234", + "din": "234-bcd", + "is_active": true + } + ] + }, + "features": { + "rate_plan_manager_no_pricing_constraint": true + } + }, + { + "energy_site_id": 98765, + "components": { + "battery": false, + "solar": false, + "grid": false, + "load_meter": false, + "market_type": "residential" + } + } + ], + "count": 3 +} diff --git a/tests/components/tessie/fixtures/site_info.json b/tests/components/tessie/fixtures/site_info.json new file mode 100644 index 00000000000..f581707ff14 --- /dev/null +++ b/tests/components/tessie/fixtures/site_info.json @@ -0,0 +1,125 @@ +{ + "response": { + "id": "1233-abcd", + "site_name": "Site", + "backup_reserve_percent": 0, + "default_real_mode": "self_consumption", + "installation_date": "2022-01-01T00:00:00+00:00", + "user_settings": { + "go_off_grid_test_banner_enabled": false, + "storm_mode_enabled": true, + "powerwall_onboarding_settings_set": true, + "powerwall_tesla_electric_interested_in": false, + "vpp_tour_enabled": true, + "sync_grid_alert_enabled": true, + "breaker_alert_enabled": false + }, + "components": { + "solar": true, + "solar_type": "pv_panel", + "battery": true, + "grid": true, + "backup": true, + "gateway": "teg", + "load_meter": true, + "tou_capable": true, + "storm_mode_capable": true, + "flex_energy_request_capable": false, + "car_charging_data_supported": false, + "off_grid_vehicle_charging_reserve_supported": true, + "vehicle_charging_performance_view_enabled": false, + "vehicle_charging_solar_offset_view_enabled": false, + "battery_solar_offset_view_enabled": true, + "solar_value_enabled": true, + "energy_value_header": "Energy Value", + "energy_value_subheader": "Estimated Value", + "energy_service_self_scheduling_enabled": true, + "show_grid_import_battery_source_cards": true, + "set_islanding_mode_enabled": true, + "wifi_commissioning_enabled": true, + "backup_time_remaining_enabled": 
true, + "battery_type": "ac_powerwall", + "configurable": true, + "grid_services_enabled": false, + "gateways": [ + { + "device_id": "gateway-id", + "din": "gateway-din", + "serial_number": "CN00000000J50D", + "part_number": "1152100-14-J", + "part_type": 10, + "part_name": "Tesla Backup Gateway 2", + "is_active": true, + "site_id": "1234-abcd", + "firmware_version": "24.4.0 0fe780c9", + "updated_datetime": "2024-05-14T00:00:00.000Z" + } + ], + "batteries": [ + { + "device_id": "battery-1-id", + "din": "battery-1-din", + "serial_number": "TG000000001DA5", + "part_number": "3012170-10-B", + "part_type": 2, + "part_name": "Powerwall 2", + "nameplate_max_charge_power": 5000, + "nameplate_max_discharge_power": 5000, + "nameplate_energy": 13500 + }, + { + "device_id": "battery-2-id", + "din": "battery-2-din", + "serial_number": "TG000000002DA5", + "part_number": "3012170-05-C", + "part_type": 2, + "part_name": "Powerwall 2", + "nameplate_max_charge_power": 5000, + "nameplate_max_discharge_power": 5000, + "nameplate_energy": 13500 + } + ], + "wall_connectors": [ + { + "device_id": "123abc", + "din": "abc123", + "is_active": true + }, + { + "device_id": "234bcd", + "din": "bcd234", + "is_active": true + } + ], + "disallow_charge_from_grid_with_solar_installed": true, + "customer_preferred_export_rule": "pv_only", + "net_meter_mode": "battery_ok", + "system_alerts_enabled": true + }, + "version": "23.44.0 eb113390", + "battery_count": 2, + "tou_settings": { + "optimization_strategy": "economics", + "schedule": [ + { + "target": "off_peak", + "week_days": [1, 0], + "start_seconds": 0, + "end_seconds": 3600 + }, + { + "target": "peak", + "week_days": [1, 0], + "start_seconds": 3600, + "end_seconds": 0 + } + ] + }, + "nameplate_power": 15000, + "nameplate_energy": 40500, + "installation_time_zone": "", + "max_site_meter_power_ac": 1000000000, + "min_site_meter_power_ac": -1000000000, + "vpp_backup_reserve_percent": 0 + } +} diff --git 
a/tests/components/tessie/fixtures/vehicles.json b/tests/components/tessie/fixtures/vehicles.json index 359e23f9cdd..622b31bae69 100644 --- a/tests/components/tessie/fixtures/vehicles.json +++ b/tests/components/tessie/fixtures/vehicles.json @@ -111,6 +111,8 @@ "passenger_temp_setting": 22.5, "remote_heater_control_enabled": false, "right_temp_direction": 234, + "seat_fan_front_left": 0, + "seat_fan_front_right": 0, "seat_heater_left": 0, "seat_heater_rear_center": 0, "seat_heater_rear_left": 0, @@ -174,7 +176,7 @@ "exterior_trim_override": "", "has_air_suspension": false, "has_ludicrous_mode": false, - "has_seat_cooling": false, + "has_seat_cooling": true, "headlamp_type": "Global", "interior_trim_type": "White2", "key_version": 2, @@ -190,7 +192,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": null, + "sun_roof_installed": true, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1701139037461, diff --git a/tests/components/tessie/snapshots/test_binary_sensors.ambr b/tests/components/tessie/snapshots/test_binary_sensors.ambr index 7bc191de6ed..e8912bb0e7f 100644 --- a/tests/components/tessie/snapshots/test_binary_sensors.ambr +++ b/tests/components/tessie/snapshots/test_binary_sensors.ambr @@ -1,4 +1,142 @@ # serializer version: 1 +# name: test_binary_sensors[binary_sensor.energy_site_backup_capable-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Backup capable', + 'platform': 'tessie', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'backup_capable', + 'unique_id': '123456-backup_capable', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_backup_capable-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Backup capable', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_grid_services_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid services active', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_active', + 'unique_id': '123456-grid_services_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_grid_services_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_grid_services_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid services enabled', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_grid_services_enabled', + 'unique_id': '123456-components_grid_services_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_grid_services_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensors[binary_sensor.test_auto_seat_climate_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_cover.ambr b/tests/components/tessie/snapshots/test_cover.ambr index ff04c528244..6338758afb7 100644 --- a/tests/components/tessie/snapshots/test_cover.ambr +++ b/tests/components/tessie/snapshots/test_cover.ambr @@ -95,6 +95,54 @@ 'state': 'closed', }) # --- +# name: test_covers[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'tessie', + 'previous_unique_id': None, + 
'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'VINVINVIN-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- # name: test_covers[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_diagnostics.ambr b/tests/components/tessie/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..8eef7cbd549 --- /dev/null +++ b/tests/components/tessie/snapshots/test_diagnostics.ambr @@ -0,0 +1,428 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'energysites': list([ + dict({ + 'info': dict({ + 'backup_reserve_percent': 0, + 'battery_count': 2, + 'components_backup': True, + 'components_backup_time_remaining_enabled': True, + 'components_batteries': list([ + dict({ + 'device_id': 'battery-1-id', + 'din': 'battery-1-din', + 'nameplate_energy': 13500, + 'nameplate_max_charge_power': 5000, + 'nameplate_max_discharge_power': 5000, + 'part_name': 'Powerwall 2', + 'part_number': '3012170-10-B', + 'part_type': 2, + 'serial_number': '**REDACTED**', + }), + dict({ + 'device_id': 'battery-2-id', + 'din': 'battery-2-din', + 'nameplate_energy': 13500, + 'nameplate_max_charge_power': 5000, + 'nameplate_max_discharge_power': 5000, + 'part_name': 'Powerwall 2', + 'part_number': '3012170-05-C', + 'part_type': 2, + 'serial_number': '**REDACTED**', + }), + ]), + 'components_battery': True, + 'components_battery_solar_offset_view_enabled': True, + 'components_battery_type': 'ac_powerwall', + 'components_car_charging_data_supported': False, + 'components_configurable': True, + 
'components_customer_preferred_export_rule': 'pv_only', + 'components_disallow_charge_from_grid_with_solar_installed': True, + 'components_energy_service_self_scheduling_enabled': True, + 'components_energy_value_header': 'Energy Value', + 'components_energy_value_subheader': 'Estimated Value', + 'components_flex_energy_request_capable': False, + 'components_gateway': 'teg', + 'components_gateways': list([ + dict({ + 'device_id': 'gateway-id', + 'din': 'gateway-din', + 'firmware_version': '24.4.0 0fe780c9', + 'is_active': True, + 'part_name': 'Tesla Backup Gateway 2', + 'part_number': '1152100-14-J', + 'part_type': 10, + 'serial_number': '**REDACTED**', + 'site_id': '1234-abcd', + 'updated_datetime': '2024-05-14T00:00:00.000Z', + }), + ]), + 'components_grid': True, + 'components_grid_services_enabled': False, + 'components_load_meter': True, + 'components_net_meter_mode': 'battery_ok', + 'components_off_grid_vehicle_charging_reserve_supported': True, + 'components_set_islanding_mode_enabled': True, + 'components_show_grid_import_battery_source_cards': True, + 'components_solar': True, + 'components_solar_type': 'pv_panel', + 'components_solar_value_enabled': True, + 'components_storm_mode_capable': True, + 'components_system_alerts_enabled': True, + 'components_tou_capable': True, + 'components_vehicle_charging_performance_view_enabled': False, + 'components_vehicle_charging_solar_offset_view_enabled': False, + 'components_wall_connectors': list([ + dict({ + 'device_id': '123abc', + 'din': 'abc123', + 'is_active': True, + }), + dict({ + 'device_id': '234bcd', + 'din': 'bcd234', + 'is_active': True, + }), + ]), + 'components_wifi_commissioning_enabled': True, + 'default_real_mode': 'self_consumption', + 'id': '1233-abcd', + 'installation_date': '**REDACTED**', + 'installation_time_zone': '', + 'max_site_meter_power_ac': 1000000000, + 'min_site_meter_power_ac': -1000000000, + 'nameplate_energy': 40500, + 'nameplate_power': 15000, + 'site_name': 'Site', + 
'tou_settings_optimization_strategy': 'economics', + 'tou_settings_schedule': list([ + dict({ + 'end_seconds': 3600, + 'start_seconds': 0, + 'target': 'off_peak', + 'week_days': list([ + 1, + 0, + ]), + }), + dict({ + 'end_seconds': 0, + 'start_seconds': 3600, + 'target': 'peak', + 'week_days': list([ + 1, + 0, + ]), + }), + ]), + 'user_settings_breaker_alert_enabled': False, + 'user_settings_go_off_grid_test_banner_enabled': False, + 'user_settings_powerwall_onboarding_settings_set': True, + 'user_settings_powerwall_tesla_electric_interested_in': False, + 'user_settings_storm_mode_enabled': True, + 'user_settings_sync_grid_alert_enabled': True, + 'user_settings_vpp_tour_enabled': True, + 'version': '23.44.0 eb113390', + 'vpp_backup_reserve_percent': 0, + }), + 'live': dict({ + 'backup_capable': True, + 'battery_power': 5060, + 'energy_left': 38896.47368421053, + 'generator_power': 0, + 'grid_power': 0, + 'grid_services_active': False, + 'grid_services_power': 0, + 'grid_status': 'Active', + 'island_status': 'on_grid', + 'load_power': 6245, + 'percentage_charged': 95.50537403739663, + 'solar_power': 1185, + 'storm_mode_active': False, + 'timestamp': '2024-01-01T00:00:00+00:00', + 'total_pack_energy': 40727, + 'wall_connectors': dict({ + 'abd-123': dict({ + 'din': 'abd-123', + 'wall_connector_fault_state': 2, + 'wall_connector_power': 0, + 'wall_connector_state': 2, + }), + 'bcd-234': dict({ + 'din': 'bcd-234', + 'wall_connector_fault_state': 2, + 'wall_connector_power': 0, + 'wall_connector_state': 2, + }), + }), + }), + }), + ]), + 'vehicles': list([ + dict({ + 'data': dict({ + 'access_type': 'OWNER', + 'api_version': 67, + 'backseat_token': None, + 'backseat_token_updated_at': None, + 'ble_autopair_enrolled': False, + 'calendar_enabled': True, + 'charge_state_battery_heater_on': False, + 'charge_state_battery_level': 75, + 'charge_state_battery_range': 263.68, + 'charge_state_charge_amps': 32, + 'charge_state_charge_current_request': 32, + 
'charge_state_charge_current_request_max': 32, + 'charge_state_charge_enable_request': True, + 'charge_state_charge_energy_added': 18.47, + 'charge_state_charge_limit_soc': 80, + 'charge_state_charge_limit_soc_max': 100, + 'charge_state_charge_limit_soc_min': 50, + 'charge_state_charge_limit_soc_std': 80, + 'charge_state_charge_miles_added_ideal': 84, + 'charge_state_charge_miles_added_rated': 84, + 'charge_state_charge_port_cold_weather_mode': False, + 'charge_state_charge_port_color': '', + 'charge_state_charge_port_door_open': True, + 'charge_state_charge_port_latch': 'Engaged', + 'charge_state_charge_rate': 30.6, + 'charge_state_charger_actual_current': 32, + 'charge_state_charger_phases': 1, + 'charge_state_charger_pilot_current': 32, + 'charge_state_charger_power': 7, + 'charge_state_charger_voltage': 224, + 'charge_state_charging_state': 'Charging', + 'charge_state_conn_charge_cable': 'IEC', + 'charge_state_est_battery_range': 324.73, + 'charge_state_fast_charger_brand': '', + 'charge_state_fast_charger_present': False, + 'charge_state_fast_charger_type': 'ACSingleWireCAN', + 'charge_state_ideal_battery_range': 263.68, + 'charge_state_max_range_charge_counter': 0, + 'charge_state_minutes_to_full_charge': 0, + 'charge_state_not_enough_power_to_heat': None, + 'charge_state_off_peak_charging_enabled': False, + 'charge_state_off_peak_charging_times': 'all_week', + 'charge_state_off_peak_hours_end_time': 900, + 'charge_state_preconditioning_enabled': False, + 'charge_state_preconditioning_times': 'all_week', + 'charge_state_scheduled_charging_mode': 'StartAt', + 'charge_state_scheduled_charging_pending': False, + 'charge_state_scheduled_charging_start_time': 1701216000, + 'charge_state_scheduled_charging_start_time_app': 600, + 'charge_state_scheduled_charging_start_time_minutes': 600, + 'charge_state_scheduled_departure_time': 1694899800, + 'charge_state_scheduled_departure_time_minutes': 450, + 'charge_state_supercharger_session_trip_planner': False, + 
'charge_state_time_to_full_charge': 0, + 'charge_state_timestamp': 1701139037461, + 'charge_state_trip_charging': False, + 'charge_state_usable_battery_level': 75, + 'charge_state_user_charge_enable_request': None, + 'climate_state_allow_cabin_overheat_protection': True, + 'climate_state_auto_seat_climate_left': True, + 'climate_state_auto_seat_climate_right': True, + 'climate_state_auto_steering_wheel_heat': True, + 'climate_state_battery_heater': False, + 'climate_state_battery_heater_no_power': None, + 'climate_state_cabin_overheat_protection': 'On', + 'climate_state_cabin_overheat_protection_actively_cooling': False, + 'climate_state_climate_keeper_mode': 'off', + 'climate_state_cop_activation_temperature': 'High', + 'climate_state_defrost_mode': 0, + 'climate_state_driver_temp_setting': 22.5, + 'climate_state_fan_status': 0, + 'climate_state_hvac_auto_request': 'On', + 'climate_state_inside_temp': 30.4, + 'climate_state_is_auto_conditioning_on': False, + 'climate_state_is_climate_on': False, + 'climate_state_is_front_defroster_on': False, + 'climate_state_is_preconditioning': False, + 'climate_state_is_rear_defroster_on': False, + 'climate_state_left_temp_direction': 234, + 'climate_state_max_avail_temp': 28, + 'climate_state_min_avail_temp': 15, + 'climate_state_outside_temp': 30.5, + 'climate_state_passenger_temp_setting': 22.5, + 'climate_state_remote_heater_control_enabled': False, + 'climate_state_right_temp_direction': 234, + 'climate_state_seat_fan_front_left': 0, + 'climate_state_seat_fan_front_right': 0, + 'climate_state_seat_heater_left': 0, + 'climate_state_seat_heater_rear_center': 0, + 'climate_state_seat_heater_rear_left': 0, + 'climate_state_seat_heater_rear_right': 0, + 'climate_state_seat_heater_right': 0, + 'climate_state_side_mirror_heaters': False, + 'climate_state_steering_wheel_heat_level': 0, + 'climate_state_steering_wheel_heater': False, + 'climate_state_supports_fan_only_cabin_overheat_protection': True, + 'climate_state_timestamp': 
1701139037461, + 'climate_state_wiper_blade_heater': False, + 'color': None, + 'display_name': 'Test', + 'drive_state_active_route_destination': 'Giga Texas', + 'drive_state_active_route_energy_at_arrival': 65, + 'drive_state_active_route_latitude': '**REDACTED**', + 'drive_state_active_route_longitude': '**REDACTED**', + 'drive_state_active_route_miles_to_arrival': 46.707353, + 'drive_state_active_route_minutes_to_arrival': 59.2, + 'drive_state_active_route_traffic_minutes_delay': 0, + 'drive_state_gps_as_of': 1701129612, + 'drive_state_heading': 185, + 'drive_state_latitude': '**REDACTED**', + 'drive_state_longitude': '**REDACTED**', + 'drive_state_native_latitude': '**REDACTED**', + 'drive_state_native_location_supported': 1, + 'drive_state_native_longitude': '**REDACTED**', + 'drive_state_native_type': 'wgs', + 'drive_state_power': -7, + 'drive_state_shift_state': None, + 'drive_state_speed': None, + 'drive_state_timestamp': 1701139037461, + 'granular_access_hide_private': False, + 'gui_settings_gui_24_hour_time': False, + 'gui_settings_gui_charge_rate_units': 'kW', + 'gui_settings_gui_distance_units': 'km/hr', + 'gui_settings_gui_range_display': 'Rated', + 'gui_settings_gui_temperature_units': 'C', + 'gui_settings_gui_tirepressure_units': 'Psi', + 'gui_settings_show_range_units': False, + 'gui_settings_timestamp': 1701139037461, + 'id': '**REDACTED**', + 'id_s': '**REDACTED**', + 'in_service': False, + 'state': 'online', + 'tokens': '**REDACTED**', + 'user_id': '**REDACTED**', + 'vehicle_config_aux_park_lamps': 'Eu', + 'vehicle_config_badge_version': 1, + 'vehicle_config_can_accept_navigation_requests': True, + 'vehicle_config_can_actuate_trunks': True, + 'vehicle_config_car_special_type': 'base', + 'vehicle_config_car_type': 'model3', + 'vehicle_config_charge_port_type': 'CCS', + 'vehicle_config_cop_user_set_temp_supported': False, + 'vehicle_config_dashcam_clip_save_supported': True, + 'vehicle_config_default_charge_to_max': False, + 
'vehicle_config_driver_assist': 'TeslaAP3', + 'vehicle_config_ece_restrictions': False, + 'vehicle_config_efficiency_package': 'M32021', + 'vehicle_config_eu_vehicle': True, + 'vehicle_config_exterior_color': 'DeepBlue', + 'vehicle_config_exterior_trim': 'Black', + 'vehicle_config_exterior_trim_override': '', + 'vehicle_config_has_air_suspension': False, + 'vehicle_config_has_ludicrous_mode': False, + 'vehicle_config_has_seat_cooling': True, + 'vehicle_config_headlamp_type': 'Global', + 'vehicle_config_interior_trim_type': 'White2', + 'vehicle_config_key_version': 2, + 'vehicle_config_motorized_charge_port': True, + 'vehicle_config_paint_color_override': '0,9,25,0.7,0.04', + 'vehicle_config_performance_package': 'Base', + 'vehicle_config_plg': True, + 'vehicle_config_pws': False, + 'vehicle_config_rear_drive_unit': 'PM216MOSFET', + 'vehicle_config_rear_seat_heaters': 1, + 'vehicle_config_rear_seat_type': 0, + 'vehicle_config_rhd': True, + 'vehicle_config_roof_color': 'RoofColorGlass', + 'vehicle_config_seat_type': None, + 'vehicle_config_spoiler_type': 'None', + 'vehicle_config_sun_roof_installed': True, + 'vehicle_config_supports_qr_pairing': False, + 'vehicle_config_third_row_seats': 'None', + 'vehicle_config_timestamp': 1701139037461, + 'vehicle_config_trim_badging': '74d', + 'vehicle_config_use_range_badging': True, + 'vehicle_config_utc_offset': 36000, + 'vehicle_config_webcam_selfie_supported': True, + 'vehicle_config_webcam_supported': True, + 'vehicle_config_wheel_type': 'Pinwheel18CapKit', + 'vehicle_id': '**REDACTED**', + 'vehicle_state_api_version': 67, + 'vehicle_state_autopark_state_v2': 'unavailable', + 'vehicle_state_calendar_supported': True, + 'vehicle_state_car_version': '2023.38.6 c1f85ddb415f', + 'vehicle_state_center_display_state': 0, + 'vehicle_state_dashcam_clip_save_available': True, + 'vehicle_state_dashcam_state': 'Recording', + 'vehicle_state_df': 0, + 'vehicle_state_dr': 0, + 'vehicle_state_fd_window': 0, + 
'vehicle_state_feature_bitmask': 'fbdffbff,7f', + 'vehicle_state_fp_window': 0, + 'vehicle_state_ft': 0, + 'vehicle_state_is_user_present': False, + 'vehicle_state_locked': True, + 'vehicle_state_media_info_audio_volume': 2.3333, + 'vehicle_state_media_info_audio_volume_increment': 0.333333, + 'vehicle_state_media_info_audio_volume_max': 10.333333, + 'vehicle_state_media_info_media_playback_status': 'Stopped', + 'vehicle_state_media_info_now_playing_album': '', + 'vehicle_state_media_info_now_playing_artist': '', + 'vehicle_state_media_info_now_playing_duration': 0, + 'vehicle_state_media_info_now_playing_elapsed': 0, + 'vehicle_state_media_info_now_playing_source': '', + 'vehicle_state_media_info_now_playing_station': '', + 'vehicle_state_media_info_now_playing_title': '', + 'vehicle_state_media_state_remote_control_enabled': False, + 'vehicle_state_notifications_supported': True, + 'vehicle_state_odometer': 5454.495383, + 'vehicle_state_parsed_calendar_supported': True, + 'vehicle_state_pf': 0, + 'vehicle_state_pr': 0, + 'vehicle_state_rd_window': 0, + 'vehicle_state_remote_start': False, + 'vehicle_state_remote_start_enabled': True, + 'vehicle_state_remote_start_supported': True, + 'vehicle_state_rp_window': 0, + 'vehicle_state_rt': 0, + 'vehicle_state_santa_mode': 0, + 'vehicle_state_sentry_mode': False, + 'vehicle_state_sentry_mode_available': True, + 'vehicle_state_service_mode': False, + 'vehicle_state_service_mode_plus': False, + 'vehicle_state_software_update_download_perc': 100, + 'vehicle_state_software_update_expected_duration_sec': 2700, + 'vehicle_state_software_update_install_perc': 1, + 'vehicle_state_software_update_status': 'available', + 'vehicle_state_software_update_version': '2023.44.30.4', + 'vehicle_state_speed_limit_mode_active': False, + 'vehicle_state_speed_limit_mode_current_limit_mph': 74.564543, + 'vehicle_state_speed_limit_mode_max_limit_mph': 120, + 'vehicle_state_speed_limit_mode_min_limit_mph': 50, + 
'vehicle_state_speed_limit_mode_pin_code_set': True, + 'vehicle_state_timestamp': 1701139037461, + 'vehicle_state_tpms_hard_warning_fl': False, + 'vehicle_state_tpms_hard_warning_fr': False, + 'vehicle_state_tpms_hard_warning_rl': False, + 'vehicle_state_tpms_hard_warning_rr': False, + 'vehicle_state_tpms_last_seen_pressure_time_fl': 1701062077, + 'vehicle_state_tpms_last_seen_pressure_time_fr': 1701062047, + 'vehicle_state_tpms_last_seen_pressure_time_rl': 1701062077, + 'vehicle_state_tpms_last_seen_pressure_time_rr': 1701062047, + 'vehicle_state_tpms_pressure_fl': 2.975, + 'vehicle_state_tpms_pressure_fr': 2.975, + 'vehicle_state_tpms_pressure_rl': 2.95, + 'vehicle_state_tpms_pressure_rr': 2.95, + 'vehicle_state_tpms_rcp_front_value': 2.9, + 'vehicle_state_tpms_rcp_rear_value': 2.9, + 'vehicle_state_tpms_soft_warning_fl': False, + 'vehicle_state_tpms_soft_warning_fr': False, + 'vehicle_state_tpms_soft_warning_rl': False, + 'vehicle_state_tpms_soft_warning_rr': False, + 'vehicle_state_valet_mode': False, + 'vehicle_state_valet_pin_needed': False, + 'vehicle_state_vehicle_name': 'Test', + 'vehicle_state_vehicle_self_test_progress': 0, + 'vehicle_state_vehicle_self_test_requested': False, + 'vehicle_state_webcam_available': True, + 'vin': '**REDACTED**', + }), + }), + ]), + }) +# --- diff --git a/tests/components/tessie/snapshots/test_lock.ambr b/tests/components/tessie/snapshots/test_lock.ambr index 1eff418b202..cea2bebbddb 100644 --- a/tests/components/tessie/snapshots/test_lock.ambr +++ b/tests/components/tessie/snapshots/test_lock.ambr @@ -93,51 +93,3 @@ 'state': 'locked', }) # --- -# name: test_locks[lock.test_speed_limit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'lock', - 'entity_category': None, - 'entity_id': 'lock.test_speed_limit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, 
- 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Speed limit', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'vehicle_state_speed_limit_mode_active', - 'unique_id': 'VINVINVIN-vehicle_state_speed_limit_mode_active', - 'unit_of_measurement': None, - }) -# --- -# name: test_locks[lock.test_speed_limit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'code_format': '^\\d\\d\\d\\d$', - 'friendly_name': 'Test Speed limit', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lock.test_speed_limit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unlocked', - }) -# --- diff --git a/tests/components/tessie/snapshots/test_number.ambr b/tests/components/tessie/snapshots/test_number.ambr index c91fb74adeb..6e641bdf5b7 100644 --- a/tests/components/tessie/snapshots/test_number.ambr +++ b/tests/components/tessie/snapshots/test_number.ambr @@ -1,4 +1,120 @@ # serializer version: 1 +# name: test_numbers[number.energy_site_backup_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.energy_site_backup_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-alert', + 'original_name': 'Backup reserve', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_reserve_percent', + 'unique_id': '123456-backup_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_numbers[number.energy_site_backup_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Backup reserve', + 'icon': 'mdi:battery-alert', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.energy_site_backup_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_numbers[number.energy_site_off_grid_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.energy_site_off_grid_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-unknown', + 'original_name': 'Off grid reserve', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_grid_vehicle_charging_reserve_percent', + 'unique_id': '123456-off_grid_vehicle_charging_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_numbers[number.energy_site_off_grid_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Off grid reserve', + 'icon': 'mdi:battery-unknown', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.energy_site_off_grid_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_numbers[number.test_charge_current-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git 
a/tests/components/tessie/snapshots/test_select.ambr b/tests/components/tessie/snapshots/test_select.ambr index fc076aabf14..acc1946aab5 100644 --- a/tests/components/tessie/snapshots/test_select.ambr +++ b/tests/components/tessie/snapshots/test_select.ambr @@ -1,4 +1,236 @@ # serializer version: 1 +# name: test_select[select.energy_site_allow_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.energy_site_allow_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow export', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_customer_preferred_export_rule', + 'unique_id': '123456-components_customer_preferred_export_rule', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.energy_site_allow_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Allow export', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.energy_site_allow_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'pv_only', + }) +# --- +# name: test_select[select.energy_site_operation_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.energy_site_operation_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, 
+ 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Operation mode', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'default_real_mode', + 'unique_id': '123456-default_real_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.energy_site_operation_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Operation mode', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.energy_site_operation_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'self_consumption', + }) +# --- +# name: test_select[select.test_seat_cooler_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_cooler_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat cooler left', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_fan_front_left', + 'unique_id': 'VINVINVIN-climate_state_seat_fan_front_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_cooler_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat cooler left', + 'options': list([ + , + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_cooler_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_select[select.test_seat_cooler_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_cooler_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat cooler right', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_fan_front_right', + 'unique_id': 'VINVINVIN-climate_state_seat_fan_front_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_cooler_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat cooler right', + 'options': list([ + , + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_cooler_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_select[select.test_seat_heater_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_sensor.ambr b/tests/components/tessie/snapshots/test_sensor.ambr index 48beab6133c..0a5ff4603aa 100644 --- a/tests/components/tessie/snapshots/test_sensor.ambr +++ b/tests/components/tessie/snapshots/test_sensor.ambr @@ -1,4 +1,562 @@ # serializer version: 1 +# name: test_sensors[sensor.energy_site_battery_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.energy_site_battery_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery power', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_power', + 'unique_id': '123456-battery_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_battery_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Battery power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.06', + }) +# --- +# name: test_sensors[sensor.energy_site_energy_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.energy_site_energy_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy left', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_left', + 'unique_id': '123456-energy_left', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_energy_left-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Energy left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_energy_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.8964736842105', + }) +# --- +# name: test_sensors[sensor.energy_site_generator_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_generator_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Generator power', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'generator_power', + 'unique_id': '123456-generator_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_generator_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Generator power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_generator_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.energy_site_grid_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid power', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_power', + 'unique_id': '123456-grid_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_services_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid services power', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_power', + 'unique_id': '123456-grid_services_power', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[sensor.energy_site_grid_services_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid services power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_load_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_load_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Load power', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'load_power', + 'unique_id': '123456-load_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_load_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Load power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_load_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.245', + }) +# --- +# name: test_sensors[sensor.energy_site_percentage_charged-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_percentage_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Percentage charged', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'percentage_charged', + 'unique_id': '123456-percentage_charged', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.energy_site_percentage_charged-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Percentage charged', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_percentage_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95.5053740373966', + }) +# --- +# name: test_sensors[sensor.energy_site_solar_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_solar_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Solar power', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'solar_power', + 'unique_id': '123456-solar_power', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[sensor.energy_site_solar_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Solar power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.185', + }) +# --- +# name: test_sensors[sensor.energy_site_total_pack_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.energy_site_total_pack_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total pack energy', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_pack_energy', + 'unique_id': '123456-total_pack_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_total_pack_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Total pack energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_total_pack_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.727', + }) +# --- +# name: test_sensors[sensor.energy_site_vpp_backup_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.energy_site_vpp_backup_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VPP backup reserve', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vpp_backup_reserve_percent', + 'unique_id': '123456-vpp_backup_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.energy_site_vpp_backup_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site VPP backup reserve', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_vpp_backup_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- # name: test_sensors[sensor.test_battery_level-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -592,42 +1150,6 @@ 'state': 'Giga Texas', }) # --- -# name: test_sensors[sensor.test_distance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_distance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Distance', - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'charge_state_est_battery_range', - 'unique_id': 'VINVINVIN-charge_state_est_battery_range', - 'unit_of_measurement': , - }) -# --- # name: 
test_sensors[sensor.test_distance_to_arrival-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1544,3 +2066,353 @@ 'state': '0', }) # --- +# name: test_sensors[sensor.wall_connector_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_power', + 'unique_id': '123456-abd-123-wall_connector_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.wall_connector_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_power_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_power_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_power', + 'unique_id': '123456-bcd-234-wall_connector_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.wall_connector_power_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'booting', + 'charging', + 'disconnected', + 'connected', + 'scheduled', + 'negotiating', + 'error', + 'charging_finished', + 'waiting_car', + 'charging_reduced', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_state', + 'unique_id': '123456-abd-123-wall_connector_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Wall Connector State', + 'options': list([ + 'booting', + 
'charging', + 'disconnected', + 'connected', + 'scheduled', + 'negotiating', + 'error', + 'charging_finished', + 'waiting_car', + 'charging_reduced', + ]), + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disconnected', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'booting', + 'charging', + 'disconnected', + 'connected', + 'scheduled', + 'negotiating', + 'error', + 'charging_finished', + 'waiting_car', + 'charging_reduced', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_state_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_state', + 'unique_id': '123456-bcd-234-wall_connector_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_state_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Wall Connector State', + 'options': list([ + 'booting', + 'charging', + 'disconnected', + 'connected', + 'scheduled', + 'negotiating', + 'error', + 'charging_finished', + 'waiting_car', + 'charging_reduced', + ]), + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disconnected', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_vehicle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Vehicle', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vin', + 'unique_id': '123456-abd-123-vin', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_vehicle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_vehicle_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Vehicle', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vin', + 'unique_id': '123456-bcd-234-vin', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_vehicle_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + 
}) +# --- diff --git a/tests/components/tessie/snapshots/test_switch.ambr b/tests/components/tessie/snapshots/test_switch.ambr index db06e028198..3b7a3623de8 100644 --- a/tests/components/tessie/snapshots/test_switch.ambr +++ b/tests/components/tessie/snapshots/test_switch.ambr @@ -1,4 +1,96 @@ # serializer version: 1 +# name: test_switches[switch.energy_site_allow_charging_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.energy_site_allow_charging_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow charging from grid', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_disallow_charge_from_grid_with_solar_installed', + 'unique_id': '123456-components_disallow_charge_from_grid_with_solar_installed', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch.energy_site_allow_charging_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Allow charging from grid', + }), + 'context': , + 'entity_id': 'switch.energy_site_allow_charging_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[switch.energy_site_storm_watch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.energy_site_storm_watch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': 
, + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Storm watch', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'user_settings_storm_mode_enabled', + 'unique_id': '123456-user_settings_storm_mode_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch.energy_site_storm_watch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Storm watch', + }), + 'context': , + 'entity_id': 'switch.energy_site_storm_watch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_switches[switch.test_charge-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/test_config_flow.py b/tests/components/tessie/test_config_flow.py index f3dc98e6e18..043086971fa 100644 --- a/tests/components/tessie/test_config_flow.py +++ b/tests/components/tessie/test_config_flow.py @@ -67,6 +67,33 @@ async def test_form( assert result2["data"] == TEST_CONFIG +async def test_abort( + hass: HomeAssistant, + mock_config_flow_get_state_of_all_vehicles, + mock_async_setup_entry, +) -> None: + """Test a duplicate entry aborts.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + data=TEST_CONFIG, + ) + mock_entry.add_to_hass(hass) + + result1 = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + TEST_CONFIG, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + @pytest.mark.parametrize( ("side_effect", "error"), [ diff --git a/tests/components/tessie/test_coordinator.py b/tests/components/tessie/test_coordinator.py index c4c1b6d1e72..77b2829b53a 100644 --- 
a/tests/components/tessie/test_coordinator.py +++ b/tests/components/tessie/test_coordinator.py @@ -2,11 +2,17 @@ from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory +from tesla_fleet_api.exceptions import Forbidden, InvalidToken + from homeassistant.components.tessie import PLATFORMS -from homeassistant.components.tessie.coordinator import TESSIE_SYNC_INTERVAL +from homeassistant.components.tessie.coordinator import ( + TESSIE_FLEET_API_SYNC_INTERVAL, + TESSIE_SYNC_INTERVAL, +) +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.util.dt import utcnow from .common import ( ERROR_AUTH, @@ -22,60 +28,124 @@ WAIT = timedelta(seconds=TESSIE_SYNC_INTERVAL) async def test_coordinator_online( - hass: HomeAssistant, mock_get_state, mock_get_status + hass: HomeAssistant, mock_get_state, mock_get_status, freezer: FrozenDateTimeFactory ) -> None: """Tests that the coordinator handles online vehicles.""" await setup_platform(hass, PLATFORMS) - async_fire_time_changed(hass, utcnow() + WAIT) + freezer.tick(WAIT) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_get_status.assert_called_once() mock_get_state.assert_called_once() assert hass.states.get("binary_sensor.test_status").state == STATE_ON -async def test_coordinator_asleep(hass: HomeAssistant, mock_get_status) -> None: +async def test_coordinator_asleep( + hass: HomeAssistant, mock_get_status, freezer: FrozenDateTimeFactory +) -> None: """Tests that the coordinator handles asleep vehicles.""" await setup_platform(hass, [Platform.BINARY_SENSOR]) mock_get_status.return_value = TEST_VEHICLE_STATUS_ASLEEP - async_fire_time_changed(hass, utcnow() + WAIT) + freezer.tick(WAIT) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_get_status.assert_called_once() assert 
hass.states.get("binary_sensor.test_status").state == STATE_OFF -async def test_coordinator_clienterror(hass: HomeAssistant, mock_get_status) -> None: +async def test_coordinator_clienterror( + hass: HomeAssistant, mock_get_status, freezer: FrozenDateTimeFactory +) -> None: """Tests that the coordinator handles client errors.""" mock_get_status.side_effect = ERROR_UNKNOWN await setup_platform(hass, [Platform.BINARY_SENSOR]) - async_fire_time_changed(hass, utcnow() + WAIT) + freezer.tick(WAIT) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_get_status.assert_called_once() assert hass.states.get("binary_sensor.test_status").state == STATE_UNAVAILABLE -async def test_coordinator_auth(hass: HomeAssistant, mock_get_status) -> None: - """Tests that the coordinator handles timeout errors.""" +async def test_coordinator_auth( + hass: HomeAssistant, mock_get_status, freezer: FrozenDateTimeFactory +) -> None: + """Tests that the coordinator handles auth errors.""" mock_get_status.side_effect = ERROR_AUTH await setup_platform(hass, [Platform.BINARY_SENSOR]) - async_fire_time_changed(hass, utcnow() + WAIT) + freezer.tick(WAIT) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_get_status.assert_called_once() -async def test_coordinator_connection(hass: HomeAssistant, mock_get_status) -> None: +async def test_coordinator_connection( + hass: HomeAssistant, mock_get_status, freezer: FrozenDateTimeFactory +) -> None: """Tests that the coordinator handles connection errors.""" mock_get_status.side_effect = ERROR_CONNECTION await setup_platform(hass, [Platform.BINARY_SENSOR]) - async_fire_time_changed(hass, utcnow() + WAIT) + freezer.tick(WAIT) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_get_status.assert_called_once() assert hass.states.get("binary_sensor.test_status").state == STATE_UNAVAILABLE + + +async def test_coordinator_live_error( + hass: HomeAssistant, mock_live_status, freezer: FrozenDateTimeFactory 
+) -> None: + """Tests that the energy live coordinator handles fleet errors.""" + + await setup_platform(hass, [Platform.SENSOR]) + + mock_live_status.reset_mock() + mock_live_status.side_effect = Forbidden + freezer.tick(TESSIE_FLEET_API_SYNC_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_live_status.assert_called_once() + assert hass.states.get("sensor.energy_site_solar_power").state == STATE_UNAVAILABLE + + +async def test_coordinator_info_error( + hass: HomeAssistant, mock_site_info, freezer: FrozenDateTimeFactory +) -> None: + """Tests that the energy info coordinator handles fleet errors.""" + + await setup_platform(hass, [Platform.SENSOR]) + + mock_site_info.reset_mock() + mock_site_info.side_effect = Forbidden + freezer.tick(TESSIE_FLEET_API_SYNC_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_site_info.assert_called_once() + assert ( + hass.states.get("sensor.energy_site_vpp_backup_reserve").state + == STATE_UNAVAILABLE + ) + + +async def test_coordinator_live_reauth(hass: HomeAssistant, mock_live_status) -> None: + """Tests that the energy live coordinator handles auth errors.""" + + mock_live_status.side_effect = InvalidToken + entry = await setup_platform(hass, [Platform.SENSOR]) + assert entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_coordinator_info_reauth(hass: HomeAssistant, mock_site_info) -> None: + """Tests that the energy info coordinator handles auth errors.""" + + mock_site_info.side_effect = InvalidToken + entry = await setup_platform(hass, [Platform.SENSOR]) + assert entry.state is ConfigEntryState.SETUP_ERROR diff --git a/tests/components/tessie/test_cover.py b/tests/components/tessie/test_cover.py index b731add10f8..be4dda3ec7b 100644 --- a/tests/components/tessie/test_cover.py +++ b/tests/components/tessie/test_cover.py @@ -42,6 +42,7 @@ async def test_covers( ("cover.test_charge_port_door", "open_unlock_charge_port", "close_charge_port"), 
("cover.test_frunk", "open_front_trunk", False), ("cover.test_trunk", "open_close_rear_trunk", "open_close_rear_trunk"), + ("cover.test_sunroof", "vent_sunroof", "close_sunroof"), ): # Test open windows if openfunc: diff --git a/tests/components/tessie/test_diagnostics.py b/tests/components/tessie/test_diagnostics.py new file mode 100644 index 00000000000..5f60c1a06ca --- /dev/null +++ b/tests/components/tessie/test_diagnostics.py @@ -0,0 +1,23 @@ +"""Test the Tessie Diagnostics.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from .common import setup_platform + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + + entry = await setup_platform(hass) + + diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) + assert diag == snapshot diff --git a/tests/components/tessie/test_init.py b/tests/components/tessie/test_init.py index 81d1d758edf..921ef93b1ae 100644 --- a/tests/components/tessie/test_init.py +++ b/tests/components/tessie/test_init.py @@ -1,5 +1,9 @@ """Test the Tessie init.""" +from unittest.mock import patch + +from tesla_fleet_api.exceptions import TeslaFleetError + from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -44,3 +48,23 @@ async def test_connection_failure( mock_get_state_of_all_vehicles.side_effect = ERROR_CONNECTION entry = await setup_platform(hass) assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_products_error(hass: HomeAssistant) -> None: + """Test init with a fleet error on products.""" + + with patch( + "homeassistant.components.tessie.Tessie.products", side_effect=TeslaFleetError + ): + entry = await setup_platform(hass) + assert entry.state is 
ConfigEntryState.SETUP_RETRY + + +async def test_scopes_error(hass: HomeAssistant) -> None: + """Test init with a fleet error on scopes.""" + + with patch( + "homeassistant.components.tessie.Tessie.scopes", side_effect=TeslaFleetError + ): + entry = await setup_platform(hass) + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/tessie/test_number.py b/tests/components/tessie/test_number.py index 8a3d1a649c7..0fb13779183 100644 --- a/tests/components/tessie/test_number.py +++ b/tests/components/tessie/test_number.py @@ -4,12 +4,16 @@ from unittest.mock import patch from syrupy import SnapshotAssertion -from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import assert_entities, setup_platform +from .common import TEST_RESPONSE, assert_entities, setup_platform async def test_numbers( @@ -29,7 +33,7 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], "value": 16}, + {ATTR_ENTITY_ID: [entity_id], ATTR_VALUE: 16}, blocking=True, ) mock_set_charging_amps.assert_called_once() @@ -42,7 +46,7 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], "value": 80}, + {ATTR_ENTITY_ID: [entity_id], ATTR_VALUE: 80}, blocking=True, ) mock_set_charge_limit.assert_called_once() @@ -55,8 +59,41 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], "value": 60}, + {ATTR_ENTITY_ID: [entity_id], ATTR_VALUE: 60}, blocking=True, ) mock_set_speed_limit.assert_called_once() assert hass.states.get(entity_id).state == "60.0" + + entity_id = 
"number.energy_site_backup_reserve" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.backup", + return_value=TEST_RESPONSE, + ) as call: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 80, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == "80" + call.assert_called_once() + + entity_id = "number.energy_site_off_grid_reserve" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", + return_value=TEST_RESPONSE, + ) as call: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 88}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == "88" + call.assert_called_once() diff --git a/tests/components/tessie/test_select.py b/tests/components/tessie/test_select.py index f9526bf0a47..c78923fbf5b 100644 --- a/tests/components/tessie/test_select.py +++ b/tests/components/tessie/test_select.py @@ -4,12 +4,17 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion +from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode +from tesla_fleet_api.exceptions import UnsupportedVehicle from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.components.tessie.const import TessieSeatHeaterOptions +from homeassistant.components.tessie.const import ( + TessieSeatCoolerOptions, + TessieSeatHeaterOptions, +) from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -27,9 +32,8 @@ async def test_select( assert_entities(hass, entry.entry_id, entity_registry, snapshot) - entity_id = "select.test_seat_heater_left" - # Test changing select + entity_id = "select.test_seat_heater_left" with patch( 
"homeassistant.components.tessie.select.set_seat_heat", return_value=TEST_RESPONSE, @@ -45,14 +49,64 @@ async def test_select( assert mock_set.call_args[1]["level"] == 1 assert hass.states.get(entity_id) == snapshot(name=SERVICE_SELECT_OPTION) + # Test site operation mode + entity_id = "select.energy_site_operation_mode" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.operation", + return_value=TEST_RESPONSE, + ) as call: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: EnergyOperationMode.AUTONOMOUS.value, + }, + blocking=True, + ) + assert (state := hass.states.get(entity_id)) + assert state.state == EnergyOperationMode.AUTONOMOUS.value + call.assert_called_once() + + # Test site export mode + entity_id = "select.energy_site_allow_export" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.grid_import_export", + return_value=TEST_RESPONSE, + ) as call: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: EnergyExportMode.BATTERY_OK.value}, + blocking=True, + ) + assert (state := hass.states.get(entity_id)) + assert state.state == EnergyExportMode.BATTERY_OK.value + call.assert_called_once() + + # Test changing select + entity_id = "select.test_seat_cooler_left" + with patch( + "homeassistant.components.tessie.select.set_seat_cool", + return_value=TEST_RESPONSE, + ) as mock_set: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: [entity_id], ATTR_OPTION: TessieSeatCoolerOptions.LOW}, + blocking=True, + ) + mock_set.assert_called_once() + assert mock_set.call_args[1]["seat"] == "front_left" + assert mock_set.call_args[1]["level"] == 1 + async def test_errors(hass: HomeAssistant) -> None: """Tests unknown error is handled.""" await setup_platform(hass, [Platform.SELECT]) - entity_id = "select.test_seat_heater_left" - # Test setting cover open with 
unknown error + # Test changing vehicle select with unknown error with ( patch( "homeassistant.components.tessie.select.set_seat_heat", @@ -63,8 +117,31 @@ async def test_errors(hass: HomeAssistant) -> None: await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: [entity_id], ATTR_OPTION: TessieSeatHeaterOptions.LOW}, + { + ATTR_ENTITY_ID: ["select.test_seat_heater_left"], + ATTR_OPTION: TessieSeatHeaterOptions.LOW, + }, blocking=True, ) mock_set.assert_called_once() assert error.value.__cause__ == ERROR_UNKNOWN + + # Test changing energy select with unknown error + with ( + patch( + "homeassistant.components.tessie.EnergySpecific.operation", + side_effect=UnsupportedVehicle, + ) as mock_set, + pytest.raises(HomeAssistantError) as error, + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: ["select.energy_site_operation_mode"], + ATTR_OPTION: EnergyOperationMode.AUTONOMOUS.value, + }, + blocking=True, + ) + mock_set.assert_called_once() + assert isinstance(error.value.__cause__, UnsupportedVehicle) diff --git a/tests/components/tessie/test_switch.py b/tests/components/tessie/test_switch.py index 907be29ddcc..499e529b2e8 100644 --- a/tests/components/tessie/test_switch.py +++ b/tests/components/tessie/test_switch.py @@ -2,6 +2,7 @@ from unittest.mock import patch +import pytest from syrupy import SnapshotAssertion from homeassistant.components.switch import ( @@ -9,11 +10,11 @@ from homeassistant.components.switch import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import assert_entities, setup_platform +from .common import RESPONSE_OK, assert_entities, setup_platform async def test_switches( @@ -52,3 +53,56 @@ async def test_switches( 
mock_stop_charging.assert_called_once() assert hass.states.get(entity_id) == snapshot(name=SERVICE_TURN_OFF) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("name", "on", "off"), + [ + ( + "energy_site_storm_watch", + "EnergySpecific.storm_mode", + "EnergySpecific.storm_mode", + ), + ( + "energy_site_allow_charging_from_grid", + "EnergySpecific.grid_import_export", + "EnergySpecific.grid_import_export", + ), + ], +) +async def test_switch_services( + hass: HomeAssistant, name: str, on: str, off: str +) -> None: + """Tests that the switch service calls work.""" + + await setup_platform(hass, [Platform.SWITCH]) + + entity_id = f"switch.{name}" + with patch( + f"homeassistant.components.teslemetry.{on}", + return_value=RESPONSE_OK, + ) as call: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == STATE_ON + call.assert_called_once() + + with patch( + f"homeassistant.components.teslemetry.{off}", + return_value=RESPONSE_OK, + ) as call: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + call.assert_called_once() diff --git a/tests/components/tibber/conftest.py b/tests/components/tibber/conftest.py index fc6596444c5..0b48531bde1 100644 --- a/tests/components/tibber/conftest.py +++ b/tests/components/tibber/conftest.py @@ -27,7 +27,7 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture async def mock_tibber_setup( config_entry: MockConfigEntry, hass: HomeAssistant -) -> AsyncGenerator[None, MagicMock]: +) -> AsyncGenerator[MagicMock]: """Mock tibber entry setup.""" unique_user_id = "unique_user_id" title = "title" diff --git a/tests/components/tibber/test_services.py b/tests/components/tibber/test_services.py index 
fe437e421d7..e9bee3ba31f 100644 --- a/tests/components/tibber/test_services.py +++ b/tests/components/tibber/test_services.py @@ -4,6 +4,7 @@ import asyncio import datetime as dt from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.tibber.const import DOMAIN @@ -11,11 +12,12 @@ from homeassistant.components.tibber.services import PRICE_SERVICE_NAME, __get_p from homeassistant.core import ServiceCall from homeassistant.exceptions import ServiceValidationError +STARTTIME = dt.datetime.fromtimestamp(1615766400) + def generate_mock_home_data(): """Create mock data from the tibber connection.""" - today = remove_microseconds(dt.datetime.now()) - tomorrow = remove_microseconds(today + dt.timedelta(days=1)) + tomorrow = STARTTIME + dt.timedelta(days=1) mock_homes = [ MagicMock( name="first_home", @@ -26,13 +28,13 @@ def generate_mock_home_data(): "priceInfo": { "today": [ { - "startsAt": today.isoformat(), + "startsAt": STARTTIME.isoformat(), "total": 0.46914, "level": "VERY_EXPENSIVE", }, { "startsAt": ( - today + dt.timedelta(hours=1) + STARTTIME + dt.timedelta(hours=1) ).isoformat(), "total": 0.46914, "level": "VERY_EXPENSIVE", @@ -67,13 +69,13 @@ def generate_mock_home_data(): "priceInfo": { "today": [ { - "startsAt": today.isoformat(), + "startsAt": STARTTIME.isoformat(), "total": 0.46914, "level": "VERY_EXPENSIVE", }, { "startsAt": ( - today + dt.timedelta(hours=1) + STARTTIME + dt.timedelta(hours=1) ).isoformat(), "total": 0.46914, "level": "VERY_EXPENSIVE", @@ -119,19 +121,16 @@ def create_mock_hass(): return mock_hass -def remove_microseconds(dt): - """Remove microseconds from a datetime object.""" - return dt.replace(microsecond=0) - - -async def test_get_prices(): +async def test_get_prices( + freezer: FrozenDateTimeFactory, +) -> None: """Test __get_prices with mock data.""" - today = remove_microseconds(dt.datetime.now()) - tomorrow = remove_microseconds(dt.datetime.now() + 
dt.timedelta(days=1)) + freezer.move_to(STARTTIME) + tomorrow = STARTTIME + dt.timedelta(days=1) call = ServiceCall( DOMAIN, PRICE_SERVICE_NAME, - {"start": today.date().isoformat(), "end": tomorrow.date().isoformat()}, + {"start": STARTTIME.date().isoformat(), "end": tomorrow.date().isoformat()}, ) result = await __get_prices(call, hass=create_mock_hass()) @@ -140,24 +139,24 @@ async def test_get_prices(): "prices": { "first_home": [ { - "start_time": today, + "start_time": STARTTIME, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": today + dt.timedelta(hours=1), + "start_time": STARTTIME + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": today, + "start_time": STARTTIME, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": today + dt.timedelta(hours=1), + "start_time": STARTTIME + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, @@ -166,9 +165,11 @@ async def test_get_prices(): } -async def test_get_prices_no_input(): +async def test_get_prices_no_input( + freezer: FrozenDateTimeFactory, +) -> None: """Test __get_prices with no input.""" - today = remove_microseconds(dt.datetime.now()) + freezer.move_to(STARTTIME) call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {}) result = await __get_prices(call, hass=create_mock_hass()) @@ -177,24 +178,24 @@ async def test_get_prices_no_input(): "prices": { "first_home": [ { - "start_time": today, + "start_time": STARTTIME, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": today + dt.timedelta(hours=1), + "start_time": STARTTIME + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": today, + "start_time": STARTTIME, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": today + dt.timedelta(hours=1), + "start_time": STARTTIME + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, @@ -203,9 +204,12 @@ async def 
test_get_prices_no_input(): } -async def test_get_prices_start_tomorrow(): +async def test_get_prices_start_tomorrow( + freezer: FrozenDateTimeFactory, +) -> None: """Test __get_prices with start date tomorrow.""" - tomorrow = remove_microseconds(dt.datetime.now() + dt.timedelta(days=1)) + freezer.move_to(STARTTIME) + tomorrow = STARTTIME + dt.timedelta(days=1) call = ServiceCall( DOMAIN, PRICE_SERVICE_NAME, {"start": tomorrow.date().isoformat()} ) @@ -242,7 +246,7 @@ async def test_get_prices_start_tomorrow(): } -async def test_get_prices_invalid_input(): +async def test_get_prices_invalid_input() -> None: """Test __get_prices with invalid input.""" call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {"start": "test"}) diff --git a/tests/components/tile/conftest.py b/tests/components/tile/conftest.py index e3b55c49ae7..01a711d9261 100644 --- a/tests/components/tile/conftest.py +++ b/tests/components/tile/conftest.py @@ -1,6 +1,8 @@ """Define test fixtures for Tile.""" +from collections.abc import Generator import json +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -8,6 +10,7 @@ from pytile.tile import Tile from homeassistant.components.tile.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -16,7 +19,7 @@ TEST_USERNAME = "user@host.com" @pytest.fixture(name="api") -def api_fixture(hass, data_tile_details): +def api_fixture(data_tile_details: dict[str, Any]) -> Mock: """Define a pytile API object.""" tile = Tile(None, data_tile_details) tile.async_update = AsyncMock() @@ -29,7 +32,9 @@ def api_fixture(hass, data_tile_details): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry(domain=DOMAIN, 
unique_id=config[CONF_USERNAME], data=config) entry.add_to_hass(hass) @@ -37,7 +42,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_USERNAME: TEST_USERNAME, @@ -52,7 +57,7 @@ def data_tile_details_fixture(): @pytest.fixture(name="mock_pytile") -async def mock_pytile_fixture(api): +def mock_pytile_fixture(api: Mock) -> Generator[None]: """Define a fixture to patch pytile.""" with ( patch( @@ -64,7 +69,9 @@ async def mock_pytile_fixture(api): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_pytile): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_pytile: None +) -> None: """Define a fixture to set up tile.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/time_date/conftest.py b/tests/components/time_date/conftest.py index 4bcaa887b6f..7841b6d0b83 100644 --- a/tests/components/time_date/conftest.py +++ b/tests/components/time_date/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Time & Date integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/tod/test_binary_sensor.py b/tests/components/tod/test_binary_sensor.py index c4b28b527cb..b4b6b13d8e3 100644 --- a/tests/components/tod/test_binary_sensor.py +++ b/tests/components/tod/test_binary_sensor.py @@ -1,6 +1,6 @@ """Test Times of the Day Binary Sensor.""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, tzinfo from freezegun.api import FrozenDateTimeFactory import pytest @@ -16,13 +16,13 @@ from tests.common import assert_setup_component, async_fire_time_changed @pytest.fixture -def hass_time_zone(): +def 
hass_time_zone() -> str: """Return default hass timezone.""" return "US/Pacific" @pytest.fixture(autouse=True) -async def setup_fixture(hass, hass_time_zone): +async def setup_fixture(hass: HomeAssistant, hass_time_zone: str) -> None: """Set up things to be run when tests are started.""" hass.config.latitude = 50.27583 hass.config.longitude = 18.98583 @@ -30,7 +30,7 @@ async def setup_fixture(hass, hass_time_zone): @pytest.fixture -def hass_tz_info(hass): +def hass_tz_info(hass: HomeAssistant) -> tzinfo | None: """Return timezone info for the hass timezone.""" return dt_util.get_time_zone(hass.config.time_zone) diff --git a/tests/components/todo/test_init.py b/tests/components/todo/test_init.py index 5999b4b9fbe..b62505b14b4 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -1,26 +1,33 @@ """Tests for the todo integration.""" +from collections.abc import Generator import datetime from typing import Any from unittest.mock import AsyncMock import zoneinfo import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant.components import conversation from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, DOMAIN, TodoItem, TodoItemStatus, TodoListEntity, TodoListEntityFeature, + TodoServices, intent as todo_intent, ) from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow -from homeassistant.const import Platform +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import intent @@ -230,11 +237,11 @@ async def test_list_todo_items( [ ({}, [ITEM_1, ITEM_2]), ( - {"status": [TodoItemStatus.COMPLETED, 
TodoItemStatus.NEEDS_ACTION]}, + {ATTR_STATUS: [TodoItemStatus.COMPLETED, TodoItemStatus.NEEDS_ACTION]}, [ITEM_1, ITEM_2], ), - ({"status": [TodoItemStatus.NEEDS_ACTION]}, [ITEM_1]), - ({"status": [TodoItemStatus.COMPLETED]}, [ITEM_2]), + ({ATTR_STATUS: [TodoItemStatus.NEEDS_ACTION]}, [ITEM_1]), + ({ATTR_STATUS: [TodoItemStatus.COMPLETED]}, [ITEM_2]), ], ) async def test_get_items_service( @@ -251,13 +258,13 @@ async def test_get_items_service( state = hass.states.get("todo.entity1") assert state assert state.state == "1" - assert state.attributes == {"supported_features": 15} + assert state.attributes == {ATTR_SUPPORTED_FEATURES: 15} result = await hass.services.async_call( DOMAIN, - "get_items", + TodoServices.GET_ITEMS, service_data, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, return_response=True, ) @@ -297,9 +304,9 @@ async def test_add_item_service( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -324,9 +331,9 @@ async def test_add_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -335,21 +342,21 @@ async def test_add_item_service_raises( ("item_data", "expected_exception", "expected_error"), [ ({}, vol.Invalid, "required key not provided"), - ({"item": ""}, vol.Invalid, "length of value must be at least 1"), + ({ATTR_ITEM: ""}, vol.Invalid, "length of value must be at least 1"), ( - {"item": "Submit forms", "description": "Submit tax forms"}, + {ATTR_ITEM: "Submit forms", ATTR_DESCRIPTION: "Submit tax forms"}, ServiceValidationError, "does not support setting field: 
description", ), ( - {"item": "Submit forms", "due_date": "2023-11-17"}, + {ATTR_ITEM: "Submit forms", ATTR_DUE_DATE: "2023-11-17"}, ServiceValidationError, "does not support setting field: due_date", ), ( { - "item": "Submit forms", - "due_datetime": f"2023-11-17T17:00:00{TEST_OFFSET}", + ATTR_ITEM: "Submit forms", + ATTR_DUE_DATETIME: f"2023-11-17T17:00:00{TEST_OFFSET}", }, ServiceValidationError, "does not support setting field: due_datetime", @@ -370,9 +377,9 @@ async def test_add_item_service_invalid_input( with pytest.raises(expected_exception) as exc: await hass.services.async_call( DOMAIN, - "add_item", + TodoServices.ADD_ITEM, item_data, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -384,7 +391,7 @@ async def test_add_item_service_invalid_input( [ ( TodoListEntityFeature.SET_DUE_DATE_ON_ITEM, - {"item": "New item", "due_date": "2023-11-13"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATE: "2023-11-13"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -393,7 +400,10 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": f"2023-11-13T17:00:00{TEST_OFFSET}"}, + { + ATTR_ITEM: "New item", + ATTR_DUE_DATETIME: f"2023-11-13T17:00:00{TEST_OFFSET}", + }, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -402,7 +412,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": "2023-11-13T17:00:00+00:00"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATETIME: "2023-11-13T17:00:00+00:00"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -411,7 +421,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": "2023-11-13"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATETIME: "2023-11-13"}, TodoItem( summary="New item", 
status=TodoItemStatus.NEEDS_ACTION, @@ -420,7 +430,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM, - {"item": "New item", "description": "Submit revised draft"}, + {ATTR_ITEM: "New item", ATTR_DESCRIPTION: "Submit revised draft"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -443,9 +453,9 @@ async def test_add_item_service_extended_fields( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item", **item_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item", **item_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -465,9 +475,9 @@ async def test_update_todo_item_service_by_id( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", ATTR_RENAME: "Updated item", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -490,9 +500,9 @@ async def test_update_todo_item_service_by_id_status_only( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -515,9 +525,9 @@ async def test_update_todo_item_service_by_id_rename( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -540,9 +550,9 @@ async def test_update_todo_item_service_raises( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, 
+ TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -550,9 +560,9 @@ async def test_update_todo_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -567,9 +577,9 @@ async def test_update_todo_item_service_by_summary( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #1", "rename": "Something else", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #1", "rename": "Something else", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -592,9 +602,9 @@ async def test_update_todo_item_service_by_summary_only_status( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #1", "rename": "Something else"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #1", "rename": "Something else"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -618,9 +628,9 @@ async def test_update_todo_item_service_by_summary_not_found( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #7", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #7", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -652,7 +662,7 @@ async def test_update_item_service_invalid_input( DOMAIN, "update_item", item_data, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, 
blocking=True, ) @@ -677,9 +687,9 @@ async def test_update_todo_item_field_unsupported( with pytest.raises(ServiceValidationError, match="does not support"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -733,9 +743,9 @@ async def test_update_todo_item_extended_fields( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -823,9 +833,9 @@ async def test_update_todo_item_extended_fields_overwrite_existing_values( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -845,9 +855,9 @@ async def test_remove_todo_item_service_by_id( await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["1", "2"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["1", "2"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -868,9 +878,9 @@ async def test_remove_todo_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["1", "2"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["1", "2"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -888,9 +898,9 @@ async def test_remove_todo_item_service_invalid_input( ): await hass.services.async_call( DOMAIN, - "remove_item", + TodoServices.REMOVE_ITEM, {}, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, 
blocking=True, ) @@ -905,9 +915,9 @@ async def test_remove_todo_item_service_by_summary( await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["Item #1"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["Item #1"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -927,9 +937,9 @@ async def test_remove_todo_item_service_by_summary_not_found( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["Item #7"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["Item #7"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1035,26 +1045,26 @@ async def test_move_todo_item_service_invalid_input( ("service_name", "payload"), [ ( - "add_item", + TodoServices.ADD_ITEM, { - "item": "New item", + ATTR_ITEM: "New item", }, ), ( - "remove_item", + TodoServices.REMOVE_ITEM, { - "item": ["1"], + ATTR_ITEM: ["1"], }, ), ( - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "1", - "rename": "Updated item", + ATTR_ITEM: "1", + ATTR_RENAME: "Updated item", }, ), ( - "remove_completed_items", + TodoServices.REMOVE_COMPLETED_ITEMS, None, ), ], @@ -1078,7 +1088,7 @@ async def test_unsupported_service( DOMAIN, service_name, payload, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1131,7 +1141,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "beer"}, "name": {"value": "list 1"}}, + {ATTR_ITEM: {"value": "beer"}, "name": {"value": "list 1"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1147,7 +1157,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "cheese"}, "name": {"value": "List 2"}}, + {ATTR_ITEM: {"value": "cheese"}, "name": {"value": "List 2"}}, 
assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1162,7 +1172,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "wine"}, "name": {"value": "lIST 2"}}, + {ATTR_ITEM: {"value": "wine"}, "name": {"value": "lIST 2"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1224,8 +1234,8 @@ async def test_remove_completed_items_service( await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1238,8 +1248,8 @@ async def test_remove_completed_items_service( # calling service multiple times will not call the entity method await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) test_entity.async_delete_todo_items.assert_not_called() @@ -1257,8 +1267,8 @@ async def test_remove_completed_items_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1423,7 +1433,7 @@ async def test_list_todo_items_extended_fields( DOMAIN, "get_items", {}, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, return_response=True, ) diff --git a/tests/components/todoist/conftest.py b/tests/components/todoist/conftest.py index 386385a0ddb..4b2bfea2e30 100644 --- a/tests/components/todoist/conftest.py +++ b/tests/components/todoist/conftest.py @@ -1,13 +1,13 @@ """Common fixtures for the todoist tests.""" +from collections.abc import Generator from http import 
HTTPStatus from unittest.mock import AsyncMock, patch import pytest from requests.exceptions import HTTPError from requests.models import Response -from todoist_api_python.models import Collaborator, Due, Label, Project, Task -from typing_extensions import Generator +from todoist_api_python.models import Collaborator, Due, Label, Project, Section, Task from homeassistant.components.todoist import DOMAIN from homeassistant.const import CONF_TOKEN, Platform @@ -18,6 +18,7 @@ from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry PROJECT_ID = "project-id-1" +SECTION_ID = "section-id-1" SUMMARY = "A task" TOKEN = "some-token" TODAY = dt_util.now().strftime("%Y-%m-%d") @@ -98,6 +99,14 @@ def mock_api(tasks: list[Task]) -> AsyncMock: view_style="list", ) ] + api.get_sections.return_value = [ + Section( + id=SECTION_ID, + project_id=PROJECT_ID, + name="Section Name", + order=1, + ) + ] api.get_labels.return_value = [ Label(id="1", name="Label1", color="1", order=1, is_favorite=False) ] diff --git a/tests/components/todoist/test_calendar.py b/tests/components/todoist/test_calendar.py index 8ba4da9b2e8..071a14a70ae 100644 --- a/tests/components/todoist/test_calendar.py +++ b/tests/components/todoist/test_calendar.py @@ -18,15 +18,17 @@ from homeassistant.components.todoist.const import ( DOMAIN, LABELS, PROJECT_NAME, + SECTION_NAME, SERVICE_NEW_TASK, ) from homeassistant.const import CONF_TOKEN, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.util import dt as dt_util -from .conftest import PROJECT_ID, SUMMARY +from .conftest import PROJECT_ID, SECTION_ID, SUMMARY from tests.typing import ClientSessionGenerator @@ -269,6 +271,51 @@ async def test_create_task_service_call(hass: HomeAssistant, api: AsyncMock) -> ) +async def 
test_create_task_service_call_raises( + hass: HomeAssistant, api: AsyncMock +) -> None: + """Test adding an item to an invalid project raises an error.""" + + with pytest.raises(ServiceValidationError, match="project_invalid"): + await hass.services.async_call( + DOMAIN, + SERVICE_NEW_TASK, + { + ASSIGNEE: "user", + CONTENT: "task", + LABELS: ["Label1"], + PROJECT_NAME: "Missing Project", + }, + blocking=True, + ) + + +async def test_create_task_service_call_with_section( + hass: HomeAssistant, api: AsyncMock +) -> None: + """Test api is called correctly when section is included.""" + await hass.services.async_call( + DOMAIN, + SERVICE_NEW_TASK, + { + ASSIGNEE: "user", + CONTENT: "task", + LABELS: ["Label1"], + PROJECT_NAME: "Name", + SECTION_NAME: "Section Name", + }, + ) + await hass.async_block_till_done() + + api.add_task.assert_called_with( + "task", + project_id=PROJECT_ID, + section_id=SECTION_ID, + labels=["Label1"], + assignee_id="1", + ) + + @pytest.mark.parametrize( ("due"), [ @@ -366,6 +413,73 @@ async def test_task_due_datetime( assert await response.json() == [] +@pytest.mark.parametrize( + ("todoist_config", "due", "start", "end", "expected_response"), + [ + ( + {"custom_projects": [{"name": "Test", "labels": ["Label1"]}]}, + Due(date="2023-03-30", is_recurring=False, string="Mar 30"), + "2023-03-28T00:00:00.000Z", + "2023-04-01T00:00:00.000Z", + [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})], + ), + ( + {"custom_projects": [{"name": "Test", "labels": ["custom"]}]}, + Due(date="2023-03-30", is_recurring=False, string="Mar 30"), + "2023-03-28T00:00:00.000Z", + "2023-04-01T00:00:00.000Z", + [], + ), + ( + {"custom_projects": [{"name": "Test", "include_projects": ["Name"]}]}, + Due(date="2023-03-30", is_recurring=False, string="Mar 30"), + "2023-03-28T00:00:00.000Z", + "2023-04-01T00:00:00.000Z", + [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})], + ), + ( + {"custom_projects": [{"name": "Test", 
"due_date_days": 1}]}, + Due(date="2023-03-30", is_recurring=False, string="Mar 30"), + "2023-03-28T00:00:00.000Z", + "2023-04-01T00:00:00.000Z", + [get_events_response({"date": "2023-03-30"}, {"date": "2023-03-31"})], + ), + ( + {"custom_projects": [{"name": "Test", "due_date_days": 1}]}, + Due( + date=(dt_util.now() + timedelta(days=2)).strftime("%Y-%m-%d"), + is_recurring=False, + string="Mar 30", + ), + dt_util.now().isoformat(), + (dt_util.now() + timedelta(days=5)).isoformat(), + [], + ), + ], + ids=[ + "in_labels_whitelist", + "not_in_labels_whitelist", + "in_include_projects", + "in_due_date_days", + "not_in_due_date_days", + ], +) +async def test_events_filtered_for_custom_projects( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + start: str, + end: str, + expected_response: dict[str, Any], +) -> None: + """Test we filter out tasks from custom projects based on their config.""" + client = await hass_client() + response = await client.get( + get_events_url("calendar.test", start, end), + ) + assert response.status == HTTPStatus.OK + assert await response.json() == expected_response + + @pytest.mark.parametrize( ("due", "setup_platform"), [ diff --git a/tests/components/todoist/test_todo.py b/tests/components/todoist/test_todo.py index 2aabfcc5755..1c2da67fb02 100644 --- a/tests/components/todoist/test_todo.py +++ b/tests/components/todoist/test_todo.py @@ -6,8 +6,17 @@ from unittest.mock import AsyncMock import pytest from todoist_api_python.models import Due, Task -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -86,7 +95,7 
@@ async def test_todo_item_state( ), ( [], - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ make_api_task( id="task-id-1", @@ -105,7 +114,7 @@ async def test_todo_item_state( ), ( [], - {"due_datetime": "2023-11-18T06:30:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"}, [ make_api_task( id="task-id-1", @@ -132,7 +141,7 @@ async def test_todo_item_state( ), ( [], - {"description": "6-pack"}, + {ATTR_DESCRIPTION: "6-pack"}, [ make_api_task( id="task-id-1", @@ -173,9 +182,9 @@ async def test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda", **item_data}, - target={"entity_id": "todo.name"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda", **item_data}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) @@ -190,9 +199,9 @@ async def test_add_todo_list_item( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": "todo.name"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, return_response=True, ) @@ -223,9 +232,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", "status": "completed"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "task-id-1", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.close_task.called @@ -246,9 +255,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", "status": "needs_action"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "task-id-1", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.reopen_task.called @@ -274,7 +283,7 @@ async def test_update_todo_item_status( description="desc", ) ], - {"rename": "Milk"}, + {ATTR_RENAME: "Milk"}, [ make_api_task( id="task-id-1", @@ -298,7 +307,7 @@ async 
def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ make_api_task( id="task-id-1", @@ -322,7 +331,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"due_datetime": "2023-11-18T06:30:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"}, [ make_api_task( id="task-id-1", @@ -351,7 +360,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"description": "6-pack"}, + {ATTR_DESCRIPTION: "6-pack"}, [ make_api_task( id="task-id-1", @@ -382,7 +391,7 @@ async def test_update_todo_item_status( is_completed=False, ) ], - {"description": None}, + {ATTR_DESCRIPTION: None}, [ make_api_task( id="task-id-1", @@ -415,7 +424,7 @@ async def test_update_todo_item_status( due=Due(date="2024-01-01", is_recurring=True, string="every day"), ) ], - {"due_date": "2024-02-01"}, + {ATTR_DUE_DATE: "2024-02-01"}, [ make_api_task( id="task-id-1", @@ -472,9 +481,9 @@ async def test_update_todo_items( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", **update_data}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "task-id-1", **update_data}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.update_task.called @@ -484,9 +493,9 @@ async def test_update_todo_items( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": "todo.name"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, return_response=True, ) @@ -519,9 +528,9 @@ async def test_remove_todo_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["task-id-1", "task-id-2"]}, - target={"entity_id": "todo.name"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["task-id-1", "task-id-2"]}, + target={ATTR_ENTITY_ID: 
"todo.name"}, blocking=True, ) assert api.delete_task.call_count == 2 @@ -575,9 +584,9 @@ async def test_subscribe( ] await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "Cheese", "rename": "Wine"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Cheese", ATTR_RENAME: "Wine"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) diff --git a/tests/components/tomato/test_device_tracker.py b/tests/components/tomato/test_device_tracker.py index 099a2c2b40a..9484d3393d7 100644 --- a/tests/components/tomato/test_device_tracker.py +++ b/tests/components/tomato/test_device_tracker.py @@ -25,7 +25,7 @@ def mock_session_response(*args, **kwargs): """Mock data generation for session response.""" class MockSessionResponse: - def __init__(self, text, status_code): + def __init__(self, text, status_code) -> None: self.text = text self.status_code = status_code diff --git a/tests/components/tomorrowio/snapshots/test_weather.ambr b/tests/components/tomorrowio/snapshots/test_weather.ambr index fe65925e4c7..6278b50b7f7 100644 --- a/tests/components/tomorrowio/snapshots/test_weather.ambr +++ b/tests/components/tomorrowio/snapshots/test_weather.ambr @@ -735,1126 +735,6 @@ }), ]) # --- -# name: test_v4_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 
'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, - 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 
'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }) -# --- -# name: test_v4_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 
15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 
'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T10:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }) -# --- -# name: test_v4_forecast_service[forecast] - dict({ - 
'weather.tomorrow_io_daily': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, - 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': 
'2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }), - }) -# --- -# name: test_v4_forecast_service[forecast].1 - dict({ - 'weather.tomorrow_io_daily': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': 
'2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 
'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T10:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 
'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }), - }) -# --- -# name: test_v4_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 
'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, - 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 
'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }) -# --- -# name: test_v4_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 
'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T10:48:00+00:00', - 
'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }) -# --- # name: test_v4_forecast_service[get_forecasts] dict({ 'weather.tomorrow_io_daily': dict({ diff --git a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr index 54089c6f192..81cfecbc530 100644 --- a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr +++ b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr @@ -847,6 +847,101 @@ 'state': 'off', }) # --- +# name: test_entity_registry[binary_sensor.test_carbon_monoxide-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_carbon_monoxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon monoxide', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_carbon_monoxide', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_carbon_monoxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_monoxide', + 'friendly_name': 'test Carbon monoxide', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_carbon_monoxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.test_police_emergency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_police_emergency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Police emergency', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'police', + 'unique_id': '123456_police', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_police_emergency-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'test Police emergency', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_police_emergency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_entity_registry[binary_sensor.test_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -895,6 +990,54 @@ 'state': 'off', }) # --- +# name: test_entity_registry[binary_sensor.test_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_smoke', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Smoke', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_smoke', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'test Smoke', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_smoke', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_entity_registry[binary_sensor.test_tamper-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index d1454d12e68..c63ca9139f1 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -1,33 +1,45 @@ """Tests for the TP-Link component.""" +from collections import namedtuple +from datetime import datetime +from 
typing import Any from unittest.mock import AsyncMock, MagicMock, patch from kasa import ( - ConnectionType, + Device, DeviceConfig, - DeviceFamilyType, - EncryptType, - SmartBulb, - SmartDevice, - SmartDimmer, - SmartLightStrip, - SmartPlug, - SmartStrip, + DeviceConnectionParameters, + DeviceEncryptionType, + DeviceFamily, + DeviceType, + Feature, + KasaException, + Module, ) -from kasa.exceptions import SmartDeviceException +from kasa.interfaces import Fan, Light, LightEffect, LightState from kasa.protocol import BaseProtocol +from syrupy import SnapshotAssertion from homeassistant.components.tplink import ( CONF_ALIAS, + CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, CONF_HOST, CONF_MODEL, Credentials, ) from homeassistant.components.tplink.const import DOMAIN +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.translation import async_get_translations +from homeassistant.helpers.typing import UNDEFINED +from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_value_fixture + +ColorTempRange = namedtuple("ColorTempRange", ["min", "max"]) # noqa: PYI024 MODULE = "homeassistant.components.tplink" MODULE_CONFIG_FLOW = "homeassistant.components.tplink.config_flow" @@ -36,38 +48,35 @@ IP_ADDRESS2 = "127.0.0.2" ALIAS = "My Bulb" MODEL = "HS100" MAC_ADDRESS = "aa:bb:cc:dd:ee:ff" +DEVICE_ID = "123456789ABCDEFGH" +DEVICE_ID_MAC = "AA:BB:CC:DD:EE:FF" DHCP_FORMATTED_MAC_ADDRESS = MAC_ADDRESS.replace(":", "") MAC_ADDRESS2 = "11:22:33:44:55:66" DEFAULT_ENTRY_TITLE = f"{ALIAS} {MODEL}" CREDENTIALS_HASH_LEGACY = "" DEVICE_CONFIG_LEGACY = DeviceConfig(IP_ADDRESS) -DEVICE_CONFIG_DICT_LEGACY = DEVICE_CONFIG_LEGACY.to_dict( - credentials_hash=CREDENTIALS_HASH_LEGACY, exclude_credentials=True -) 
+DEVICE_CONFIG_DICT_LEGACY = DEVICE_CONFIG_LEGACY.to_dict(exclude_credentials=True) CREDENTIALS = Credentials("foo", "bar") -CREDENTIALS_HASH_AUTH = "abcdefghijklmnopqrstuv==" -DEVICE_CONFIG_AUTH = DeviceConfig( +CREDENTIALS_HASH_AES = "AES/abcdefghijklmnopqrstuvabcdefghijklmnopqrstuv==" +CREDENTIALS_HASH_KLAP = "KLAP/abcdefghijklmnopqrstuv==" +DEVICE_CONFIG_KLAP = DeviceConfig( IP_ADDRESS, credentials=CREDENTIALS, - connection_type=ConnectionType( - DeviceFamilyType.IotSmartPlugSwitch, EncryptType.Klap + connection_type=DeviceConnectionParameters( + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap ), uses_http=True, ) -DEVICE_CONFIG_AUTH2 = DeviceConfig( +DEVICE_CONFIG_AES = DeviceConfig( IP_ADDRESS2, credentials=CREDENTIALS, - connection_type=ConnectionType( - DeviceFamilyType.IotSmartPlugSwitch, EncryptType.Klap + connection_type=DeviceConnectionParameters( + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes ), uses_http=True, ) -DEVICE_CONFIG_DICT_AUTH = DEVICE_CONFIG_AUTH.to_dict( - credentials_hash=CREDENTIALS_HASH_AUTH, exclude_credentials=True -) -DEVICE_CONFIG_DICT_AUTH2 = DEVICE_CONFIG_AUTH2.to_dict( - credentials_hash=CREDENTIALS_HASH_AUTH, exclude_credentials=True -) +DEVICE_CONFIG_DICT_KLAP = DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True) +DEVICE_CONFIG_DICT_AES = DEVICE_CONFIG_AES.to_dict(exclude_credentials=True) CREATE_ENTRY_DATA_LEGACY = { CONF_HOST: IP_ADDRESS, @@ -76,204 +85,378 @@ CREATE_ENTRY_DATA_LEGACY = { CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, } -CREATE_ENTRY_DATA_AUTH = { +CREATE_ENTRY_DATA_KLAP = { CONF_HOST: IP_ADDRESS, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_KLAP, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, } -CREATE_ENTRY_DATA_AUTH2 = { +CREATE_ENTRY_DATA_AES = { CONF_HOST: IP_ADDRESS2, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH2, + CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AES, + 
CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AES, } +CONNECTION_TYPE_KLAP = DeviceConnectionParameters( + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap +) +CONNECTION_TYPE_KLAP_DICT = CONNECTION_TYPE_KLAP.to_dict() +CONNECTION_TYPE_AES = DeviceConnectionParameters( + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes +) +CONNECTION_TYPE_AES_DICT = CONNECTION_TYPE_AES.to_dict() + + +def _load_feature_fixtures(): + fixtures = load_json_value_fixture("features.json", DOMAIN) + for fixture in fixtures.values(): + if isinstance(fixture["value"], str): + try: + time = datetime.strptime(fixture["value"], "%Y-%m-%d %H:%M:%S.%f%z") + fixture["value"] = time + except ValueError: + pass + return fixtures + + +FEATURES_FIXTURE = _load_feature_fixtures() + + +async def setup_platform_for_device( + hass: HomeAssistant, config_entry: ConfigEntry, platform: Platform, device: Device +): + """Set up a single tplink platform with a device.""" + config_entry.add_to_hass(hass) + + with ( + patch("homeassistant.components.tplink.PLATFORMS", [platform]), + _patch_discovery(device=device), + _patch_connect(device=device), + ): + await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + # Good practice to wait background tasks in tests see PR #112726 + await hass.async_block_till_done(wait_background_tasks=True) + + +async def snapshot_platform( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, + config_entry_id: str, +) -> None: + """Snapshot a platform.""" + device_entries = dr.async_entries_for_config_entry(device_registry, config_entry_id) + assert device_entries + for device_entry in device_entries: + assert device_entry == snapshot( + name=f"{device_entry.name}-entry" + ), f"device entry snapshot failed for {device_entry.name}" + + entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id) + assert entity_entries + assert ( + len({entity_entry.domain for entity_entry in 
entity_entries}) == 1 + ), "Please limit the loaded platforms to 1 platform." + + translations = await async_get_translations(hass, "en", "entity", [DOMAIN]) + for entity_entry in entity_entries: + if entity_entry.translation_key: + key = f"component.{DOMAIN}.entity.{entity_entry.domain}.{entity_entry.translation_key}.name" + assert ( + key in translations + ), f"No translation for entity {entity_entry.unique_id}, expected {key}" + assert entity_entry == snapshot( + name=f"{entity_entry.entity_id}-entry" + ), f"entity entry snapshot failed for {entity_entry.entity_id}" + if entity_entry.disabled_by is None: + state = hass.states.get(entity_entry.entity_id) + assert state, f"State not found for {entity_entry.entity_id}" + assert state == snapshot( + name=f"{entity_entry.entity_id}-state" + ), f"state snapshot failed for {entity_entry.entity_id}" def _mock_protocol() -> BaseProtocol: - protocol = MagicMock(auto_spec=BaseProtocol) + protocol = MagicMock(spec=BaseProtocol) protocol.close = AsyncMock() return protocol -def _mocked_bulb( +def _mocked_device( device_config=DEVICE_CONFIG_LEGACY, credentials_hash=CREDENTIALS_HASH_LEGACY, mac=MAC_ADDRESS, + device_id=DEVICE_ID, alias=ALIAS, -) -> SmartBulb: - bulb = MagicMock(auto_spec=SmartBulb, name="Mocked bulb") - bulb.update = AsyncMock() - bulb.mac = mac - bulb.alias = alias - bulb.model = MODEL - bulb.host = IP_ADDRESS - bulb.brightness = 50 - bulb.color_temp = 4000 - bulb.is_color = True - bulb.is_strip = False - bulb.is_plug = False - bulb.is_dimmer = False - bulb.is_light_strip = False - bulb.has_effects = False - bulb.effect = None - bulb.effect_list = None - bulb.hsv = (10, 30, 5) - bulb.device_id = mac - bulb.valid_temperature_range.min = 4000 - bulb.valid_temperature_range.max = 9000 - bulb.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} - bulb.turn_off = AsyncMock() - bulb.turn_on = AsyncMock() - bulb.set_brightness = AsyncMock() - bulb.set_hsv = AsyncMock() - bulb.set_color_temp = AsyncMock() - bulb.protocol 
= _mock_protocol() - bulb.config = device_config - bulb.credentials_hash = credentials_hash - return bulb + model=MODEL, + ip_address: str | None = None, + modules: list[str] | None = None, + children: list[Device] | None = None, + features: list[str | Feature] | None = None, + device_type=None, + spec: type = Device, +) -> Device: + device = MagicMock(spec=spec, name="Mocked device") + device.update = AsyncMock() + device.turn_off = AsyncMock() + device.turn_on = AsyncMock() + + device.mac = mac + device.alias = alias + device.model = model + device.device_id = device_id + device.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} + device.modules = {} + device.features = {} + + if not ip_address: + ip_address = IP_ADDRESS + else: + device_config.host = ip_address + device.host = ip_address + + if modules: + device.modules = { + module_name: MODULE_TO_MOCK_GEN[module_name](device) + for module_name in modules + } + + if features: + device.features = { + feature_id: _mocked_feature(feature_id, require_fixture=True) + for feature_id in features + if isinstance(feature_id, str) + } + + device.features.update( + { + feature.id: feature + for feature in features + if isinstance(feature, Feature) + } + ) + device.children = [] + if children: + for child in children: + child.mac = mac + device.children = children + device.device_type = device_type if device_type else DeviceType.Unknown + if ( + not device_type + and device.children + and all( + child.device_type is DeviceType.StripSocket for child in device.children + ) + ): + device.device_type = DeviceType.Strip + + device.protocol = _mock_protocol() + device.config = device_config + device.credentials_hash = credentials_hash + return device -class MockedSmartLightStrip(SmartLightStrip): - """Mock a SmartLightStrip.""" +def _mocked_feature( + id: str, + *, + require_fixture=False, + value: Any = UNDEFINED, + name=None, + type_=None, + category=None, + precision_hint=None, + choices=None, + unit=None, + minimum_value=0, + 
maximum_value=2**16, # Arbitrary max +) -> Feature: + """Get a mocked feature. - def __new__(cls, *args, **kwargs): - """Mock a SmartLightStrip that will pass an isinstance check.""" - return MagicMock(spec=cls) + If kwargs are provided they will override the attributes for any features defined in fixtures.json + """ + feature = MagicMock(spec=Feature, name=f"Mocked {id} feature") + feature.id = id + feature.name = name or id.upper() + feature.set_value = AsyncMock() + if not (fixture := FEATURES_FIXTURE.get(id)): + assert ( + require_fixture is False + ), f"No fixture defined for feature {id} and require_fixture is True" + assert ( + value is not UNDEFINED + ), f"Value must be provided if feature {id} not defined in features.json" + fixture = {"value": value, "category": "Primary", "type": "Sensor"} + elif value is not UNDEFINED: + fixture["value"] = value + feature.value = fixture["value"] + + feature.type = type_ or Feature.Type[fixture["type"]] + feature.category = category or Feature.Category[fixture["category"]] + + # sensor + feature.precision_hint = precision_hint or fixture.get("precision_hint") + feature.unit = unit or fixture.get("unit") + + # number + feature.minimum_value = minimum_value or fixture.get("minimum_value") + feature.maximum_value = maximum_value or fixture.get("maximum_value") + + # select + feature.choices = choices or fixture.get("choices") + return feature -def _mocked_smart_light_strip() -> SmartLightStrip: - strip = MockedSmartLightStrip() - strip.update = AsyncMock() - strip.mac = MAC_ADDRESS - strip.alias = ALIAS - strip.model = MODEL - strip.host = IP_ADDRESS - strip.brightness = 50 - strip.color_temp = 4000 - strip.is_color = True - strip.is_strip = False - strip.is_plug = False - strip.is_dimmer = False - strip.is_light_strip = True - strip.has_effects = True - strip.effect = {"name": "Effect1", "enable": 1} - strip.effect_list = ["Effect1", "Effect2"] - strip.hsv = (10, 30, 5) - strip.device_id = MAC_ADDRESS - 
strip.valid_temperature_range.min = 4000 - strip.valid_temperature_range.max = 9000 - strip.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} - strip.turn_off = AsyncMock() - strip.turn_on = AsyncMock() - strip.set_brightness = AsyncMock() - strip.set_hsv = AsyncMock() - strip.set_color_temp = AsyncMock() - strip.set_effect = AsyncMock() - strip.set_custom_effect = AsyncMock() - strip.protocol = _mock_protocol() - strip.config = DEVICE_CONFIG_LEGACY - strip.credentials_hash = CREDENTIALS_HASH_LEGACY - return strip +def _mocked_light_module(device) -> Light: + light = MagicMock(spec=Light, name="Mocked light module") + light.update = AsyncMock() + light.brightness = 50 + light.color_temp = 4000 + light.state = LightState( + light_on=True, brightness=light.brightness, color_temp=light.color_temp + ) + light.is_color = True + light.is_variable_color_temp = True + light.is_dimmable = True + light.is_brightness = True + light.has_effects = False + light.hsv = (10, 30, 5) + light.valid_temperature_range = ColorTempRange(min=4000, max=9000) + light.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} + + async def _set_state(state, *_, **__): + light.state = state + + light.set_state = AsyncMock(wraps=_set_state) + + async def _set_brightness(brightness, *_, **__): + light.state.brightness = brightness + light.state.light_on = brightness > 0 + + light.set_brightness = AsyncMock(wraps=_set_brightness) + + async def _set_hsv(h, s, v, *_, **__): + light.state.hue = h + light.state.saturation = s + light.state.brightness = v + light.state.light_on = True + + light.set_hsv = AsyncMock(wraps=_set_hsv) + + async def _set_color_temp(temp, *_, **__): + light.state.color_temp = temp + light.state.light_on = True + + light.set_color_temp = AsyncMock(wraps=_set_color_temp) + light.protocol = _mock_protocol() + return light -def _mocked_dimmer() -> SmartDimmer: - dimmer = MagicMock(auto_spec=SmartDimmer, name="Mocked dimmer") - dimmer.update = AsyncMock() - dimmer.mac = MAC_ADDRESS - 
dimmer.alias = "My Dimmer" - dimmer.model = MODEL - dimmer.host = IP_ADDRESS - dimmer.brightness = 50 - dimmer.color_temp = 4000 - dimmer.is_color = True - dimmer.is_strip = False - dimmer.is_plug = False - dimmer.is_dimmer = True - dimmer.is_light_strip = False - dimmer.effect = None - dimmer.effect_list = None - dimmer.hsv = (10, 30, 5) - dimmer.device_id = MAC_ADDRESS - dimmer.valid_temperature_range.min = 4000 - dimmer.valid_temperature_range.max = 9000 - dimmer.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} - dimmer.turn_off = AsyncMock() - dimmer.turn_on = AsyncMock() - dimmer.set_brightness = AsyncMock() - dimmer.set_hsv = AsyncMock() - dimmer.set_color_temp = AsyncMock() - dimmer.set_led = AsyncMock() - dimmer.protocol = _mock_protocol() - dimmer.config = DEVICE_CONFIG_LEGACY - dimmer.credentials_hash = CREDENTIALS_HASH_LEGACY - return dimmer +def _mocked_light_effect_module(device) -> LightEffect: + effect = MagicMock(spec=LightEffect, name="Mocked light effect") + effect.has_effects = True + effect.has_custom_effects = True + effect.effect = "Effect1" + effect.effect_list = ["Off", "Effect1", "Effect2"] + + async def _set_effect(effect_name, *_, **__): + assert ( + effect_name in effect.effect_list + ), f"set_effect '{effect_name}' not in {effect.effect_list}" + assert device.modules[ + Module.Light + ], "Need a light module to test set_effect method" + device.modules[Module.Light].state.light_on = True + effect.effect = effect_name + + effect.set_effect = AsyncMock(wraps=_set_effect) + effect.set_custom_effect = AsyncMock() + return effect -def _mocked_plug() -> SmartPlug: - plug = MagicMock(auto_spec=SmartPlug, name="Mocked plug") - plug.update = AsyncMock() - plug.mac = MAC_ADDRESS - plug.alias = "My Plug" - plug.model = MODEL - plug.host = IP_ADDRESS - plug.is_light_strip = False - plug.is_bulb = False - plug.is_dimmer = False - plug.is_strip = False - plug.is_plug = True - plug.device_id = MAC_ADDRESS - plug.hw_info = {"sw_ver": "1.0.0", "hw_ver": 
"1.0.0"} - plug.turn_off = AsyncMock() - plug.turn_on = AsyncMock() - plug.set_led = AsyncMock() - plug.protocol = _mock_protocol() - plug.config = DEVICE_CONFIG_LEGACY - plug.credentials_hash = CREDENTIALS_HASH_LEGACY - return plug +def _mocked_fan_module(effect) -> Fan: + fan = MagicMock(auto_spec=Fan, name="Mocked fan") + fan.fan_speed_level = 0 + fan.set_fan_speed_level = AsyncMock() + return fan -def _mocked_strip() -> SmartStrip: - strip = MagicMock(auto_spec=SmartStrip, name="Mocked strip") - strip.update = AsyncMock() - strip.mac = MAC_ADDRESS - strip.alias = "My Strip" - strip.model = MODEL - strip.host = IP_ADDRESS - strip.is_light_strip = False - strip.is_bulb = False - strip.is_dimmer = False - strip.is_strip = True - strip.is_plug = True - strip.device_id = MAC_ADDRESS - strip.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} - strip.turn_off = AsyncMock() - strip.turn_on = AsyncMock() - strip.set_led = AsyncMock() - strip.protocol = _mock_protocol() - strip.config = DEVICE_CONFIG_LEGACY - strip.credentials_hash = CREDENTIALS_HASH_LEGACY - plug0 = _mocked_plug() - plug0.alias = "Plug0" - plug0.device_id = "bb:bb:cc:dd:ee:ff_PLUG0DEVICEID" - plug0.mac = "bb:bb:cc:dd:ee:ff" +def _mocked_strip_children(features=None, alias=None) -> list[Device]: + plug0 = _mocked_device( + alias="Plug0" if alias is None else alias, + device_id="bb:bb:cc:dd:ee:ff_PLUG0DEVICEID", + mac="bb:bb:cc:dd:ee:ff", + device_type=DeviceType.StripSocket, + features=features, + ) + plug1 = _mocked_device( + alias="Plug1" if alias is None else alias, + device_id="cc:bb:cc:dd:ee:ff_PLUG1DEVICEID", + mac="cc:bb:cc:dd:ee:ff", + device_type=DeviceType.StripSocket, + features=features, + ) plug0.is_on = True - plug0.protocol = _mock_protocol() - plug1 = _mocked_plug() - plug1.device_id = "cc:bb:cc:dd:ee:ff_PLUG1DEVICEID" - plug1.mac = "cc:bb:cc:dd:ee:ff" - plug1.alias = "Plug1" - plug1.protocol = _mock_protocol() plug1.is_on = False - strip.children = [plug0, plug1] - return strip + return 
[plug0, plug1] + + +def _mocked_energy_features( + power=None, total=None, voltage=None, current=None, today=None +) -> list[Feature]: + feats = [] + if power is not None: + feats.append( + _mocked_feature( + "current_consumption", + value=power, + ) + ) + if total is not None: + feats.append( + _mocked_feature( + "consumption_total", + value=total, + ) + ) + if voltage is not None: + feats.append( + _mocked_feature( + "voltage", + value=voltage, + ) + ) + if current is not None: + feats.append( + _mocked_feature( + "current", + value=current, + ) + ) + # Today is always reported as 0 by the library rather than none + feats.append( + _mocked_feature( + "consumption_today", + value=today if today is not None else 0.0, + ) + ) + return feats + + +MODULE_TO_MOCK_GEN = { + Module.Light: _mocked_light_module, + Module.LightEffect: _mocked_light_effect_module, + Module.Fan: _mocked_fan_module, +} def _patch_discovery(device=None, no_device=False): async def _discovery(*args, **kwargs): if no_device: return {} - return {IP_ADDRESS: _mocked_bulb()} + return {IP_ADDRESS: _mocked_device()} return patch("homeassistant.components.tplink.Discover.discover", new=_discovery) @@ -281,8 +464,8 @@ def _patch_discovery(device=None, no_device=False): def _patch_single_discovery(device=None, no_device=False): async def _discover_single(*args, **kwargs): if no_device: - raise SmartDeviceException - return device if device else _mocked_bulb() + raise KasaException + return device if device else _mocked_device() return patch( "homeassistant.components.tplink.Discover.discover_single", new=_discover_single @@ -292,14 +475,14 @@ def _patch_single_discovery(device=None, no_device=False): def _patch_connect(device=None, no_device=False): async def _connect(*args, **kwargs): if no_device: - raise SmartDeviceException - return device if device else _mocked_bulb() + raise KasaException + return device if device else _mocked_device() - return 
patch("homeassistant.components.tplink.SmartDevice.connect", new=_connect) + return patch("homeassistant.components.tplink.Device.connect", new=_connect) async def initialize_config_entry_for_device( - hass: HomeAssistant, dev: SmartDevice + hass: HomeAssistant, dev: Device ) -> MockConfigEntry: """Create a mocked configuration entry for the given device. diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index 88da9b699a7..ee4530575ce 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -1,26 +1,28 @@ """tplink conftest.""" +from collections.abc import Generator import copy from unittest.mock import DEFAULT, AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.tplink import DOMAIN from homeassistant.core import HomeAssistant from . import ( CREATE_ENTRY_DATA_LEGACY, - CREDENTIALS_HASH_AUTH, - DEVICE_CONFIG_AUTH, + CREDENTIALS_HASH_AES, + CREDENTIALS_HASH_KLAP, + DEVICE_CONFIG_AES, + DEVICE_CONFIG_KLAP, IP_ADDRESS, IP_ADDRESS2, MAC_ADDRESS, MAC_ADDRESS2, - _mocked_bulb, + _mocked_device, ) -from tests.common import MockConfigEntry, mock_device_registry, mock_registry +from tests.common import MockConfigEntry @pytest.fixture @@ -31,15 +33,15 @@ def mock_discovery(): discover=DEFAULT, discover_single=DEFAULT, ) as mock_discovery: - device = _mocked_bulb( - device_config=copy.deepcopy(DEVICE_CONFIG_AUTH), - credentials_hash=CREDENTIALS_HASH_AUTH, + device = _mocked_device( + device_config=copy.deepcopy(DEVICE_CONFIG_KLAP), + credentials_hash=CREDENTIALS_HASH_KLAP, alias=None, ) devices = { - "127.0.0.1": _mocked_bulb( - device_config=copy.deepcopy(DEVICE_CONFIG_AUTH), - credentials_hash=CREDENTIALS_HASH_AUTH, + "127.0.0.1": _mocked_device( + device_config=copy.deepcopy(DEVICE_CONFIG_KLAP), + credentials_hash=CREDENTIALS_HASH_KLAP, alias=None, ) } @@ -52,15 +54,18 @@ def mock_discovery(): @pytest.fixture def mock_connect(): """Mock python-kasa 
connect.""" - with patch("homeassistant.components.tplink.SmartDevice.connect") as mock_connect: + with patch("homeassistant.components.tplink.Device.connect") as mock_connect: devices = { - IP_ADDRESS: _mocked_bulb( - device_config=DEVICE_CONFIG_AUTH, credentials_hash=CREDENTIALS_HASH_AUTH + IP_ADDRESS: _mocked_device( + device_config=DEVICE_CONFIG_KLAP, + credentials_hash=CREDENTIALS_HASH_KLAP, + ip_address=IP_ADDRESS, ), - IP_ADDRESS2: _mocked_bulb( - device_config=DEVICE_CONFIG_AUTH, - credentials_hash=CREDENTIALS_HASH_AUTH, + IP_ADDRESS2: _mocked_device( + device_config=DEVICE_CONFIG_AES, + credentials_hash=CREDENTIALS_HASH_AES, mac=MAC_ADDRESS2, + ip_address=IP_ADDRESS2, ), } @@ -72,18 +77,6 @@ def mock_connect(): yield {"connect": mock_connect, "mock_devices": devices} -@pytest.fixture(name="device_reg") -def device_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture(name="entity_reg") -def entity_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_registry(hass) - - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json new file mode 100644 index 00000000000..7cfe979ea25 --- /dev/null +++ b/tests/components/tplink/fixtures/features.json @@ -0,0 +1,287 @@ +{ + "state": { + "value": true, + "type": "Switch", + "category": "Primary" + }, + "led": { + "value": true, + "type": "Switch", + "category": "Config" + }, + "auto_update_enabled": { + "value": true, + "type": "Switch", + "category": "Config" + }, + "auto_off_enabled": { + "value": true, + "type": "Switch", + "category": "Config" + }, + "smooth_transitions": { + "value": true, + "type": "Switch", + "category": "Config" + }, + "frost_protection_enabled": { + "value": true, + "type": "Switch", + "category": "Config" + }, + "fan_sleep_mode": { + "value": false, + 
"type": "Switch", + "category": "Config" + }, + "current_consumption": { + "value": 5.23, + "type": "Sensor", + "category": "Primary", + "unit": "W", + "precision_hint": 1 + }, + "consumption_today": { + "value": 5.23, + "type": "Sensor", + "category": "Info", + "unit": "kWh", + "precision_hint": 3 + }, + "consumption_this_month": { + "value": 15.345, + "type": "Sensor", + "category": "Info", + "unit": "kWh", + "precision_hint": 3 + }, + "consumption_total": { + "value": 30.0049, + "type": "Sensor", + "category": "Info", + "unit": "kWh", + "precision_hint": 3 + }, + "current": { + "value": 5.035, + "type": "Sensor", + "category": "Primary", + "unit": "A", + "precision_hint": 2 + }, + "voltage": { + "value": 121.1, + "type": "Sensor", + "category": "Primary", + "unit": "V", + "precision_hint": 1 + }, + "device_id": { + "value": "94hd2dn298812je12u0931828", + "type": "Sensor", + "category": "Debug" + }, + "signal_level": { + "value": 2, + "type": "Sensor", + "category": "Info" + }, + "rssi": { + "value": -62, + "type": "Sensor", + "category": "Debug" + }, + "ssid": { + "value": "HOMEWIFI", + "type": "Sensor", + "category": "Debug" + }, + "on_since": { + "value": "2024-06-24 10:03:11.046643+01:00", + "type": "Sensor", + "category": "Debug" + }, + "battery_level": { + "value": 85, + "type": "Sensor", + "category": "Info", + "unit": "%" + }, + "auto_off_at": { + "value": "2024-06-24 10:03:11.046643+01:00", + "type": "Sensor", + "category": "Info" + }, + "humidity": { + "value": 12, + "type": "Sensor", + "category": "Primary", + "unit": "%" + }, + "report_interval": { + "value": 16, + "type": "Sensor", + "category": "Debug", + "unit": "%" + }, + "alarm_source": { + "value": "", + "type": "Sensor", + "category": "Debug" + }, + "device_time": { + "value": "2024-06-24 10:03:11.046643+01:00", + "type": "Sensor", + "category": "Debug" + }, + "temperature": { + "value": 19.2, + "type": "Sensor", + "category": "Debug", + "unit": "celsius" + }, + "current_firmware_version": { + 
"value": "1.1.2", + "type": "Sensor", + "category": "Debug" + }, + "available_firmware_version": { + "value": "1.1.3", + "type": "Sensor", + "category": "Debug" + }, + "thermostat_mode": { + "value": "off", + "type": "Sensor", + "category": "Primary" + }, + "overheated": { + "value": false, + "type": "BinarySensor", + "category": "Info" + }, + "battery_low": { + "value": false, + "type": "BinarySensor", + "category": "Debug" + }, + "update_available": { + "value": false, + "type": "BinarySensor", + "category": "Info" + }, + "cloud_connection": { + "value": false, + "type": "BinarySensor", + "category": "Info" + }, + "temperature_warning": { + "value": false, + "type": "BinarySensor", + "category": "Debug" + }, + "humidity_warning": { + "value": false, + "type": "BinarySensor", + "category": "Debug" + }, + "water_alert": { + "value": false, + "type": "BinarySensor", + "category": "Primary" + }, + "is_open": { + "value": false, + "type": "BinarySensor", + "category": "Primary" + }, + "test_alarm": { + "value": "", + "type": "Action", + "category": "Config" + }, + "stop_alarm": { + "value": "", + "type": "Action", + "category": "Config" + }, + "smooth_transition_on": { + "value": false, + "type": "Number", + "category": "Config", + "minimum_value": 0, + "maximum_value": 60 + }, + "smooth_transition_off": { + "value": false, + "type": "Number", + "category": "Config", + "minimum_value": 0, + "maximum_value": 60 + }, + "auto_off_minutes": { + "value": false, + "type": "Number", + "category": "Config", + "unit": "min", + "minimum_value": 0, + "maximum_value": 60 + }, + "temperature_offset": { + "value": false, + "type": "Number", + "category": "Config", + "minimum_value": -10, + "maximum_value": 10 + }, + "target_temperature": { + "value": false, + "type": "Number", + "category": "Primary" + }, + "fan_speed_level": { + "value": 2, + "type": "Number", + "category": "Primary", + "minimum_value": 0, + "maximum_value": 4 + }, + "light_preset": { + "value": "Off", + "type": 
"Choice", + "category": "Config", + "choices": ["Off", "Preset 1", "Preset 2"] + }, + "alarm_sound": { + "value": "Phone Ring", + "type": "Choice", + "category": "Config", + "choices": [ + "Doorbell Ring 1", + "Doorbell Ring 2", + "Doorbell Ring 3", + "Doorbell Ring 4", + "Doorbell Ring 5", + "Doorbell Ring 6", + "Doorbell Ring 7", + "Doorbell Ring 8", + "Doorbell Ring 9", + "Doorbell Ring 10", + "Phone Ring", + "Alarm 1", + "Alarm 2", + "Alarm 3", + "Alarm 4", + "Dripping Tap", + "Alarm 5", + "Connection 1", + "Connection 2" + ] + }, + "alarm_volume": { + "value": "normal", + "type": "Choice", + "category": "Config", + "choices": ["low", "normal", "high"] + } +} diff --git a/tests/components/tplink/snapshots/test_binary_sensor.ambr b/tests/components/tplink/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..cded74da363 --- /dev/null +++ b/tests/components/tplink/snapshots/test_binary_sensor.ambr @@ -0,0 +1,371 @@ +# serializer version: 1 +# name: test_states[binary_sensor.my_device_battery_low-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_device_battery_low', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery low', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_low', + 'unique_id': '123456789ABCDEFGH_battery_low', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_cloud_connection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_device_cloud_connection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloud connection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cloud_connection', + 'unique_id': '123456789ABCDEFGH_cloud_connection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_cloud_connection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'my_device Cloud connection', + }), + 'context': , + 'entity_id': 'binary_sensor.my_device_cloud_connection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[binary_sensor.my_device_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.my_device_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_open', + 'unique_id': '123456789ABCDEFGH_is_open', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'my_device Door', + }), + 'context': , + 'entity_id': 'binary_sensor.my_device_door', + 'last_changed': , + 'last_reported': 
, + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[binary_sensor.my_device_humidity_warning-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_device_humidity_warning', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Humidity warning', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'humidity_warning', + 'unique_id': '123456789ABCDEFGH_humidity_warning', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_moisture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.my_device_moisture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_alert', + 'unique_id': '123456789ABCDEFGH_water_alert', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_moisture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'my_device Moisture', + }), + 'context': , + 'entity_id': 'binary_sensor.my_device_moisture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# 
--- +# name: test_states[binary_sensor.my_device_overheated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_device_overheated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overheated', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overheated', + 'unique_id': '123456789ABCDEFGH_overheated', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_overheated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'my_device Overheated', + }), + 'context': , + 'entity_id': 'binary_sensor.my_device_overheated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[binary_sensor.my_device_temperature_warning-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_device_temperature_warning', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature warning', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_warning', + 'unique_id': '123456789ABCDEFGH_temperature_warning', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_states[binary_sensor.my_device_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_device_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Update', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'update_available', + 'unique_id': '123456789ABCDEFGH_update_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[binary_sensor.my_device_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'update', + 'friendly_name': 'my_device Update', + }), + 'context': , + 'entity_id': 'binary_sensor.my_device_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[my_device-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/tplink/snapshots/test_button.ambr b/tests/components/tplink/snapshots/test_button.ambr new file mode 100644 index 00000000000..d6019861804 --- /dev/null +++ 
b/tests/components/tplink/snapshots/test_button.ambr @@ -0,0 +1,129 @@ +# serializer version: 1 +# name: test_states[button.my_device_stop_alarm-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_stop_alarm', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop alarm', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop_alarm', + 'unique_id': '123456789ABCDEFGH_stop_alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.my_device_stop_alarm-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Stop alarm', + }), + 'context': , + 'entity_id': 'button.my_device_stop_alarm', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[button.my_device_test_alarm-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_test_alarm', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test alarm', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'test_alarm', + 'unique_id': '123456789ABCDEFGH_test_alarm', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_states[button.my_device_test_alarm-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Test alarm', + }), + 'context': , + 'entity_id': 'button.my_device_test_alarm', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[my_device-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/tplink/snapshots/test_climate.ambr b/tests/components/tplink/snapshots/test_climate.ambr new file mode 100644 index 00000000000..ad863fc79ae --- /dev/null +++ b/tests/components/tplink/snapshots/test_climate.ambr @@ -0,0 +1,96 @@ +# serializer version: 1 +# name: test_states[climate.thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 65536, + 'min_temp': None, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.thermostat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 
'unique_id': '123456789ABCDEFGH_climate', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20, + 'friendly_name': 'thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 65536, + 'min_temp': None, + 'supported_features': , + 'temperature': 22, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_states[thermostat-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'thermostat', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/tplink/snapshots/test_fan.ambr b/tests/components/tplink/snapshots/test_fan.ambr new file mode 100644 index 00000000000..1a7392dc63a --- /dev/null +++ b/tests/components/tplink/snapshots/test_fan.ambr @@ -0,0 +1,196 @@ +# serializer version: 1 +# name: test_states[fan.my_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': None, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.my_device', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 
None, + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABCDEFGH', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[fan.my_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device', + 'percentage': None, + 'percentage_step': 25.0, + 'preset_mode': None, + 'preset_modes': None, + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.my_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[fan.my_device_my_fan_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': None, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.my_device_my_fan_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'my_fan_0', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABCDEFGH00', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[fan.my_device_my_fan_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device my_fan_0', + 'percentage': None, + 'percentage_step': 25.0, + 'preset_mode': None, + 'preset_modes': None, + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.my_device_my_fan_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[fan.my_device_my_fan_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': None, + }), + 'config_entry_id': , + 'device_class': None, 
+ 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.my_device_my_fan_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'my_fan_1', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABCDEFGH01', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[fan.my_device_my_fan_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device my_fan_1', + 'percentage': None, + 'percentage_step': 25.0, + 'preset_mode': None, + 'preset_modes': None, + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.my_device_my_fan_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[my_device-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/tplink/snapshots/test_number.ambr b/tests/components/tplink/snapshots/test_number.ambr new file mode 100644 index 00000000000..ee06314ffe3 --- /dev/null +++ b/tests/components/tplink/snapshots/test_number.ambr @@ -0,0 +1,257 @@ +# serializer version: 1 +# name: test_states[my_device-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': 
None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- +# name: test_states[number.my_device_smooth_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.my_device_smooth_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smooth off', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smooth_transition_off', + 'unique_id': '123456789ABCDEFGH_smooth_transition_off', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.my_device_smooth_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Smooth off', + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.my_device_smooth_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'False', + }) +# --- +# name: test_states[number.my_device_smooth_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65536, + 'min': 0, + 
'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.my_device_smooth_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smooth on', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smooth_transition_on', + 'unique_id': '123456789ABCDEFGH_smooth_transition_on', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.my_device_smooth_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Smooth on', + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.my_device_smooth_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'False', + }) +# --- +# name: test_states[number.my_device_temperature_offset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65536, + 'min': -10, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.my_device_temperature_offset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature offset', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_offset', + 'unique_id': '123456789ABCDEFGH_temperature_offset', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.my_device_temperature_offset-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'friendly_name': 'my_device Temperature offset', + 'max': 65536, + 'min': -10, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.my_device_temperature_offset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'False', + }) +# --- +# name: test_states[number.my_device_turn_off_in-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.my_device_turn_off_in', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Turn off in', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_off_minutes', + 'unique_id': '123456789ABCDEFGH_auto_off_minutes', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.my_device_turn_off_in-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Turn off in', + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.my_device_turn_off_in', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'False', + }) +# --- diff --git a/tests/components/tplink/snapshots/test_select.ambr b/tests/components/tplink/snapshots/test_select.ambr new file mode 100644 index 00000000000..c851979f34c --- /dev/null +++ b/tests/components/tplink/snapshots/test_select.ambr @@ -0,0 +1,240 @@ +# serializer version: 1 +# name: test_states[my_device-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + 
}), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- +# name: test_states[select.my_device_alarm_sound-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Doorbell Ring 1', + 'Doorbell Ring 2', + 'Doorbell Ring 3', + 'Doorbell Ring 4', + 'Doorbell Ring 5', + 'Doorbell Ring 6', + 'Doorbell Ring 7', + 'Doorbell Ring 8', + 'Doorbell Ring 9', + 'Doorbell Ring 10', + 'Phone Ring', + 'Alarm 1', + 'Alarm 2', + 'Alarm 3', + 'Alarm 4', + 'Dripping Tap', + 'Alarm 5', + 'Connection 1', + 'Connection 2', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.my_device_alarm_sound', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Alarm sound', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alarm_sound', + 'unique_id': '123456789ABCDEFGH_alarm_sound', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[select.my_device_alarm_sound-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Alarm sound', + 'options': list([ + 'Doorbell Ring 1', + 'Doorbell Ring 2', + 'Doorbell Ring 3', + 'Doorbell Ring 4', + 'Doorbell Ring 5', + 'Doorbell Ring 6', + 'Doorbell Ring 7', + 'Doorbell Ring 8', + 'Doorbell Ring 9', + 'Doorbell Ring 10', + 
'Phone Ring', + 'Alarm 1', + 'Alarm 2', + 'Alarm 3', + 'Alarm 4', + 'Dripping Tap', + 'Alarm 5', + 'Connection 1', + 'Connection 2', + ]), + }), + 'context': , + 'entity_id': 'select.my_device_alarm_sound', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Phone Ring', + }) +# --- +# name: test_states[select.my_device_alarm_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'low', + 'normal', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.my_device_alarm_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Alarm volume', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alarm_volume', + 'unique_id': '123456789ABCDEFGH_alarm_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[select.my_device_alarm_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Alarm volume', + 'options': list([ + 'low', + 'normal', + 'high', + ]), + }), + 'context': , + 'entity_id': 'select.my_device_alarm_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_states[select.my_device_light_preset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Off', + 'Preset 1', + 'Preset 2', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.my_device_light_preset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light preset', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light_preset', + 'unique_id': '123456789ABCDEFGH_light_preset', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[select.my_device_light_preset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Light preset', + 'options': list([ + 'Off', + 'Preset 1', + 'Preset 2', + ]), + }), + 'context': , + 'entity_id': 'select.my_device_light_preset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Off', + }) +# --- diff --git a/tests/components/tplink/snapshots/test_sensor.ambr b/tests/components/tplink/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..e639540e552 --- /dev/null +++ b/tests/components/tplink/snapshots/test_sensor.ambr @@ -0,0 +1,792 @@ +# serializer version: 1 +# name: test_states[my_device-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- +# name: test_states[sensor.my_device_alarm_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 
'entity_id': 'sensor.my_device_alarm_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Alarm source', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alarm_source', + 'unique_id': '123456789ABCDEFGH_alarm_source', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_auto_off_at-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_auto_off_at', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Auto off at', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_off_at', + 'unique_id': '123456789ABCDEFGH_auto_off_at', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_auto_off_at-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my_device Auto off at', + }), + 'context': , + 'entity_id': 'sensor.my_device_auto_off_at', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-24T09:03:11+00:00', + }) +# --- +# name: test_states[sensor.my_device_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_battery_level', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery level', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_level', + 'unique_id': '123456789ABCDEFGH_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_states[sensor.my_device_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'my_device Battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_device_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '85', + }) +# --- +# name: test_states[sensor.my_device_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_device_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current', + 'unique_id': '123456789ABCDEFGH_current_a', + 'unit_of_measurement': 'A', + }) +# --- +# name: test_states[sensor.my_device_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'my_device Current', + 'state_class': , + 'unit_of_measurement': 'A', + }), + 'context': , + 'entity_id': 'sensor.my_device_current', + 'last_changed': , + 'last_reported': , 
+ 'last_updated': , + 'state': '5.04', + }) +# --- +# name: test_states[sensor.my_device_current_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_device_current_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current consumption', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_consumption', + 'unique_id': '123456789ABCDEFGH_current_power_w', + 'unit_of_measurement': 'W', + }) +# --- +# name: test_states[sensor.my_device_current_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'my_device Current consumption', + 'state_class': , + 'unit_of_measurement': 'W', + }), + 'context': , + 'entity_id': 'sensor.my_device_current_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.2', + }) +# --- +# name: test_states[sensor.my_device_device_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_device_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device time', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'device_time', + 'unique_id': '123456789ABCDEFGH_device_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_device_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'humidity', + 'unique_id': '123456789ABCDEFGH_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_states[sensor.my_device_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'my_device Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_device_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_states[sensor.my_device_on_since-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_on_since', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'On since', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'on_since', + 'unique_id': 
'123456789ABCDEFGH_on_since', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_report_interval-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_report_interval', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Report interval', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'report_interval', + 'unique_id': '123456789ABCDEFGH_report_interval', + 'unit_of_measurement': '%', + }) +# --- +# name: test_states[sensor.my_device_signal_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_signal_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Signal level', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'signal_level', + 'unique_id': '123456789ABCDEFGH_signal_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_signal_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Signal level', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_device_signal_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# 
name: test_states[sensor.my_device_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Signal strength', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rssi', + 'unique_id': '123456789ABCDEFGH_rssi', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_ssid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SSID', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ssid', + 'unique_id': '123456789ABCDEFGH_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature', + 'unique_id': '123456789ABCDEFGH_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_states[sensor.my_device_this_month_s_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_this_month_s_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': "This month's consumption", + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_this_month', + 'unique_id': '123456789ABCDEFGH_consumption_this_month', + 'unit_of_measurement': 'kWh', + }) +# --- +# name: test_states[sensor.my_device_this_month_s_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': "my_device This month's consumption", + 'state_class': , + 'unit_of_measurement': 'kWh', + }), + 'context': , + 'entity_id': 'sensor.my_device_this_month_s_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15.345', + }) +# --- +# name: test_states[sensor.my_device_today_s_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.my_device_today_s_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': "Today's consumption", + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_today', + 'unique_id': '123456789ABCDEFGH_today_energy_kwh', + 'unit_of_measurement': 'kWh', + }) +# --- +# name: test_states[sensor.my_device_today_s_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': "my_device Today's consumption", + 'state_class': , + 'unit_of_measurement': 'kWh', + }), + 'context': , + 'entity_id': 'sensor.my_device_today_s_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.23', + }) +# --- +# name: test_states[sensor.my_device_total_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_total_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total consumption', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_total', + 'unique_id': '123456789ABCDEFGH_total_energy_kwh', + 'unit_of_measurement': 'kWh', + }) +# --- +# name: test_states[sensor.my_device_total_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', 
+ 'friendly_name': 'my_device Total consumption', + 'state_class': , + 'unit_of_measurement': 'kWh', + }), + 'context': , + 'entity_id': 'sensor.my_device_total_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.005', + }) +# --- +# name: test_states[sensor.my_device_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_device_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage', + 'unique_id': '123456789ABCDEFGH_voltage', + 'unit_of_measurement': 'V', + }) +# --- +# name: test_states[sensor.my_device_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'my_device Voltage', + 'state_class': , + 'unit_of_measurement': 'V', + }), + 'context': , + 'entity_id': 'sensor.my_device_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '121.1', + }) +# --- diff --git a/tests/components/tplink/snapshots/test_switch.ambr b/tests/components/tplink/snapshots/test_switch.ambr new file mode 100644 index 00000000000..4354ea1905a --- /dev/null +++ b/tests/components/tplink/snapshots/test_switch.ambr @@ -0,0 +1,313 @@ +# serializer version: 1 +# name: test_states[my_device-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 
'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_device', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- +# name: test_states[switch.my_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABCDEFGH', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device', + }), + 'context': , + 'entity_id': 'switch.my_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_states[switch.my_device_auto_off_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_auto_off_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Auto off enabled', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_off_enabled', + 'unique_id': '123456789ABCDEFGH_auto_off_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_auto_off_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Auto off enabled', + }), + 'context': , + 'entity_id': 'switch.my_device_auto_off_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_states[switch.my_device_auto_update_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_auto_update_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Auto update enabled', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_update_enabled', + 'unique_id': '123456789ABCDEFGH_auto_update_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_auto_update_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Auto update enabled', + }), + 'context': , + 'entity_id': 'switch.my_device_auto_update_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_states[switch.my_device_fan_sleep_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 
'entity_category': , + 'entity_id': 'switch.my_device_fan_sleep_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fan sleep mode', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_sleep_mode', + 'unique_id': '123456789ABCDEFGH_fan_sleep_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_fan_sleep_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Fan sleep mode', + }), + 'context': , + 'entity_id': 'switch.my_device_fan_sleep_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_states[switch.my_device_led-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_led', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led', + 'unique_id': '123456789ABCDEFGH_led', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_led-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device LED', + }), + 'context': , + 'entity_id': 'switch.my_device_led', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_states[switch.my_device_smooth_transitions-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_smooth_transitions', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smooth transitions', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smooth_transitions', + 'unique_id': '123456789ABCDEFGH_smooth_transitions', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_smooth_transitions-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Smooth transitions', + }), + 'context': , + 'entity_id': 'switch.my_device_smooth_transitions', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/tplink/test_binary_sensor.py b/tests/components/tplink/test_binary_sensor.py new file mode 100644 index 00000000000..e2b9cd08d13 --- /dev/null +++ b/tests/components/tplink/test_binary_sensor.py @@ -0,0 +1,124 @@ +"""Tests for tplink binary_sensor platform.""" + +from kasa import Feature +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components import tplink +from homeassistant.components.tplink.binary_sensor import BINARY_SENSOR_DESCRIPTIONS +from homeassistant.components.tplink.const import DOMAIN +from homeassistant.components.tplink.entity import EXCLUDED_FEATURES +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from . 
import ( + DEVICE_ID, + MAC_ADDRESS, + _mocked_device, + _mocked_feature, + _mocked_strip_children, + _patch_connect, + _patch_discovery, + setup_platform_for_device, + snapshot_platform, +) + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mocked_feature_binary_sensor() -> Feature: + """Return mocked tplink binary sensor feature.""" + return _mocked_feature( + "overheated", + value=False, + name="Overheated", + type_=Feature.Type.BinarySensor, + category=Feature.Category.Primary, + ) + + +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a sensor unique ids.""" + features = {description.key for description in BINARY_SENSOR_DESCRIPTIONS} + features.update(EXCLUDED_FEATURES) + device = _mocked_device(alias="my_device", features=features) + + await setup_platform_for_device( + hass, mock_config_entry, Platform.BINARY_SENSOR, device + ) + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + for excluded in EXCLUDED_FEATURES: + assert hass.states.get(f"sensor.my_device_{excluded}") is None + + +async def test_binary_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mocked_feature_binary_sensor: Feature, +) -> None: + """Test a sensor unique ids.""" + mocked_feature = mocked_feature_binary_sensor + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + + plug = _mocked_device(alias="my_plug", features=[mocked_feature]) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + # The entity_id is based on standard name from core. 
+ entity_id = "binary_sensor.my_plug_overheated" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_{mocked_feature.id}" + + +async def test_binary_sensor_children( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mocked_feature_binary_sensor: Feature, +) -> None: + """Test a sensor unique ids.""" + mocked_feature = mocked_feature_binary_sensor + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + plug = _mocked_device( + alias="my_plug", + features=[mocked_feature], + children=_mocked_strip_children(features=[mocked_feature]), + ) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "binary_sensor.my_plug_overheated" + entity = entity_registry.async_get(entity_id) + assert entity + device = device_registry.async_get(entity.device_id) + + for plug_id in range(2): + child_entity_id = f"binary_sensor.my_plug_plug{plug_id}_overheated" + child_entity = entity_registry.async_get(child_entity_id) + assert child_entity + assert child_entity.unique_id == f"PLUG{plug_id}DEVICEID_{mocked_feature.id}" + assert child_entity.device_id != entity.device_id + child_device = device_registry.async_get(child_entity.device_id) + assert child_device + assert child_device.via_device_id == device.id diff --git a/tests/components/tplink/test_button.py b/tests/components/tplink/test_button.py new file mode 100644 index 00000000000..143a882a6cb --- /dev/null +++ b/tests/components/tplink/test_button.py @@ -0,0 +1,153 @@ +"""Tests for tplink button platform.""" + +from kasa import Feature +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components import tplink +from 
homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.tplink.button import BUTTON_DESCRIPTIONS +from homeassistant.components.tplink.const import DOMAIN +from homeassistant.components.tplink.entity import EXCLUDED_FEATURES +from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from . import ( + DEVICE_ID, + MAC_ADDRESS, + _mocked_device, + _mocked_feature, + _mocked_strip_children, + _patch_connect, + _patch_discovery, + setup_platform_for_device, + snapshot_platform, +) + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mocked_feature_button() -> Feature: + """Return mocked tplink binary sensor feature.""" + return _mocked_feature( + "test_alarm", + value="", + name="Test alarm", + type_=Feature.Type.Action, + category=Feature.Category.Primary, + ) + + +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a sensor unique ids.""" + features = {description.key for description in BUTTON_DESCRIPTIONS} + features.update(EXCLUDED_FEATURES) + device = _mocked_device(alias="my_device", features=features) + + await setup_platform_for_device(hass, mock_config_entry, Platform.BUTTON, device) + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + for excluded in EXCLUDED_FEATURES: + assert hass.states.get(f"sensor.my_device_{excluded}") is None + + +async def test_button( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mocked_feature_button: Feature, +) -> None: + """Test a sensor unique ids.""" + mocked_feature = mocked_feature_button + already_migrated_config_entry = 
MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + + plug = _mocked_device(alias="my_plug", features=[mocked_feature]) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + # The entity_id is based on standard name from core. + entity_id = "button.my_plug_test_alarm" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_{mocked_feature.id}" + + +async def test_button_children( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mocked_feature_button: Feature, +) -> None: + """Test a sensor unique ids.""" + mocked_feature = mocked_feature_button + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + plug = _mocked_device( + alias="my_plug", + features=[mocked_feature], + children=_mocked_strip_children(features=[mocked_feature]), + ) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "button.my_plug_test_alarm" + entity = entity_registry.async_get(entity_id) + assert entity + device = device_registry.async_get(entity.device_id) + + for plug_id in range(2): + child_entity_id = f"button.my_plug_plug{plug_id}_test_alarm" + child_entity = entity_registry.async_get(child_entity_id) + assert child_entity + assert child_entity.unique_id == f"PLUG{plug_id}DEVICEID_{mocked_feature.id}" + assert child_entity.device_id != entity.device_id + child_device = device_registry.async_get(child_entity.device_id) + assert child_device + assert child_device.via_device_id == device.id + + +async def 
test_button_press( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mocked_feature_button: Feature, +) -> None: + """Test a number entity limits and setting values.""" + mocked_feature = mocked_feature_button + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + plug = _mocked_device(alias="my_plug", features=[mocked_feature]) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "button.my_plug_test_alarm" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_test_alarm" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mocked_feature.set_value.assert_called_with(True) diff --git a/tests/components/tplink/test_climate.py b/tests/components/tplink/test_climate.py new file mode 100644 index 00000000000..2f24fa829f9 --- /dev/null +++ b/tests/components/tplink/test_climate.py @@ -0,0 +1,227 @@ +"""Tests for tplink climate platform.""" + +from datetime import timedelta + +from kasa import Device, Feature +from kasa.smart.modules.temperaturecontrol import ThermostatState +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_ACTION, + ATTR_HVAC_MODE, + ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + HVACAction, + HVACMode, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util 
import dt as dt_util + +from . import ( + DEVICE_ID, + _mocked_device, + _mocked_feature, + setup_platform_for_device, + snapshot_platform, +) + +from tests.common import MockConfigEntry, async_fire_time_changed + +ENTITY_ID = "climate.thermostat" + + +@pytest.fixture +async def mocked_hub(hass: HomeAssistant) -> Device: + """Return mocked tplink binary sensor feature.""" + + features = [ + _mocked_feature( + "temperature", value=20, category=Feature.Category.Primary, unit="celsius" + ), + _mocked_feature( + "target_temperature", + value=22, + type_=Feature.Type.Number, + category=Feature.Category.Primary, + unit="celsius", + ), + _mocked_feature( + "state", + value=True, + type_=Feature.Type.Switch, + category=Feature.Category.Primary, + ), + _mocked_feature( + "thermostat_mode", + value=ThermostatState.Heating, + type_=Feature.Type.Choice, + category=Feature.Category.Primary, + ), + ] + + thermostat = _mocked_device( + alias="thermostat", features=features, device_type=Device.Type.Thermostat + ) + + return _mocked_device( + alias="hub", children=[thermostat], device_type=Device.Type.Hub + ) + + +async def test_climate( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mocked_hub: Device, +) -> None: + """Test initialization.""" + await setup_platform_for_device( + hass, mock_config_entry, Platform.CLIMATE, mocked_hub + ) + + entity = entity_registry.async_get(ENTITY_ID) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_climate" + + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_HVAC_ACTION] is HVACAction.HEATING + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20 + assert state.attributes[ATTR_TEMPERATURE] == 22 + + +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, + mocked_hub: Device, +) -> None: + """Snapshot test.""" + await 
setup_platform_for_device( + hass, mock_config_entry, Platform.CLIMATE, mocked_hub + ) + + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + +async def test_set_temperature( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mocked_hub: Device +) -> None: + """Test that set_temperature service calls the setter.""" + mocked_thermostat = mocked_hub.children[0] + mocked_thermostat.features["target_temperature"].minimum_value = 0 + + await setup_platform_for_device( + hass, mock_config_entry, Platform.CLIMATE, mocked_hub + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 10}, + blocking=True, + ) + target_temp_feature = mocked_thermostat.features["target_temperature"] + target_temp_feature.set_value.assert_called_with(10) + + +async def test_set_hvac_mode( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mocked_hub: Device +) -> None: + """Test that set_hvac_mode service works.""" + await setup_platform_for_device( + hass, mock_config_entry, Platform.CLIMATE, mocked_hub + ) + + mocked_thermostat = mocked_hub.children[0] + mocked_state = mocked_thermostat.features["state"] + assert mocked_state is not None + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + + mocked_state.set_value.assert_called_with(False) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: [ENTITY_ID], ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + mocked_state.set_value.assert_called_with(True) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: [ENTITY_ID], ATTR_HVAC_MODE: HVACMode.DRY}, + blocking=True, + ) + + +async def test_turn_on_and_off( + hass: HomeAssistant, mock_config_entry: 
MockConfigEntry, mocked_hub: Device +) -> None: + """Test that turn_on and turn_off services work as expected.""" + await setup_platform_for_device( + hass, mock_config_entry, Platform.CLIMATE, mocked_hub + ) + + mocked_thermostat = mocked_hub.children[0] + mocked_state = mocked_thermostat.features["state"] + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: [ENTITY_ID]}, + blocking=True, + ) + + mocked_state.set_value.assert_called_with(False) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [ENTITY_ID]}, + blocking=True, + ) + + mocked_state.set_value.assert_called_with(True) + + +async def test_unknown_mode( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mocked_hub: Device, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that unknown device modes log a warning and default to off.""" + await setup_platform_for_device( + hass, mock_config_entry, Platform.CLIMATE, mocked_hub + ) + + mocked_thermostat = mocked_hub.children[0] + mocked_state = mocked_thermostat.features["thermostat_mode"] + mocked_state.value = ThermostatState.Unknown + + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.OFF + assert "Unknown thermostat state, defaulting to OFF" in caplog.text diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index 7bf3b8cce5e..ddd67f249e6 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -1,21 +1,27 @@ """Test the tplink config flow.""" +import logging from unittest.mock import AsyncMock, patch -from kasa import TimeoutException +from kasa import TimeoutError import pytest from homeassistant import config_entries from homeassistant.components import dhcp from homeassistant.components.tplink 
import ( DOMAIN, - AuthenticationException, + AuthenticationError, Credentials, + Device, DeviceConfig, - SmartDeviceException, + KasaException, ) -from homeassistant.components.tplink.const import CONF_DEVICE_CONFIG -from homeassistant.config_entries import ConfigEntryState +from homeassistant.components.tplink.const import ( + CONF_CONNECTION_TYPE, + CONF_CREDENTIALS_HASH, + CONF_DEVICE_CONFIG, +) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( CONF_ALIAS, CONF_DEVICE, @@ -29,18 +35,22 @@ from homeassistant.data_entry_flow import FlowResultType from . import ( ALIAS, - CREATE_ENTRY_DATA_AUTH, - CREATE_ENTRY_DATA_AUTH2, + CONNECTION_TYPE_KLAP_DICT, + CREATE_ENTRY_DATA_AES, + CREATE_ENTRY_DATA_KLAP, CREATE_ENTRY_DATA_LEGACY, + CREDENTIALS_HASH_AES, + CREDENTIALS_HASH_KLAP, DEFAULT_ENTRY_TITLE, - DEVICE_CONFIG_DICT_AUTH, + DEVICE_CONFIG_DICT_AES, + DEVICE_CONFIG_DICT_KLAP, DEVICE_CONFIG_DICT_LEGACY, DHCP_FORMATTED_MAC_ADDRESS, IP_ADDRESS, MAC_ADDRESS, MAC_ADDRESS2, MODULE, - _mocked_bulb, + _mocked_device, _patch_connect, _patch_discovery, _patch_single_discovery, @@ -120,7 +130,7 @@ async def test_discovery_auth( ) -> None: """Test authenticated discovery.""" - mock_discovery["mock_device"].update.side_effect = AuthenticationException + mock_discovery["mock_device"].update.side_effect = AuthenticationError result = await hass.config_entries.flow.async_init( DOMAIN, @@ -129,7 +139,7 @@ async def test_discovery_auth( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() @@ -148,15 +158,15 @@ async def test_discovery_auth( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == DEFAULT_ENTRY_TITLE - assert result2["data"] == CREATE_ENTRY_DATA_AUTH + assert result2["data"] == CREATE_ENTRY_DATA_KLAP assert result2["context"]["unique_id"] == 
MAC_ADDRESS @pytest.mark.parametrize( ("error_type", "errors_msg", "error_placement"), [ - (AuthenticationException("auth_error_details"), "invalid_auth", CONF_PASSWORD), - (SmartDeviceException("smart_device_error_details"), "cannot_connect", "base"), + (AuthenticationError("auth_error_details"), "invalid_auth", CONF_PASSWORD), + (KasaException("smart_device_error_details"), "cannot_connect", "base"), ], ids=["invalid-auth", "unknown-error"], ) @@ -170,7 +180,7 @@ async def test_discovery_auth_errors( error_placement, ) -> None: """Test handling of discovery authentication errors.""" - mock_discovery["mock_device"].update.side_effect = AuthenticationException + mock_discovery["mock_device"].update.side_effect = AuthenticationError default_connect_side_effect = mock_connect["connect"].side_effect mock_connect["connect"].side_effect = error_type @@ -181,7 +191,7 @@ async def test_discovery_auth_errors( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() @@ -212,7 +222,7 @@ async def test_discovery_auth_errors( }, ) assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_AUTH + assert result3["data"] == CREATE_ENTRY_DATA_KLAP assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -223,7 +233,7 @@ async def test_discovery_new_credentials( mock_init, ) -> None: """Test setting up discovery with new credentials.""" - mock_discovery["mock_device"].update.side_effect = AuthenticationException + mock_discovery["mock_device"].update.side_effect = AuthenticationError result = await hass.config_entries.flow.async_init( DOMAIN, @@ -232,7 +242,7 @@ async def test_discovery_new_credentials( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() @@ 
-261,7 +271,7 @@ async def test_discovery_new_credentials( {}, ) assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_AUTH + assert result3["data"] == CREATE_ENTRY_DATA_KLAP assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -272,10 +282,10 @@ async def test_discovery_new_credentials_invalid( mock_init, ) -> None: """Test setting up discovery with new invalid credentials.""" - mock_discovery["mock_device"].update.side_effect = AuthenticationException + mock_discovery["mock_device"].update.side_effect = AuthenticationError default_connect_side_effect = mock_connect["connect"].side_effect - mock_connect["connect"].side_effect = AuthenticationException + mock_connect["connect"].side_effect = AuthenticationError result = await hass.config_entries.flow.async_init( DOMAIN, @@ -284,7 +294,7 @@ async def test_discovery_new_credentials_invalid( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() @@ -317,7 +327,7 @@ async def test_discovery_new_credentials_invalid( }, ) assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_AUTH + assert result3["data"] == CREATE_ENTRY_DATA_KLAP assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -514,7 +524,7 @@ async def test_manual_auth( assert result["step_id"] == "user" assert not result["errors"] - mock_discovery["mock_device"].update.side_effect = AuthenticationException + mock_discovery["mock_device"].update.side_effect = AuthenticationError result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_HOST: IP_ADDRESS} @@ -537,15 +547,15 @@ async def test_manual_auth( await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == DEFAULT_ENTRY_TITLE - assert result3["data"] == CREATE_ENTRY_DATA_AUTH + 
assert result3["data"] == CREATE_ENTRY_DATA_KLAP assert result3["context"]["unique_id"] == MAC_ADDRESS @pytest.mark.parametrize( ("error_type", "errors_msg", "error_placement"), [ - (AuthenticationException("auth_error_details"), "invalid_auth", CONF_PASSWORD), - (SmartDeviceException("smart_device_error_details"), "cannot_connect", "base"), + (AuthenticationError("auth_error_details"), "invalid_auth", CONF_PASSWORD), + (KasaException("smart_device_error_details"), "cannot_connect", "base"), ], ids=["invalid-auth", "unknown-error"], ) @@ -566,7 +576,7 @@ async def test_manual_auth_errors( assert result["step_id"] == "user" assert not result["errors"] - mock_discovery["mock_device"].update.side_effect = AuthenticationException + mock_discovery["mock_device"].update.side_effect = AuthenticationError default_connect_side_effect = mock_connect["connect"].side_effect mock_connect["connect"].side_effect = error_type @@ -601,7 +611,7 @@ async def test_manual_auth_errors( }, ) assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["data"] == CREATE_ENTRY_DATA_AUTH + assert result4["data"] == CREATE_ENTRY_DATA_KLAP assert result4["context"]["unique_id"] == MAC_ADDRESS await hass.async_block_till_done() @@ -765,7 +775,7 @@ async def test_integration_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" - mock_connect["connect"].side_effect = SmartDeviceException() + mock_connect["connect"].side_effect = KasaException() mock_config_entry.add_to_hass(hass) with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -785,19 +795,19 @@ async def test_integration_discovery_with_ip_change( CONF_HOST: "127.0.0.2", CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT 
assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" - config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_AUTH) + config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_KLAP) mock_connect["connect"].reset_mock(side_effect=True) - bulb = _mocked_bulb( + bulb = _mocked_device( device_config=config, mac=mock_config_entry.unique_id, ) @@ -811,6 +821,79 @@ async def test_integration_discovery_with_ip_change( mock_connect["connect"].assert_awaited_once_with(config=config) +async def test_integration_discovery_with_connection_change( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test that config entry is updated with new device config. + + And that connection_hash is removed as it will be invalid. 
+ """ + mock_connect["connect"].side_effect = KasaException() + + mock_config_entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data=CREATE_ENTRY_DATA_AES, + unique_id=MAC_ADDRESS2, + ) + mock_config_entry.add_to_hass(hass) + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + assert ( + len( + hass.config_entries.flow.async_progress_by_handler( + DOMAIN, match_context={"source": SOURCE_REAUTH} + ) + ) + == 0 + ) + assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES + assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.2" + assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES + + NEW_DEVICE_CONFIG = { + **DEVICE_CONFIG_DICT_KLAP, + CONF_CONNECTION_TYPE: CONNECTION_TYPE_KLAP_DICT, + CONF_HOST: "127.0.0.2", + } + config = DeviceConfig.from_dict(NEW_DEVICE_CONFIG) + # Reset the connect mock so when the config flow reloads the entry it succeeds + mock_connect["connect"].reset_mock(side_effect=True) + bulb = _mocked_device( + device_config=config, + mac=mock_config_entry.unique_id, + ) + mock_connect["connect"].return_value = bulb + + discovery_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: "127.0.0.2", + CONF_MAC: MAC_ADDRESS2, + CONF_ALIAS: ALIAS, + CONF_DEVICE_CONFIG: NEW_DEVICE_CONFIG, + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert discovery_result["type"] is FlowResultType.ABORT + assert discovery_result["reason"] == "already_configured" + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == NEW_DEVICE_CONFIG + assert mock_config_entry.data[CONF_HOST] == 
"127.0.0.2" + assert CREDENTIALS_HASH_AES not in mock_config_entry.data + + assert mock_config_entry.state is ConfigEntryState.LOADED + + mock_connect["connect"].assert_awaited_once_with(config=config) + + async def test_dhcp_discovery_with_ip_change( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -818,7 +901,7 @@ async def test_dhcp_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test dhcp discovery with an IP change.""" - mock_connect["connect"].side_effect = SmartDeviceException() + mock_connect["connect"].side_effect = KasaException() mock_config_entry.add_to_hass(hass) with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -876,6 +959,77 @@ async def test_reauth( await hass.async_block_till_done() +async def test_reauth_update_with_encryption_change( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test reauth flow.""" + orig_side_effect = mock_connect["connect"].side_effect + mock_connect["connect"].side_effect = AuthenticationError() + mock_config_entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data={**CREATE_ENTRY_DATA_AES}, + unique_id=MAC_ADDRESS2, + ) + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES + assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES + + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + caplog.set_level(logging.DEBUG) + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == 
DEVICE_CONFIG_DICT_AES + assert CONF_CREDENTIALS_HASH not in mock_config_entry.data + + new_config = DeviceConfig( + "127.0.0.2", + credentials=None, + connection_type=Device.ConnectionParameters( + Device.Family.SmartTapoPlug, Device.EncryptionType.Klap + ), + uses_http=True, + ) + mock_discovery["mock_device"].host = "127.0.0.2" + mock_discovery["mock_device"].config = new_config + mock_discovery["mock_device"].credentials_hash = None + mock_connect["mock_devices"]["127.0.0.2"].config = new_config + mock_connect["mock_devices"]["127.0.0.2"].credentials_hash = CREDENTIALS_HASH_KLAP + + mock_connect["connect"].side_effect = orig_side_effect + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Connection type changed for 127.0.0.2" in caplog.text + credentials = Credentials("fake_username", "fake_password") + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.2", credentials=credentials + ) + mock_discovery["mock_device"].update.assert_called_once_with() + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert mock_config_entry.state is ConfigEntryState.LOADED + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == { + **DEVICE_CONFIG_DICT_KLAP, + CONF_HOST: "127.0.0.2", + } + assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_KLAP + + async def test_reauth_update_from_discovery( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -883,7 +1037,7 @@ async def test_reauth_update_from_discovery( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" - mock_connect["connect"].side_effect = AuthenticationException + mock_connect["connect"].side_effect = AuthenticationError mock_config_entry.add_to_hass(hass) with patch("homeassistant.components.tplink.Discover.discover", 
return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -904,13 +1058,13 @@ async def test_reauth_update_from_discovery( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP async def test_reauth_update_from_discovery_with_ip_change( @@ -920,7 +1074,7 @@ async def test_reauth_update_from_discovery_with_ip_change( mock_connect: AsyncMock, ) -> None: """Test reauth flow.""" - mock_connect["connect"].side_effect = AuthenticationException() + mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) with patch("homeassistant.components.tplink.Discover.discover", return_value={}): await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -940,13 +1094,13 @@ async def test_reauth_update_from_discovery_with_ip_change( CONF_HOST: "127.0.0.2", CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" @@ -957,13 +1111,13 @@ async def test_reauth_no_update_if_config_and_ip_the_same( mock_connect: AsyncMock, ) -> None: """Test reauth discovery does not update when the host and config are the same.""" - mock_connect["connect"].side_effect = 
AuthenticationException() + mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) hass.config_entries.async_update_entry( mock_config_entry, data={ **mock_config_entry.data, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -974,7 +1128,7 @@ async def test_reauth_no_update_if_config_and_ip_the_same( assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP discovery_result = await hass.config_entries.flow.async_init( DOMAIN, @@ -983,21 +1137,21 @@ async def test_reauth_no_update_if_config_and_ip_the_same( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS @pytest.mark.parametrize( ("error_type", "errors_msg", "error_placement"), [ - (AuthenticationException("auth_error_details"), "invalid_auth", CONF_PASSWORD), - (SmartDeviceException("smart_device_error_details"), "cannot_connect", "base"), + (AuthenticationError("auth_error_details"), "invalid_auth", CONF_PASSWORD), + (KasaException("smart_device_error_details"), "cannot_connect", "base"), ], ids=["invalid-auth", "unknown-error"], ) @@ -1060,8 +1214,8 @@ async def test_reauth_errors( @pytest.mark.parametrize( ("error_type", "expected_flow"), [ - (AuthenticationException, FlowResultType.FORM), - 
(SmartDeviceException, FlowResultType.ABORT), + (AuthenticationError, FlowResultType.FORM), + (KasaException, FlowResultType.ABORT), ], ids=["invalid-auth", "unknown-error"], ) @@ -1119,7 +1273,7 @@ async def test_discovery_timeout_connect( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_discovery["discover_single"].side_effect = TimeoutException + mock_discovery["discover_single"].side_effect = TimeoutError await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -1137,19 +1291,24 @@ async def test_discovery_timeout_connect( async def test_reauth_update_other_flows( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, mock_discovery: AsyncMock, mock_connect: AsyncMock, ) -> None: """Test reauth updates other reauth flows.""" + mock_config_entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data={**CREATE_ENTRY_DATA_KLAP}, + unique_id=MAC_ADDRESS, + ) mock_config_entry2 = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AUTH2}, + data={**CREATE_ENTRY_DATA_AES}, unique_id=MAC_ADDRESS2, ) default_side_effect = mock_connect["connect"].side_effect - mock_connect["connect"].side_effect = AuthenticationException() + mock_connect["connect"].side_effect = AuthenticationError() mock_config_entry.add_to_hass(hass) mock_config_entry2.add_to_hass(hass) with patch("homeassistant.components.tplink.Discover.discover", return_value={}): @@ -1167,7 +1326,7 @@ async def test_reauth_update_other_flows( flows_by_entry_id = {flow["context"]["entry_id"]: flow for flow in flows} result = flows_by_entry_id[mock_config_entry.entry_id] assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP result2 = await hass.config_entries.flow.async_configure( result["flow_id"], 
user_input={ diff --git a/tests/components/tplink/test_diagnostics.py b/tests/components/tplink/test_diagnostics.py index 3543cf95572..7288d631f4a 100644 --- a/tests/components/tplink/test_diagnostics.py +++ b/tests/components/tplink/test_diagnostics.py @@ -2,12 +2,12 @@ import json -from kasa import SmartDevice +from kasa import Device import pytest from homeassistant.core import HomeAssistant -from . import _mocked_bulb, _mocked_plug, initialize_config_entry_for_device +from . import _mocked_device, initialize_config_entry_for_device from tests.common import load_fixture from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -18,13 +18,13 @@ from tests.typing import ClientSessionGenerator ("mocked_dev", "fixture_file", "sysinfo_vars", "expected_oui"), [ ( - _mocked_bulb(), + _mocked_device(), "tplink-diagnostics-data-bulb-kl130.json", ["mic_mac", "deviceId", "oemId", "hwId", "alias"], "AA:BB:CC", ), ( - _mocked_plug(), + _mocked_device(), "tplink-diagnostics-data-plug-hs110.json", ["mac", "deviceId", "oemId", "hwId", "alias", "longitude_i", "latitude_i"], "AA:BB:CC", @@ -34,7 +34,7 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - mocked_dev: SmartDevice, + mocked_dev: Device, fixture_file: str, sysinfo_vars: list[str], expected_oui: str | None, diff --git a/tests/components/tplink/test_fan.py b/tests/components/tplink/test_fan.py new file mode 100644 index 00000000000..deba33abfa5 --- /dev/null +++ b/tests/components/tplink/test_fan.py @@ -0,0 +1,154 @@ +"""Tests for fan platform.""" + +from __future__ import annotations + +from datetime import timedelta + +from kasa import Device, Module +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.fan import ( + ATTR_PERCENTAGE, + DOMAIN as FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant 
+from homeassistant.helpers import device_registry as dr, entity_registry as er +import homeassistant.util.dt as dt_util + +from . import DEVICE_ID, _mocked_device, setup_platform_for_device, snapshot_platform + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a fan state.""" + child_fan_1 = _mocked_device( + modules=[Module.Fan], alias="my_fan_0", device_id=f"{DEVICE_ID}00" + ) + child_fan_2 = _mocked_device( + modules=[Module.Fan], alias="my_fan_1", device_id=f"{DEVICE_ID}01" + ) + parent_device = _mocked_device( + device_id=DEVICE_ID, + alias="my_device", + children=[child_fan_1, child_fan_2], + modules=[Module.Fan], + device_type=Device.Type.WallSwitch, + ) + + await setup_platform_for_device( + hass, mock_config_entry, Platform.FAN, parent_device + ) + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + +async def test_fan_unique_id( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test a fan unique id.""" + fan = _mocked_device(modules=[Module.Fan], alias="my_fan") + await setup_platform_for_device(hass, mock_config_entry, Platform.FAN, fan) + + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + assert device_entries + entity_id = "fan.my_fan" + entity_registry = er.async_get(hass) + assert entity_registry.async_get(entity_id).unique_id == DEVICE_ID + + +async def test_fan(hass: HomeAssistant, mock_config_entry: MockConfigEntry) -> None: + """Test a color fan and that all transitions are correctly passed.""" + device = _mocked_device(modules=[Module.Fan], alias="my_fan") + fan = device.modules[Module.Fan] + fan.fan_speed_level = 0 + await 
setup_platform_for_device(hass, mock_config_entry, Platform.FAN, device) + + entity_id = "fan.my_fan" + + state = hass.states.get(entity_id) + assert state.state == "off" + + await hass.services.async_call( + FAN_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + fan.set_fan_speed_level.assert_called_once_with(4) + fan.set_fan_speed_level.reset_mock() + + fan.fan_speed_level = 4 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get(entity_id) + assert state.state == "on" + + await hass.services.async_call( + FAN_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + fan.set_fan_speed_level.assert_called_once_with(0) + fan.set_fan_speed_level.reset_mock() + + await hass.services.async_call( + FAN_DOMAIN, + "turn_on", + {ATTR_ENTITY_ID: entity_id, ATTR_PERCENTAGE: 50}, + blocking=True, + ) + fan.set_fan_speed_level.assert_called_once_with(2) + fan.set_fan_speed_level.reset_mock() + + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_SET_PERCENTAGE, + {ATTR_ENTITY_ID: entity_id, ATTR_PERCENTAGE: 25}, + blocking=True, + ) + fan.set_fan_speed_level.assert_called_once_with(1) + fan.set_fan_speed_level.reset_mock() + + +async def test_fan_child( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test child fans are added to parent device with the right ids.""" + child_fan_1 = _mocked_device( + modules=[Module.Fan], alias="my_fan_0", device_id=f"{DEVICE_ID}00" + ) + child_fan_2 = _mocked_device( + modules=[Module.Fan], alias="my_fan_1", device_id=f"{DEVICE_ID}01" + ) + parent_device = _mocked_device( + device_id=DEVICE_ID, + alias="my_device", + children=[child_fan_1, child_fan_2], + modules=[Module.Fan], + device_type=Device.Type.WallSwitch, + ) + await setup_platform_for_device( + hass, mock_config_entry, Platform.FAN, parent_device + ) + + entity_id = 
"fan.my_device" + entity = entity_registry.async_get(entity_id) + assert entity + + for fan_id in range(2): + child_entity_id = f"fan.my_device_my_fan_{fan_id}" + child_entity = entity_registry.async_get(child_entity_id) + assert child_entity + assert child_entity.unique_id == f"{DEVICE_ID}0{fan_id}" + assert child_entity.device_id == entity.device_id diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index 481a9e0e2b3..986aaebd170 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -4,15 +4,19 @@ from __future__ import annotations import copy from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from freezegun.api import FrozenDateTimeFactory -from kasa.exceptions import AuthenticationException +from kasa import AuthenticationError, DeviceConfig, Feature, KasaException, Module import pytest from homeassistant import setup from homeassistant.components import tplink -from homeassistant.components.tplink.const import CONF_DEVICE_CONFIG, DOMAIN +from homeassistant.components.tplink.const import ( + CONF_CREDENTIALS_HASH, + CONF_DEVICE_CONFIG, + DOMAIN, +) from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( CONF_AUTHENTICATION, @@ -21,19 +25,22 @@ from homeassistant.const import ( CONF_USERNAME, STATE_ON, STATE_UNAVAILABLE, + EntityCategory, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_registry import EntityRegistry +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from . 
import ( - CREATE_ENTRY_DATA_AUTH, - DEVICE_CONFIG_AUTH, + CREATE_ENTRY_DATA_KLAP, + CREATE_ENTRY_DATA_LEGACY, + DEVICE_CONFIG_KLAP, + DEVICE_ID, + DEVICE_ID_MAC, IP_ADDRESS, MAC_ADDRESS, - _mocked_dimmer, - _mocked_plug, + _mocked_device, _patch_connect, _patch_discovery, _patch_single_discovery, @@ -100,22 +107,22 @@ async def test_config_entry_retry(hass: HomeAssistant) -> None: async def test_dimmer_switch_unique_id_fix_original_entity_still_exists( - hass: HomeAssistant, entity_reg: EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test no migration happens if the original entity id still exists.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=MAC_ADDRESS) config_entry.add_to_hass(hass) - dimmer = _mocked_dimmer() + dimmer = _mocked_device(alias="My dimmer", modules=[Module.Light]) rollout_unique_id = MAC_ADDRESS.replace(":", "").upper() original_unique_id = tplink.legacy_device_id(dimmer) - original_dimmer_entity_reg = entity_reg.async_get_or_create( + original_dimmer_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", unique_id=original_unique_id, original_name="Original dimmer", ) - rollout_dimmer_entity_reg = entity_reg.async_get_or_create( + rollout_dimmer_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -129,9 +136,9 @@ async def test_dimmer_switch_unique_id_fix_original_entity_still_exists( _patch_connect(device=dimmer), ): await setup.async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) - migrated_dimmer_entity_reg = entity_reg.async_get_or_create( + migrated_dimmer_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -171,7 +178,7 @@ async def test_config_entry_device_config( mock_config_entry = MockConfigEntry( title="TPLink", 
domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AUTH}, + data={**CREATE_ENTRY_DATA_KLAP}, unique_id=MAC_ADDRESS, ) mock_config_entry.add_to_hass(hass) @@ -190,7 +197,7 @@ async def test_config_entry_with_stored_credentials( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AUTH}, + data={**CREATE_ENTRY_DATA_KLAP}, unique_id=MAC_ADDRESS, ) auth = { @@ -203,7 +210,7 @@ async def test_config_entry_with_stored_credentials( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED - config = DEVICE_CONFIG_AUTH + config = DEVICE_CONFIG_KLAP assert config.credentials != stored_credentials config.credentials = stored_credentials mock_connect["connect"].assert_called_once_with(config=config) @@ -216,7 +223,7 @@ async def test_config_entry_device_config_invalid( caplog: pytest.LogCaptureFixture, ) -> None: """Test that an invalid device config logs an error and loads the config entry.""" - entry_data = copy.deepcopy(CREATE_ENTRY_DATA_AUTH) + entry_data = copy.deepcopy(CREATE_ENTRY_DATA_KLAP) entry_data[CONF_DEVICE_CONFIG] = {"foo": "bar"} mock_config_entry = MockConfigEntry( title="TPLink", @@ -238,8 +245,8 @@ async def test_config_entry_device_config_invalid( @pytest.mark.parametrize( ("error_type", "entry_state", "reauth_flows"), [ - (tplink.AuthenticationException, ConfigEntryState.SETUP_ERROR, True), - (tplink.SmartDeviceException, ConfigEntryState.SETUP_RETRY, False), + (tplink.AuthenticationError, ConfigEntryState.SETUP_ERROR, True), + (tplink.KasaException, ConfigEntryState.SETUP_RETRY, False), ], ids=["invalid-auth", "unknown-error"], ) @@ -256,7 +263,7 @@ async def test_config_entry_errors( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AUTH}, + data={**CREATE_ENTRY_DATA_KLAP}, unique_id=MAC_ADDRESS, ) mock_config_entry.add_to_hass(hass) @@ -275,15 +282,15 @@ async def 
test_plug_auth_fails(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) config_entry.add_to_hass(hass) - plug = _mocked_plug() - with _patch_discovery(device=plug), _patch_connect(device=plug): + device = _mocked_device(alias="my_plug", features=["state"]) + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() entity_id = "switch.my_plug" state = hass.states.get(entity_id) assert state.state == STATE_ON - plug.update = AsyncMock(side_effect=AuthenticationException) + device.update = AsyncMock(side_effect=AuthenticationError) async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() @@ -298,3 +305,413 @@ async def test_plug_auth_fails(hass: HomeAssistant) -> None: ) == 1 ) + + +async def test_update_attrs_fails_in_init( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a smart plug auth failure.""" + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + config_entry.add_to_hass(hass) + light = _mocked_device(modules=[Module.Light], alias="my_light") + light_module = light.modules[Module.Light] + p = PropertyMock(side_effect=KasaException) + type(light_module).color_temp = p + light.__str__ = lambda _: "MockLight" + with _patch_discovery(device=light), _patch_connect(device=light): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "light.my_light" + entity = entity_registry.async_get(entity_id) + assert entity + state = hass.states.get(entity_id) + assert state.state == STATE_UNAVAILABLE + assert "Unable to read data for MockLight None:" in caplog.text + + +async def test_update_attrs_fails_on_update( + hass: HomeAssistant, + entity_registry: 
er.EntityRegistry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a smart plug auth failure.""" + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + config_entry.add_to_hass(hass) + light = _mocked_device(modules=[Module.Light], alias="my_light") + light_module = light.modules[Module.Light] + + with _patch_discovery(device=light), _patch_connect(device=light): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "light.my_light" + entity = entity_registry.async_get(entity_id) + assert entity + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + p = PropertyMock(side_effect=KasaException) + type(light_module).color_temp = p + light.__str__ = lambda _: "MockLight" + freezer.tick(5) + async_fire_time_changed(hass) + entity = entity_registry.async_get(entity_id) + assert entity + state = hass.states.get(entity_id) + assert state.state == STATE_UNAVAILABLE + assert f"Unable to read data for MockLight {entity_id}:" in caplog.text + # Check only logs once + caplog.clear() + freezer.tick(5) + async_fire_time_changed(hass) + entity = entity_registry.async_get(entity_id) + assert entity + state = hass.states.get(entity_id) + assert state.state == STATE_UNAVAILABLE + assert f"Unable to read data for MockLight {entity_id}:" not in caplog.text + + +async def test_feature_no_category( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a strip unique id.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + dev = _mocked_device( + alias="my_plug", + features=["led"], + ) + dev.features["led"].category = Feature.Category.Unset + with _patch_discovery(device=dev), _patch_connect(device=dev): + await 
async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "switch.my_plug_led" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.entity_category == EntityCategory.DIAGNOSTIC + assert "Unhandled category Category.Unset, fallback to DIAGNOSTIC" in caplog.text + + +@pytest.mark.parametrize( + ("device_id", "id_count", "domains", "expected_message"), + [ + pytest.param(DEVICE_ID_MAC, 1, [DOMAIN], None, id="mac-id-no-children"), + pytest.param(DEVICE_ID_MAC, 3, [DOMAIN], "Replaced", id="mac-id-children"), + pytest.param( + DEVICE_ID_MAC, + 1, + [DOMAIN, "other"], + None, + id="mac-id-no-children-other-domain", + ), + pytest.param( + DEVICE_ID_MAC, + 3, + [DOMAIN, "other"], + "Replaced", + id="mac-id-children-other-domain", + ), + pytest.param(DEVICE_ID, 1, [DOMAIN], None, id="not-mac-id-no-children"), + pytest.param( + DEVICE_ID, 3, [DOMAIN], "Unable to replace", id="not-mac-children" + ), + pytest.param( + DEVICE_ID, 1, [DOMAIN, "other"], None, id="not-mac-no-children-other-domain" + ), + pytest.param( + DEVICE_ID, + 3, + [DOMAIN, "other"], + "Unable to replace", + id="not-mac-children-other-domain", + ), + ], +) +async def test_unlink_devices( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + device_id, + id_count, + domains, + expected_message, +) -> None: + """Test for unlinking child device ids.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={**CREATE_ENTRY_DATA_LEGACY}, + entry_id="123456", + unique_id="any", + version=1, + minor_version=2, + ) + entry.add_to_hass(hass) + + # Generate list of test identifiers + test_identifiers = [ + (domain, f"{device_id}{"" if i == 0 else f"_000{i}"}") + for i in range(id_count) + for domain in domains + ] + update_msg_fragment = "identifiers for device dummy (hs300):" + update_msg = f"{expected_message} {update_msg_fragment}" if expected_message else "" + + # Expected identifiers 
should include all other domains or all the newer non-mac device ids + # or just the parent mac device id + expected_identifiers = [ + (domain, device_id) + for domain, device_id in test_identifiers + if domain != DOMAIN + or device_id.startswith(DEVICE_ID) + or device_id == DEVICE_ID_MAC + ] + + device_registry.async_get_or_create( + config_entry_id="123456", + connections={ + (dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS), + }, + identifiers=set(test_identifiers), + model="hs300", + name="dummy", + ) + device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id) + + assert device_entries[0].connections == { + (dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS), + } + assert device_entries[0].identifiers == set(test_identifiers) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id) + + assert device_entries[0].connections == {(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)} + + assert device_entries[0].identifiers == set(expected_identifiers) + assert entry.version == 1 + assert entry.minor_version == 4 + + assert update_msg in caplog.text + assert "Migration to version 1.3 complete" in caplog.text + + +async def test_move_credentials_hash( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test credentials hash moved to parent. + + As async_setup_entry will succeed the hash on the parent is updated + from the device. 
+ """ + device_config = { + **DEVICE_CONFIG_KLAP.to_dict( + exclude_credentials=True, credentials_hash="theHash" + ) + } + entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} + + entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data=entry_data, + entry_id="123456", + unique_id=MAC_ADDRESS, + version=1, + minor_version=3, + ) + assert entry.data[CONF_DEVICE_CONFIG][CONF_CREDENTIALS_HASH] == "theHash" + entry.add_to_hass(hass) + + async def _connect(config): + config.credentials_hash = "theNewHash" + return _mocked_device(device_config=config, credentials_hash="theNewHash") + + with ( + patch("homeassistant.components.tplink.Device.connect", new=_connect), + patch("homeassistant.components.tplink.PLATFORMS", []), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.minor_version == 4 + assert entry.state is ConfigEntryState.LOADED + assert CONF_CREDENTIALS_HASH not in entry.data[CONF_DEVICE_CONFIG] + assert CONF_CREDENTIALS_HASH in entry.data + # Gets the new hash from the successful connection. + assert entry.data[CONF_CREDENTIALS_HASH] == "theNewHash" + assert "Migration to version 1.4 complete" in caplog.text + + +async def test_move_credentials_hash_auth_error( + hass: HomeAssistant, +) -> None: + """Test credentials hash moved to parent. + + If there is an auth error it should be deleted after migration + in async_setup_entry. 
+ """ + device_config = { + **DEVICE_CONFIG_KLAP.to_dict( + exclude_credentials=True, credentials_hash="theHash" + ) + } + entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} + + entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data=entry_data, + unique_id=MAC_ADDRESS, + version=1, + minor_version=3, + ) + assert entry.data[CONF_DEVICE_CONFIG][CONF_CREDENTIALS_HASH] == "theHash" + + with ( + patch( + "homeassistant.components.tplink.Device.connect", + side_effect=AuthenticationError, + ), + patch("homeassistant.components.tplink.PLATFORMS", []), + ): + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.minor_version == 4 + assert entry.state is ConfigEntryState.SETUP_ERROR + assert CONF_CREDENTIALS_HASH not in entry.data[CONF_DEVICE_CONFIG] + # Auth failure deletes the hash + assert CONF_CREDENTIALS_HASH not in entry.data + + +async def test_move_credentials_hash_other_error( + hass: HomeAssistant, +) -> None: + """Test credentials hash moved to parent. + + When there is a KasaException the same hash should still be on the parent + at the end of the test. 
+ """ + device_config = { + **DEVICE_CONFIG_KLAP.to_dict( + exclude_credentials=True, credentials_hash="theHash" + ) + } + entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} + + entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data=entry_data, + unique_id=MAC_ADDRESS, + version=1, + minor_version=3, + ) + assert entry.data[CONF_DEVICE_CONFIG][CONF_CREDENTIALS_HASH] == "theHash" + + with ( + patch( + "homeassistant.components.tplink.Device.connect", side_effect=KasaException + ), + patch("homeassistant.components.tplink.PLATFORMS", []), + ): + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.minor_version == 4 + assert entry.state is ConfigEntryState.SETUP_RETRY + assert CONF_CREDENTIALS_HASH not in entry.data[CONF_DEVICE_CONFIG] + assert CONF_CREDENTIALS_HASH in entry.data + assert entry.data[CONF_CREDENTIALS_HASH] == "theHash" + + +async def test_credentials_hash( + hass: HomeAssistant, +) -> None: + """Test credentials_hash used to call connect.""" + device_config = {**DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)} + entry_data = { + **CREATE_ENTRY_DATA_KLAP, + CONF_DEVICE_CONFIG: device_config, + CONF_CREDENTIALS_HASH: "theHash", + } + + entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data=entry_data, + unique_id=MAC_ADDRESS, + ) + + async def _connect(config): + config.credentials_hash = "theHash" + return _mocked_device(device_config=config, credentials_hash="theHash") + + with ( + patch("homeassistant.components.tplink.PLATFORMS", []), + patch("homeassistant.components.tplink.Device.connect", new=_connect), + ): + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + assert CONF_CREDENTIALS_HASH not in entry.data[CONF_DEVICE_CONFIG] + assert CONF_CREDENTIALS_HASH in entry.data + assert 
entry.data[CONF_DEVICE_CONFIG] == device_config + assert entry.data[CONF_CREDENTIALS_HASH] == "theHash" + + +async def test_credentials_hash_auth_error( + hass: HomeAssistant, +) -> None: + """Test credentials_hash is deleted after an auth failure.""" + device_config = {**DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)} + entry_data = { + **CREATE_ENTRY_DATA_KLAP, + CONF_DEVICE_CONFIG: device_config, + CONF_CREDENTIALS_HASH: "theHash", + } + + entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data=entry_data, + unique_id=MAC_ADDRESS, + ) + + with ( + patch("homeassistant.components.tplink.PLATFORMS", []), + patch( + "homeassistant.components.tplink.Device.connect", + side_effect=AuthenticationError, + ) as connect_mock, + ): + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + expected_config = DeviceConfig.from_dict( + DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True, credentials_hash="theHash") + ) + connect_mock.assert_called_with(config=expected_config) + assert entry.state is ConfigEntryState.SETUP_ERROR + assert CONF_CREDENTIALS_HASH not in entry.data diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index 9f352e7ffc4..6998d8fbcc7 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -5,7 +5,17 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import MagicMock, PropertyMock -from kasa import AuthenticationException, SmartDeviceException, TimeoutException +from freezegun.api import FrozenDateTimeFactory +from kasa import ( + AuthenticationError, + DeviceType, + KasaException, + LightState, + Module, + TimeoutError, +) +from kasa.interfaces import LightEffect +from kasa.iot import IotDevice import pytest from homeassistant.components import tplink @@ -23,10 +33,17 @@ from homeassistant.components.light import ( ATTR_TRANSITION, ATTR_XY_COLOR, DOMAIN as 
LIGHT_DOMAIN, + EFFECT_OFF, ) from homeassistant.components.tplink.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH -from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, STATE_OFF, STATE_ON +from homeassistant.const import ( + ATTR_ENTITY_ID, + CONF_HOST, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -34,9 +51,9 @@ from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from . import ( + DEVICE_ID, MAC_ADDRESS, - _mocked_bulb, - _mocked_smart_light_strip, + _mocked_device, _patch_connect, _patch_discovery, _patch_single_discovery, @@ -45,37 +62,77 @@ from . import ( from tests.common import MockConfigEntry, async_fire_time_changed +@pytest.mark.parametrize( + ("device_type"), + [ + pytest.param(DeviceType.Dimmer, id="Dimmer"), + pytest.param(DeviceType.Bulb, id="Bulb"), + pytest.param(DeviceType.LightStrip, id="LightStrip"), + pytest.param(DeviceType.WallSwitch, id="WallSwitch"), + ], +) async def test_light_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, device_type ) -> None: """Test a light unique id.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.color_temp = None - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + light = _mocked_device(modules=[Module.Light], alias="my_light") + light.device_type = device_type + with _patch_discovery(device=light), _patch_connect(device=light): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" - assert entity_registry.async_get(entity_id).unique_id == "AABBCCDDEEFF" + 
entity_id = "light.my_light" + assert ( + entity_registry.async_get(entity_id).unique_id + == MAC_ADDRESS.replace(":", "").upper() + ) + + +async def test_legacy_dimmer_unique_id(hass: HomeAssistant) -> None: + """Test a light unique id.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + light = _mocked_device( + modules=[Module.Light], + alias="my_light", + spec=IotDevice, + device_id="aa:bb:cc:dd:ee:ff", + ) + light.device_type = DeviceType.Dimmer + + with _patch_discovery(device=light), _patch_connect(device=light): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "light.my_light" + entity_registry = er.async_get(hass) + assert entity_registry.async_get(entity_id).unique_id == "aa:bb:cc:dd:ee:ff" @pytest.mark.parametrize( - ("bulb", "transition"), [(_mocked_bulb(), 2.0), (_mocked_smart_light_strip(), None)] + ("device", "transition"), + [ + (_mocked_device(modules=[Module.Light]), 2.0), + (_mocked_device(modules=[Module.Light, Module.LightEffect]), None), + ], ) async def test_color_light( - hass: HomeAssistant, bulb: MagicMock, transition: float | None + hass: HomeAssistant, device: MagicMock, transition: float | None ) -> None: """Test a color light and that all transitions are correctly passed.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb.color_temp = None - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + light = device.modules[Module.Light] + light.color_temp = None + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() @@ -90,22 +147,31 @@ async def test_color_light( assert state.state == 
"on" attributes = state.attributes assert attributes[ATTR_BRIGHTNESS] == 128 - assert attributes[ATTR_COLOR_MODE] == "hs" assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] - assert attributes[ATTR_MIN_MIREDS] == 111 - assert attributes[ATTR_MAX_MIREDS] == 250 - assert attributes[ATTR_HS_COLOR] == (10, 30) - assert attributes[ATTR_RGB_COLOR] == (255, 191, 178) - assert attributes[ATTR_XY_COLOR] == (0.42, 0.336) + # If effect is active, only the brightness can be controlled + if attributes.get(ATTR_EFFECT) is not None: + assert attributes[ATTR_COLOR_MODE] == "brightness" + else: + assert attributes[ATTR_COLOR_MODE] == "hs" + assert attributes[ATTR_MIN_MIREDS] == 111 + assert attributes[ATTR_MAX_MIREDS] == 250 + assert attributes[ATTR_HS_COLOR] == (10, 30) + assert attributes[ATTR_RGB_COLOR] == (255, 191, 178) + assert attributes[ATTR_XY_COLOR] == (0.42, 0.336) await hass.services.async_call( LIGHT_DOMAIN, "turn_off", BASE_PAYLOAD, blocking=True ) - bulb.turn_off.assert_called_once_with(transition=KASA_TRANSITION_VALUE) + light.set_state.assert_called_once_with( + LightState(light_on=False, transition=KASA_TRANSITION_VALUE) + ) + light.set_state.reset_mock() await hass.services.async_call(LIGHT_DOMAIN, "turn_on", BASE_PAYLOAD, blocking=True) - bulb.turn_on.assert_called_once_with(transition=KASA_TRANSITION_VALUE) - bulb.turn_on.reset_mock() + light.set_state.assert_called_once_with( + LightState(light_on=True, transition=KASA_TRANSITION_VALUE) + ) + light.set_state.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -113,8 +179,8 @@ async def test_color_light( {**BASE_PAYLOAD, ATTR_BRIGHTNESS: 100}, blocking=True, ) - bulb.set_brightness.assert_called_with(39, transition=KASA_TRANSITION_VALUE) - bulb.set_brightness.reset_mock() + light.set_brightness.assert_called_with(39, transition=KASA_TRANSITION_VALUE) + light.set_brightness.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -122,10 +188,10 @@ async def test_color_light( 
{**BASE_PAYLOAD, ATTR_COLOR_TEMP_KELVIN: 6666}, blocking=True, ) - bulb.set_color_temp.assert_called_with( + light.set_color_temp.assert_called_with( 6666, brightness=None, transition=KASA_TRANSITION_VALUE ) - bulb.set_color_temp.reset_mock() + light.set_color_temp.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -133,10 +199,10 @@ async def test_color_light( {**BASE_PAYLOAD, ATTR_COLOR_TEMP_KELVIN: 6666}, blocking=True, ) - bulb.set_color_temp.assert_called_with( + light.set_color_temp.assert_called_with( 6666, brightness=None, transition=KASA_TRANSITION_VALUE ) - bulb.set_color_temp.reset_mock() + light.set_color_temp.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -144,8 +210,8 @@ async def test_color_light( {**BASE_PAYLOAD, ATTR_HS_COLOR: (10, 30)}, blocking=True, ) - bulb.set_hsv.assert_called_with(10, 30, None, transition=KASA_TRANSITION_VALUE) - bulb.set_hsv.reset_mock() + light.set_hsv.assert_called_with(10, 30, None, transition=KASA_TRANSITION_VALUE) + light.set_hsv.reset_mock() async def test_color_light_no_temp(hass: HomeAssistant) -> None: @@ -154,14 +220,15 @@ async def test_color_light_no_temp(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.is_variable_color_temp = False - type(bulb).color_temp = PropertyMock(side_effect=Exception) - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + device = _mocked_device(modules=[Module.Light], alias="my_light") + light = device.modules[Module.Light] + light.is_variable_color_temp = False + type(light).color_temp = PropertyMock(side_effect=Exception) + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == "on" 
@@ -176,13 +243,14 @@ async def test_color_light_no_temp(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - bulb.turn_off.assert_called_once() + light.set_state.assert_called_once() + light.set_state.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - bulb.turn_on.assert_called_once() - bulb.turn_on.reset_mock() + light.set_state.assert_called_once() + light.set_state.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -190,8 +258,8 @@ async def test_color_light_no_temp(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100}, blocking=True, ) - bulb.set_brightness.assert_called_with(39, transition=None) - bulb.set_brightness.reset_mock() + light.set_brightness.assert_called_with(39, transition=None) + light.set_brightness.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -199,12 +267,16 @@ async def test_color_light_no_temp(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_HS_COLOR: (10, 30)}, blocking=True, ) - bulb.set_hsv.assert_called_with(10, 30, None, transition=None) - bulb.set_hsv.reset_mock() + light.set_hsv.assert_called_with(10, 30, None, transition=None) + light.set_hsv.reset_mock() @pytest.mark.parametrize( - ("bulb", "is_color"), [(_mocked_bulb(), True), (_mocked_smart_light_strip(), False)] + ("bulb", "is_color"), + [ + (_mocked_device(modules=[Module.Light], alias="my_light"), True), + (_mocked_device(modules=[Module.Light], alias="my_light"), False), + ], ) async def test_color_temp_light( hass: HomeAssistant, bulb: MagicMock, is_color: bool @@ -214,22 +286,24 @@ async def test_color_temp_light( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb.is_color = is_color - bulb.color_temp = 4000 - bulb.is_variable_color_temp = True + device = 
_mocked_device(modules=[Module.Light], alias="my_light") + light = device.modules[Module.Light] + light.is_color = is_color + light.color_temp = 4000 + light.is_variable_color_temp = True - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == "on" attributes = state.attributes assert attributes[ATTR_BRIGHTNESS] == 128 assert attributes[ATTR_COLOR_MODE] == "color_temp" - if bulb.is_color: + if light.is_color: assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] else: assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp"] @@ -240,13 +314,14 @@ async def test_color_temp_light( await hass.services.async_call( LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - bulb.turn_off.assert_called_once() + light.set_state.assert_called_once() + light.set_state.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - bulb.turn_on.assert_called_once() - bulb.turn_on.reset_mock() + light.set_state.assert_called_once() + light.set_state.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -254,8 +329,8 @@ async def test_color_temp_light( {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100}, blocking=True, ) - bulb.set_brightness.assert_called_with(39, transition=None) - bulb.set_brightness.reset_mock() + light.set_brightness.assert_called_with(39, transition=None) + light.set_brightness.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -263,8 +338,8 @@ async def test_color_temp_light( {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6666}, blocking=True, ) - bulb.set_color_temp.assert_called_with(6666, brightness=None, transition=None) - 
bulb.set_color_temp.reset_mock() + light.set_color_temp.assert_called_with(6666, brightness=None, transition=None) + light.set_color_temp.reset_mock() # Verify color temp is clamped to the valid range await hass.services.async_call( @@ -273,8 +348,8 @@ async def test_color_temp_light( {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 20000}, blocking=True, ) - bulb.set_color_temp.assert_called_with(9000, brightness=None, transition=None) - bulb.set_color_temp.reset_mock() + light.set_color_temp.assert_called_with(9000, brightness=None, transition=None) + light.set_color_temp.reset_mock() # Verify color temp is clamped to the valid range await hass.services.async_call( @@ -283,8 +358,8 @@ async def test_color_temp_light( {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 1}, blocking=True, ) - bulb.set_color_temp.assert_called_with(4000, brightness=None, transition=None) - bulb.set_color_temp.reset_mock() + light.set_color_temp.assert_called_with(4000, brightness=None, transition=None) + light.set_color_temp.reset_mock() async def test_brightness_only_light(hass: HomeAssistant) -> None: @@ -293,15 +368,16 @@ async def test_brightness_only_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.is_color = False - bulb.is_variable_color_temp = False + device = _mocked_device(modules=[Module.Light], alias="my_light") + light = device.modules[Module.Light] + light.is_color = False + light.is_variable_color_temp = False - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == "on" @@ -313,13 +389,14 @@ async def 
test_brightness_only_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - bulb.turn_off.assert_called_once() + light.set_state.assert_called_once() + light.set_state.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - bulb.turn_on.assert_called_once() - bulb.turn_on.reset_mock() + light.set_state.assert_called_once() + light.set_state.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -327,8 +404,8 @@ async def test_brightness_only_light(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100}, blocking=True, ) - bulb.set_brightness.assert_called_with(39, transition=None) - bulb.set_brightness.reset_mock() + light.set_brightness.assert_called_with(39, transition=None) + light.set_brightness.reset_mock() async def test_on_off_light(hass: HomeAssistant) -> None: @@ -337,16 +414,17 @@ async def test_on_off_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.is_color = False - bulb.is_variable_color_temp = False - bulb.is_dimmable = False + device = _mocked_device(modules=[Module.Light], alias="my_light") + light = device.modules[Module.Light] + light.is_color = False + light.is_variable_color_temp = False + light.is_dimmable = False - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == "on" @@ -356,13 +434,14 @@ async def test_on_off_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: 
entity_id}, blocking=True ) - bulb.turn_off.assert_called_once() + light.set_state.assert_called_once() + light.set_state.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - bulb.turn_on.assert_called_once() - bulb.turn_on.reset_mock() + light.set_state.assert_called_once() + light.set_state.reset_mock() async def test_off_at_start_light(hass: HomeAssistant) -> None: @@ -371,17 +450,18 @@ async def test_off_at_start_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.is_color = False - bulb.is_variable_color_temp = False - bulb.is_dimmable = False - bulb.is_on = False + device = _mocked_device(modules=[Module.Light], alias="my_light") + light = device.modules[Module.Light] + light.is_color = False + light.is_variable_color_temp = False + light.is_dimmable = False + light.state = LightState(light_on=False) - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == "off" @@ -395,15 +475,16 @@ async def test_dimmer_turn_on_fix(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.is_dimmer = True - bulb.is_on = False + device = _mocked_device(modules=[Module.Light], alias="my_light") + light = device.modules[Module.Light] + device.device_type = DeviceType.Dimmer + light.state = LightState(light_on=False) - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + with _patch_discovery(device=device), _patch_connect(device=device): await 
async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == "off" @@ -411,45 +492,60 @@ async def test_dimmer_turn_on_fix(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - bulb.turn_on.assert_called_once_with(transition=1) - bulb.turn_on.reset_mock() + light.set_state.assert_called_once_with( + LightState( + light_on=True, + brightness=None, + hue=None, + saturation=None, + color_temp=None, + transition=1, + ) + ) + light.set_state.reset_mock() -async def test_smart_strip_effects(hass: HomeAssistant) -> None: +async def test_smart_strip_effects( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Test smart strip effects.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - strip = _mocked_smart_light_strip() + device = _mocked_device( + modules=[Module.Light, Module.LightEffect], alias="my_light" + ) + light = device.modules[Module.Light] + light_effect = device.modules[Module.LightEffect] with ( - _patch_discovery(device=strip), - _patch_single_discovery(device=strip), - _patch_connect(device=strip), + _patch_discovery(device=device), + _patch_single_discovery(device=device), + _patch_connect(device=device), ): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == STATE_ON assert state.attributes[ATTR_EFFECT] == "Effect1" - assert state.attributes[ATTR_EFFECT_LIST] == ["Effect1", "Effect2"] + assert state.attributes[ATTR_EFFECT_LIST] == ["Off", "Effect1", "Effect2"] # Ensure setting color temp when an effect - # is in 
progress calls set_hsv to clear the effect + # is in progress calls set_effect to clear the effect await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) - strip.set_hsv.assert_called_once_with(0, 0, None) - strip.set_color_temp.assert_called_once_with(4000, brightness=None, transition=None) - strip.set_hsv.reset_mock() - strip.set_color_temp.reset_mock() + light_effect.set_effect.assert_called_once_with(LightEffect.LIGHT_EFFECTS_OFF) + light.set_color_temp.assert_called_once_with(4000, brightness=None, transition=None) + light_effect.set_effect.reset_mock() + light.set_color_temp.reset_mock() await hass.services.async_call( LIGHT_DOMAIN, @@ -457,21 +553,54 @@ async def test_smart_strip_effects(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "Effect2"}, blocking=True, ) - strip.set_effect.assert_called_once_with( + light_effect.set_effect.assert_called_once_with( "Effect2", brightness=None, transition=None ) - strip.set_effect.reset_mock() + light_effect.set_effect.reset_mock() + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "Effect2" - strip.effect = {"name": "Effect1", "enable": 0, "custom": 0} + # Test setting light effect off + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "off"}, + blocking=True, + ) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "off" + light.set_state.assert_not_called() + + # Test setting light effect to invalid value + caplog.clear() + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: 
"Effect3"}, + blocking=True, + ) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "off" + assert "Invalid effect Effect3 for" in caplog.text + + light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_EFFECT] is None + assert state.attributes[ATTR_EFFECT] == EFFECT_OFF - strip.is_off = True - strip.is_on = False + light.state = LightState(light_on=False) async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=20)) await hass.async_block_till_done() @@ -485,12 +614,11 @@ async def test_smart_strip_effects(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - strip.turn_on.assert_called_once() - strip.turn_on.reset_mock() + light.set_state.assert_called_once() + light.set_state.reset_mock() - strip.is_off = False - strip.is_on = True - strip.effect_list = None + light.state = LightState(light_on=True) + light_effect.effect_list = None async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() @@ -505,13 +633,17 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - strip = _mocked_smart_light_strip() + device = _mocked_device( + modules=[Module.Light, Module.LightEffect], alias="my_light" + ) + light = device.modules[Module.Light] + light_effect = device.modules[Module.LightEffect] - with _patch_discovery(device=strip), _patch_connect(device=strip): + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, 
{tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == STATE_ON @@ -526,7 +658,7 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None: }, blocking=True, ) - strip.set_custom_effect.assert_called_once_with( + light_effect.set_custom_effect.assert_called_once_with( { "custom": 1, "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN", @@ -543,7 +675,7 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None: "backgrounds": [(340, 20, 50), (20, 50, 50), (0, 100, 50)], } ) - strip.set_custom_effect.reset_mock() + light_effect.set_custom_effect.reset_mock() await hass.services.async_call( DOMAIN, @@ -555,7 +687,7 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None: }, blocking=True, ) - strip.set_custom_effect.assert_called_once_with( + light_effect.set_custom_effect.assert_called_once_with( { "custom": 1, "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN", @@ -571,9 +703,9 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None: "random_seed": 600, } ) - strip.set_custom_effect.reset_mock() + light_effect.set_custom_effect.reset_mock() - strip.effect = { + light_effect.effect = { "custom": 1, "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN", "brightness": 100, @@ -586,15 +718,8 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None: state = hass.states.get(entity_id) assert state.state == STATE_ON - strip.is_off = True - strip.is_on = False - strip.effect = { - "custom": 1, - "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN", - "brightness": 100, - "name": "Custom", - "enable": 0, - } + light.state = LightState(light_on=False) + light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=20)) await hass.async_block_till_done() @@ -608,8 +733,8 @@ async def test_smart_strip_custom_random_effect(hass: 
HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - strip.turn_on.assert_called_once() - strip.turn_on.reset_mock() + light.set_state.assert_called_once() + light.set_state.reset_mock() await hass.services.async_call( DOMAIN, @@ -631,7 +756,7 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - strip.set_custom_effect.assert_called_once_with( + light_effect.set_custom_effect.assert_called_once_with( { "custom": 1, "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN", @@ -653,7 +778,7 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None: "transition_range": [2000, 3000], } ) - strip.set_custom_effect.reset_mock() + light_effect.set_custom_effect.reset_mock() async def test_smart_strip_custom_random_effect_at_start(hass: HomeAssistant) -> None: @@ -662,19 +787,17 @@ async def test_smart_strip_custom_random_effect_at_start(hass: HomeAssistant) -> domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - strip = _mocked_smart_light_strip() - strip.effect = { - "custom": 1, - "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN", - "brightness": 100, - "name": "Custom", - "enable": 0, - } - with _patch_discovery(device=strip), _patch_connect(device=strip): + device = _mocked_device( + modules=[Module.Light, Module.LightEffect], alias="my_light" + ) + light = device.modules[Module.Light] + light_effect = device.modules[Module.LightEffect] + light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == STATE_ON @@ -685,8 +808,8 @@ async def test_smart_strip_custom_random_effect_at_start(hass: HomeAssistant) -> {ATTR_ENTITY_ID: entity_id}, 
blocking=True, ) - strip.turn_on.assert_called_once() - strip.turn_on.reset_mock() + light.set_state.assert_called_once() + light.set_state.reset_mock() async def test_smart_strip_custom_sequence_effect(hass: HomeAssistant) -> None: @@ -695,13 +818,16 @@ async def test_smart_strip_custom_sequence_effect(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - strip = _mocked_smart_light_strip() + device = _mocked_device( + modules=[Module.Light, Module.LightEffect], alias="my_light" + ) + light_effect = device.modules[Module.LightEffect] - with _patch_discovery(device=strip), _patch_connect(device=strip): + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" state = hass.states.get(entity_id) assert state.state == STATE_ON @@ -715,7 +841,7 @@ async def test_smart_strip_custom_sequence_effect(hass: HomeAssistant) -> None: }, blocking=True, ) - strip.set_custom_effect.assert_called_once_with( + light_effect.set_custom_effect.assert_called_once_with( { "custom": 1, "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN", @@ -733,24 +859,24 @@ async def test_smart_strip_custom_sequence_effect(hass: HomeAssistant) -> None: "direction": 4, } ) - strip.set_custom_effect.reset_mock() + light_effect.set_custom_effect.reset_mock() @pytest.mark.parametrize( ("exception_type", "msg", "reauth_expected"), [ ( - AuthenticationException, + AuthenticationError, "Device authentication error async_turn_on: test error", True, ), ( - TimeoutException, + TimeoutError, "Timeout communicating with the device async_turn_on: test error", False, ), ( - SmartDeviceException, + KasaException, "Unable to communicate with the device async_turn_on: test error", False, ), @@ -768,14 +894,15 @@ async def test_light_errors_when_turned_on( 
domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.turn_on.side_effect = exception_type(msg) + device = _mocked_device(modules=[Module.Light], alias="my_light") + light = device.modules[Module.Light] + light.set_state.side_effect = exception_type(msg) - with _patch_discovery(device=bulb), _patch_connect(device=bulb): + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() - entity_id = "light.my_bulb" + entity_id = "light.my_light" assert not any( already_migrated_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH}) @@ -786,7 +913,7 @@ async def test_light_errors_when_turned_on( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.async_block_till_done() - assert bulb.turn_on.call_count == 1 + assert light.set_state.call_count == 1 assert ( any( flow @@ -797,3 +924,121 @@ async def test_light_errors_when_turned_on( ) == reauth_expected ) + + +async def test_light_child( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Test child lights are added to parent device with the right ids.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + + child_light_1 = _mocked_device( + modules=[Module.Light], alias="my_light_0", device_id=f"{DEVICE_ID}00" + ) + child_light_2 = _mocked_device( + modules=[Module.Light], alias="my_light_1", device_id=f"{DEVICE_ID}01" + ) + parent_device = _mocked_device( + device_id=DEVICE_ID, + alias="my_device", + children=[child_light_1, child_light_2], + modules=[Module.Light], + ) + + with _patch_discovery(device=parent_device), _patch_connect(device=parent_device): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await 
hass.async_block_till_done() + + entity_id = "light.my_device" + entity = entity_registry.async_get(entity_id) + assert entity + + for light_id in range(2): + child_entity_id = f"light.my_device_my_light_{light_id}" + child_entity = entity_registry.async_get(child_entity_id) + assert child_entity + assert child_entity.unique_id == f"{DEVICE_ID}0{light_id}" + assert child_entity.device_id == entity.device_id + + +async def test_scene_effect_light( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test activating a scene works with effects. + + i.e. doesn't try to set the effect to 'off' + """ + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + device = _mocked_device( + modules=[Module.Light, Module.LightEffect], alias="my_light" + ) + light_effect = device.modules[Module.LightEffect] + light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF + + with _patch_discovery(device=device), _patch_connect(device=device): + assert await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + assert await async_setup_component(hass, "scene", {}) + await hass.async_block_till_done() + + entity_id = "light.my_light" + + await hass.services.async_call( + LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + await hass.async_block_till_done() + freezer.tick(5) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state is STATE_ON + assert state.attributes["effect"] is EFFECT_OFF + + await hass.services.async_call( + "scene", + "create", + {"scene_id": "effect_off_scene", "snapshot_entities": [entity_id]}, + blocking=True, + ) + await hass.async_block_till_done() + scene_state = hass.states.get("scene.effect_off_scene") + assert scene_state.state is STATE_UNKNOWN + + await hass.services.async_call( + LIGHT_DOMAIN, "turn_off", 
{ATTR_ENTITY_ID: entity_id}, blocking=True + ) + await hass.async_block_till_done() + freezer.tick(5) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state is STATE_OFF + + await hass.services.async_call( + "scene", + "turn_on", + { + "entity_id": "scene.effect_off_scene", + }, + blocking=True, + ) + await hass.async_block_till_done() + scene_state = hass.states.get("scene.effect_off_scene") + assert scene_state.state is not STATE_UNKNOWN + + freezer.tick(5) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state is STATE_ON + assert state.attributes["effect"] is EFFECT_OFF diff --git a/tests/components/tplink/test_number.py b/tests/components/tplink/test_number.py new file mode 100644 index 00000000000..865ce27ffc0 --- /dev/null +++ b/tests/components/tplink/test_number.py @@ -0,0 +1,163 @@ +"""Tests for tplink number platform.""" + +from kasa import Feature +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components import tplink +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.components.tplink.const import DOMAIN +from homeassistant.components.tplink.entity import EXCLUDED_FEATURES +from homeassistant.components.tplink.number import NUMBER_DESCRIPTIONS +from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from . 
import ( + DEVICE_ID, + MAC_ADDRESS, + _mocked_device, + _mocked_feature, + _mocked_strip_children, + _patch_connect, + _patch_discovery, + setup_platform_for_device, + snapshot_platform, +) + +from tests.common import MockConfigEntry + + +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a sensor unique ids.""" + features = {description.key for description in NUMBER_DESCRIPTIONS} + features.update(EXCLUDED_FEATURES) + device = _mocked_device(alias="my_device", features=features) + + await setup_platform_for_device(hass, mock_config_entry, Platform.NUMBER, device) + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + for excluded in EXCLUDED_FEATURES: + assert hass.states.get(f"sensor.my_device_{excluded}") is None + + +async def test_number(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: + """Test a sensor unique ids.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + new_feature = _mocked_feature( + "temperature_offset", + value=10, + name="Temperature offset", + type_=Feature.Type.Number, + category=Feature.Category.Config, + minimum_value=1, + maximum_value=100, + ) + plug = _mocked_device(alias="my_plug", features=[new_feature]) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "number.my_plug_temperature_offset" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_temperature_offset" + + +async def test_number_children( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: 
dr.DeviceRegistry, +) -> None: + """Test a sensor unique ids.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + new_feature = _mocked_feature( + "temperature_offset", + value=10, + name="Some number", + type_=Feature.Type.Number, + category=Feature.Category.Config, + minimum_value=1, + maximum_value=100, + ) + plug = _mocked_device( + alias="my_plug", + features=[new_feature], + children=_mocked_strip_children(features=[new_feature]), + ) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "number.my_plug_temperature_offset" + entity = entity_registry.async_get(entity_id) + assert entity + device = device_registry.async_get(entity.device_id) + + for plug_id in range(2): + child_entity_id = f"number.my_plug_plug{plug_id}_temperature_offset" + child_entity = entity_registry.async_get(child_entity_id) + assert child_entity + assert child_entity.unique_id == f"PLUG{plug_id}DEVICEID_temperature_offset" + assert child_entity.device_id != entity.device_id + child_device = device_registry.async_get(child_entity.device_id) + assert child_device + assert child_device.via_device_id == device.id + + +async def test_number_set( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test a number entity limits and setting values.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + new_feature = _mocked_feature( + "temperature_offset", + value=10, + name="Some number", + type_=Feature.Type.Number, + category=Feature.Category.Config, + minimum_value=1, + maximum_value=200, + ) + plug = _mocked_device(alias="my_plug", features=[new_feature]) + with 
_patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "number.my_plug_temperature_offset" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_temperature_offset" + + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "10" + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + new_feature.set_value.assert_called_with(50) diff --git a/tests/components/tplink/test_select.py b/tests/components/tplink/test_select.py new file mode 100644 index 00000000000..6c49185d91c --- /dev/null +++ b/tests/components/tplink/test_select.py @@ -0,0 +1,158 @@ +"""Tests for tplink select platform.""" + +from kasa import Feature +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components import tplink +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.components.tplink.const import DOMAIN +from homeassistant.components.tplink.entity import EXCLUDED_FEATURES +from homeassistant.components.tplink.select import SELECT_DESCRIPTIONS +from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from . 
import ( + DEVICE_ID, + MAC_ADDRESS, + _mocked_device, + _mocked_feature, + _mocked_strip_children, + _patch_connect, + _patch_discovery, + setup_platform_for_device, + snapshot_platform, +) + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mocked_feature_select() -> Feature: + """Return mocked tplink binary sensor feature.""" + return _mocked_feature( + "light_preset", + value="First choice", + name="light_preset", + choices=["First choice", "Second choice"], + type_=Feature.Type.Choice, + category=Feature.Category.Config, + ) + + +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a sensor unique ids.""" + features = {description.key for description in SELECT_DESCRIPTIONS} + features.update(EXCLUDED_FEATURES) + device = _mocked_device(alias="my_device", features=features) + + await setup_platform_for_device(hass, mock_config_entry, Platform.SELECT, device) + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + for excluded in EXCLUDED_FEATURES: + assert hass.states.get(f"sensor.my_device_{excluded}") is None + + +async def test_select( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mocked_feature_select: Feature, +) -> None: + """Test a sensor unique ids.""" + mocked_feature = mocked_feature_select + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + + plug = _mocked_device(alias="my_plug", features=[mocked_feature]) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + # The entity_id is based on standard name from core. 
+ entity_id = "select.my_plug_light_preset" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_{mocked_feature.id}" + + +async def test_select_children( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mocked_feature_select: Feature, +) -> None: + """Test a sensor unique ids.""" + mocked_feature = mocked_feature_select + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + plug = _mocked_device( + alias="my_plug", + features=[mocked_feature], + children=_mocked_strip_children(features=[mocked_feature]), + ) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "select.my_plug_light_preset" + entity = entity_registry.async_get(entity_id) + assert entity + device = device_registry.async_get(entity.device_id) + + for plug_id in range(2): + child_entity_id = f"select.my_plug_plug{plug_id}_light_preset" + child_entity = entity_registry.async_get(child_entity_id) + assert child_entity + assert child_entity.unique_id == f"PLUG{plug_id}DEVICEID_{mocked_feature.id}" + assert child_entity.device_id != entity.device_id + child_device = device_registry.async_get(child_entity.device_id) + assert child_device + assert child_device.via_device_id == device.id + + +async def test_select_select( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mocked_feature_select: Feature, +) -> None: + """Test a select setting values.""" + mocked_feature = mocked_feature_select + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + plug = _mocked_device(alias="my_plug", features=[mocked_feature]) + 
with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "select.my_plug_light_preset" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_light_preset" + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "Second choice"}, + blocking=True, + ) + mocked_feature.set_value.assert_called_with("Second choice") diff --git a/tests/components/tplink/test_sensor.py b/tests/components/tplink/test_sensor.py index 43884083483..dda43c52430 100644 --- a/tests/components/tplink/test_sensor.py +++ b/tests/components/tplink/test_sensor.py @@ -1,35 +1,71 @@ """Tests for light platform.""" -from unittest.mock import Mock +from kasa import Device, Feature, Module +import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components import tplink from homeassistant.components.tplink.const import DOMAIN -from homeassistant.const import CONF_HOST +from homeassistant.components.tplink.entity import EXCLUDED_FEATURES +from homeassistant.components.tplink.sensor import SENSOR_DESCRIPTIONS +from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from . import MAC_ADDRESS, _mocked_bulb, _mocked_plug, _patch_connect, _patch_discovery +from . 
import ( + DEVICE_ID, + MAC_ADDRESS, + _mocked_device, + _mocked_energy_features, + _mocked_feature, + _mocked_strip_children, + _patch_connect, + _patch_discovery, + setup_platform_for_device, + snapshot_platform, +) from tests.common import MockConfigEntry +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a sensor unique ids.""" + features = {description.key for description in SENSOR_DESCRIPTIONS} + features.update(EXCLUDED_FEATURES) + device = _mocked_device(alias="my_device", features=features) + + await setup_platform_for_device(hass, mock_config_entry, Platform.SENSOR, device) + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + for excluded in EXCLUDED_FEATURES: + assert hass.states.get(f"sensor.my_device_{excluded}") is None + + async def test_color_light_with_an_emeter(hass: HomeAssistant) -> None: """Test a light with an emeter.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.color_temp = None - bulb.has_emeter = True - bulb.emeter_realtime = Mock( + emeter_features = _mocked_energy_features( power=None, total=None, voltage=None, current=5, + today=5000.0036, + ) + bulb = _mocked_device( + alias="my_bulb", modules=[Module.Light], features=["state", *emeter_features] ) - bulb.emeter_today = 5000.0036 with _patch_discovery(device=bulb), _patch_connect(device=bulb): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() @@ -60,16 +96,13 @@ async def test_plug_with_an_emeter(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - plug = 
_mocked_plug() - plug.color_temp = None - plug.has_emeter = True - plug.emeter_realtime = Mock( + emeter_features = _mocked_energy_features( power=100.06, total=30.0049, voltage=121.19, current=5.035, ) - plug.emeter_today = None + plug = _mocked_device(alias="my_plug", features=["state", *emeter_features]) with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() @@ -95,8 +128,7 @@ async def test_color_light_no_emeter(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - bulb = _mocked_bulb() - bulb.color_temp = None + bulb = _mocked_device(alias="my_bulb", modules=[Module.Light]) bulb.has_emeter = False with _patch_discovery(device=bulb), _patch_connect(device=bulb): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -126,26 +158,175 @@ async def test_sensor_unique_id( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - plug = _mocked_plug() - plug.color_temp = None - plug.has_emeter = True - plug.emeter_realtime = Mock( + emeter_features = _mocked_energy_features( power=100, total=30, voltage=121, current=5, + today=None, ) - plug.emeter_today = None + plug = _mocked_device(alias="my_plug", features=emeter_features) with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() expected = { - "sensor.my_plug_current_consumption": "aa:bb:cc:dd:ee:ff_current_power_w", - "sensor.my_plug_total_consumption": "aa:bb:cc:dd:ee:ff_total_energy_kwh", - "sensor.my_plug_today_s_consumption": "aa:bb:cc:dd:ee:ff_today_energy_kwh", - "sensor.my_plug_voltage": "aa:bb:cc:dd:ee:ff_voltage", - "sensor.my_plug_current": "aa:bb:cc:dd:ee:ff_current_a", + 
"sensor.my_plug_current_consumption": f"{DEVICE_ID}_current_power_w", + "sensor.my_plug_total_consumption": f"{DEVICE_ID}_total_energy_kwh", + "sensor.my_plug_today_s_consumption": f"{DEVICE_ID}_today_energy_kwh", + "sensor.my_plug_voltage": f"{DEVICE_ID}_voltage", + "sensor.my_plug_current": f"{DEVICE_ID}_current_a", } for sensor_entity_id, value in expected.items(): assert entity_registry.async_get(sensor_entity_id).unique_id == value + + +async def test_undefined_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a message is logged when discovering a feature without a description.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + new_feature = _mocked_feature( + "consumption_this_fortnight", + value=5.2, + name="Consumption for fortnight", + type_=Feature.Type.Sensor, + category=Feature.Category.Primary, + unit="A", + precision_hint=2, + ) + plug = _mocked_device(alias="my_plug", features=[new_feature]) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + msg = ( + "Device feature: Consumption for fortnight (consumption_this_fortnight) " + "needs an entity description defined in HA" + ) + assert msg in caplog.text + + +async def test_sensor_children_on_parent( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test a WallSwitch sensor entities are added to parent.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + feature = _mocked_feature( + "consumption_this_month", + value=5.2, + # integration should ignore name and use the value from 
strings.json: + # This month's consumption + name="Consumption for month", + type_=Feature.Type.Sensor, + category=Feature.Category.Primary, + unit="A", + precision_hint=2, + ) + plug = _mocked_device( + alias="my_plug", + features=[feature], + children=_mocked_strip_children(features=[feature]), + device_type=Device.Type.WallSwitch, + ) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "sensor.my_plug_this_month_s_consumption" + entity = entity_registry.async_get(entity_id) + assert entity + device = device_registry.async_get(entity.device_id) + + for plug_id in range(2): + child_entity_id = f"sensor.my_plug_plug{plug_id}_this_month_s_consumption" + child_entity = entity_registry.async_get(child_entity_id) + assert child_entity + assert child_entity.unique_id == f"PLUG{plug_id}DEVICEID_consumption_this_month" + child_device = device_registry.async_get(child_entity.device_id) + assert child_device + + assert child_entity.device_id == entity.device_id + assert child_device.connections == device.connections + + +async def test_sensor_children_on_child( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test strip sensors are on child device.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + feature = _mocked_feature( + "consumption_this_month", + value=5.2, + # integration should ignore name and use the value from strings.json: + # This month's consumption + name="Consumption for month", + type_=Feature.Type.Sensor, + category=Feature.Category.Primary, + unit="A", + precision_hint=2, + ) + plug = _mocked_device( + alias="my_plug", + features=[feature], + children=_mocked_strip_children(features=[feature]), + 
device_type=Device.Type.Strip, + ) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "sensor.my_plug_this_month_s_consumption" + entity = entity_registry.async_get(entity_id) + assert entity + device = device_registry.async_get(entity.device_id) + + for plug_id in range(2): + child_entity_id = f"sensor.my_plug_plug{plug_id}_this_month_s_consumption" + child_entity = entity_registry.async_get(child_entity_id) + assert child_entity + assert child_entity.unique_id == f"PLUG{plug_id}DEVICEID_consumption_this_month" + child_device = device_registry.async_get(child_entity.device_id) + assert child_device + + assert child_entity.device_id != entity.device_id + assert child_device.via_device_id == device.id + + +@pytest.mark.skip +async def test_new_datetime_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test a sensor unique ids.""" + # Skipped temporarily while datetime handling on hold. 
+ already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + plug = _mocked_device(alias="my_plug", features=["on_since"]) + with _patch_discovery(device=plug), _patch_connect(device=plug): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "sensor.my_plug_on_since" + entity = entity_registry.async_get(entity_id) + assert entity + assert entity.unique_id == f"{DEVICE_ID}_on_since" + state = hass.states.get(entity_id) + assert state + assert state.attributes["device_class"] == "timestamp" diff --git a/tests/components/tplink/test_switch.py b/tests/components/tplink/test_switch.py index 02913e0c37e..e9c8cc07b67 100644 --- a/tests/components/tplink/test_switch.py +++ b/tests/components/tplink/test_switch.py @@ -3,12 +3,16 @@ from datetime import timedelta from unittest.mock import AsyncMock -from kasa import AuthenticationException, SmartDeviceException, TimeoutException +from kasa import AuthenticationError, Device, KasaException, Module, TimeoutError +from kasa.iot import IotStrip import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components import tplink from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.tplink.const import DOMAIN +from homeassistant.components.tplink.entity import EXCLUDED_FEATURES +from homeassistant.components.tplink.switch import SWITCH_DESCRIPTIONS from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import ( ATTR_ENTITY_ID, @@ -16,32 +20,57 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, 
entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util, slugify from . import ( + DEVICE_ID, MAC_ADDRESS, - _mocked_dimmer, - _mocked_plug, - _mocked_strip, + _mocked_device, + _mocked_strip_children, _patch_connect, _patch_discovery, + setup_platform_for_device, + snapshot_platform, ) from tests.common import MockConfigEntry, async_fire_time_changed +async def test_states( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a sensor unique ids.""" + features = {description.key for description in SWITCH_DESCRIPTIONS} + features.update(EXCLUDED_FEATURES) + device = _mocked_device(alias="my_device", features=features) + + await setup_platform_for_device(hass, mock_config_entry, Platform.SWITCH, device) + await snapshot_platform( + hass, entity_registry, device_registry, snapshot, mock_config_entry.entry_id + ) + + for excluded in EXCLUDED_FEATURES: + assert hass.states.get(f"sensor.my_device_{excluded}") is None + + async def test_plug(hass: HomeAssistant) -> None: """Test a smart plug.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - plug = _mocked_plug() + plug = _mocked_device(alias="my_plug", features=["state"]) + feat = plug.features["state"] with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() @@ -53,29 +82,42 @@ async def test_plug(hass: HomeAssistant) -> None: await hass.services.async_call( SWITCH_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - plug.turn_off.assert_called_once() - plug.turn_off.reset_mock() + feat.set_value.assert_called_once() + feat.set_value.reset_mock() await 
hass.services.async_call( SWITCH_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - plug.turn_on.assert_called_once() - plug.turn_on.reset_mock() + feat.set_value.assert_called_once() + feat.set_value.reset_mock() @pytest.mark.parametrize( ("dev", "domain"), [ - (_mocked_plug(), "switch"), - (_mocked_strip(), "switch"), - (_mocked_dimmer(), "light"), + (_mocked_device(alias="my_plug", features=["state", "led"]), "switch"), + ( + _mocked_device( + alias="my_strip", + features=["state", "led"], + children=_mocked_strip_children(), + ), + "switch", + ), + ( + _mocked_device( + alias="my_light", modules=[Module.Light], features=["state", "led"] + ), + "light", + ), ], ) -async def test_led_switch(hass: HomeAssistant, dev, domain: str) -> None: +async def test_led_switch(hass: HomeAssistant, dev: Device, domain: str) -> None: """Test LED setting for plugs, strips and dimmers.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) + feat = dev.features["led"] already_migrated_config_entry.add_to_hass(hass) with _patch_discovery(device=dev), _patch_connect(device=dev): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -91,14 +133,14 @@ async def test_led_switch(hass: HomeAssistant, dev, domain: str) -> None: await hass.services.async_call( SWITCH_DOMAIN, "turn_off", {ATTR_ENTITY_ID: led_entity_id}, blocking=True ) - dev.set_led.assert_called_once_with(False) - dev.set_led.reset_mock() + feat.set_value.assert_called_once_with(False) + feat.set_value.reset_mock() await hass.services.async_call( SWITCH_DOMAIN, "turn_on", {ATTR_ENTITY_ID: led_entity_id}, blocking=True ) - dev.set_led.assert_called_once_with(True) - dev.set_led.reset_mock() + feat.set_value.assert_called_once_with(True) + feat.set_value.reset_mock() async def test_plug_unique_id( @@ -109,13 +151,13 @@ async def test_plug_unique_id( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) 
already_migrated_config_entry.add_to_hass(hass) - plug = _mocked_plug() + plug = _mocked_device(alias="my_plug", features=["state", "led"]) with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() entity_id = "switch.my_plug" - assert entity_registry.async_get(entity_id).unique_id == "aa:bb:cc:dd:ee:ff" + assert entity_registry.async_get(entity_id).unique_id == DEVICE_ID async def test_plug_update_fails(hass: HomeAssistant) -> None: @@ -124,7 +166,7 @@ async def test_plug_update_fails(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - plug = _mocked_plug() + plug = _mocked_device(alias="my_plug", features=["state", "led"]) with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() @@ -132,7 +174,7 @@ async def test_plug_update_fails(hass: HomeAssistant) -> None: entity_id = "switch.my_plug" state = hass.states.get(entity_id) assert state.state == STATE_ON - plug.update = AsyncMock(side_effect=SmartDeviceException) + plug.update = AsyncMock(side_effect=KasaException) async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() @@ -146,15 +188,18 @@ async def test_strip(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - strip = _mocked_strip() + strip = _mocked_device( + alias="my_strip", + children=_mocked_strip_children(features=["state"]), + features=["state", "led"], + spec=IotStrip, + ) + strip.children[0].features["state"].value = True + strip.children[1].features["state"].value = False with _patch_discovery(device=strip), _patch_connect(device=strip): await async_setup_component(hass, tplink.DOMAIN, 
{tplink.DOMAIN: {}}) await hass.async_block_till_done() - # Verify we only create entities for the children - # since this is what the previous version did - assert hass.states.get("switch.my_strip") is None - entity_id = "switch.my_strip_plug0" state = hass.states.get(entity_id) assert state.state == STATE_ON @@ -162,14 +207,15 @@ async def test_strip(hass: HomeAssistant) -> None: await hass.services.async_call( SWITCH_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - strip.children[0].turn_off.assert_called_once() - strip.children[0].turn_off.reset_mock() + feat = strip.children[0].features["state"] + feat.set_value.assert_called_once() + feat.set_value.reset_mock() await hass.services.async_call( SWITCH_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - strip.children[0].turn_on.assert_called_once() - strip.children[0].turn_on.reset_mock() + feat.set_value.assert_called_once() + feat.set_value.reset_mock() entity_id = "switch.my_strip_plug1" state = hass.states.get(entity_id) @@ -178,14 +224,15 @@ async def test_strip(hass: HomeAssistant) -> None: await hass.services.async_call( SWITCH_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - strip.children[1].turn_off.assert_called_once() - strip.children[1].turn_off.reset_mock() + feat = strip.children[1].features["state"] + feat.set_value.assert_called_once() + feat.set_value.reset_mock() await hass.services.async_call( SWITCH_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) - strip.children[1].turn_on.assert_called_once() - strip.children[1].turn_on.reset_mock() + feat.set_value.assert_called_once() + feat.set_value.reset_mock() async def test_strip_unique_ids( @@ -196,7 +243,11 @@ async def test_strip_unique_ids( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - strip = _mocked_strip() + strip = _mocked_device( + alias="my_strip", + 
children=_mocked_strip_children(features=["state"]), + features=["state", "led"], + ) with _patch_discovery(device=strip), _patch_connect(device=strip): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() @@ -208,21 +259,45 @@ async def test_strip_unique_ids( ) +async def test_strip_blank_alias( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test a strip unique id.""" + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + strip = _mocked_device( + alias="", + model="KS123", + children=_mocked_strip_children(features=["state", "led"], alias=""), + features=["state", "led"], + ) + with _patch_discovery(device=strip), _patch_connect(device=strip): + await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + await hass.async_block_till_done() + + for plug_id in range(2): + entity_id = f"switch.unnamed_ks123_stripsocket_{plug_id + 1}" + state = hass.states.get(entity_id) + assert state.name == f"Unnamed KS123 Stripsocket {plug_id + 1}" + + @pytest.mark.parametrize( ("exception_type", "msg", "reauth_expected"), [ ( - AuthenticationException, + AuthenticationError, "Device authentication error async_turn_on: test error", True, ), ( - TimeoutException, + TimeoutError, "Timeout communicating with the device async_turn_on: test error", False, ), ( - SmartDeviceException, + KasaException, "Unable to communicate with the device async_turn_on: test error", False, ), @@ -240,8 +315,9 @@ async def test_plug_errors_when_turned_on( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - plug = _mocked_plug() - plug.turn_on.side_effect = exception_type("test error") + plug = _mocked_device(alias="my_plug", features=["state", "led"]) + feat = plug.features["state"] + feat.set_value.side_effect = 
exception_type("test error") with _patch_discovery(device=plug), _patch_connect(device=plug): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -258,7 +334,7 @@ async def test_plug_errors_when_turned_on( SWITCH_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) await hass.async_block_till_done() - assert plug.turn_on.call_count == 1 + assert feat.set_value.call_count == 1 assert ( any( flow diff --git a/tests/components/tplink_omada/conftest.py b/tests/components/tplink_omada/conftest.py index c29fcb633e4..510a2e7a87c 100644 --- a/tests/components/tplink_omada/conftest.py +++ b/tests/components/tplink_omada/conftest.py @@ -1,6 +1,6 @@ """Test fixtures for TP-Link Omada integration.""" -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Generator import json from unittest.mock import AsyncMock, MagicMock, patch @@ -17,7 +17,6 @@ from tplink_omada_client.devices import ( OmadaSwitch, OmadaSwitchPortDetails, ) -from typing_extensions import Generator from homeassistant.components.tplink_omada.config_flow import CONF_SITE from homeassistant.components.tplink_omada.const import DOMAIN @@ -130,6 +129,7 @@ def _get_mock_client(mac: str) -> OmadaNetworkClient: if c["wireless"]: return OmadaWirelessClient(c) return OmadaWiredClient(c) + raise ValueError(f"Client with MAC {mac} not found in mock data") @pytest.fixture diff --git a/tests/components/tplink_omada/snapshots/test_switch.ambr b/tests/components/tplink_omada/snapshots/test_switch.ambr index 282d2a4a6a5..a13d386e721 100644 --- a/tests/components/tplink_omada/snapshots/test_switch.ambr +++ b/tests/components/tplink_omada/snapshots/test_switch.ambr @@ -25,19 +25,6 @@ 'state': 'on', }) # --- -# name: test_gateway_disappear_disables_switches - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Router Port 4 Internet Connected', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 
'switch.test_router_port_4_internet_connected', - 'last_changed': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_gateway_port_change_disables_switch_entities StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -110,144 +97,6 @@ 'unit_of_measurement': None, }) # --- -# name: test_poe_switches.10 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 6 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_6_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.11 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_6_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 6 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000006_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.12 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 7 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_7_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.13 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_7_poe', - 'has_entity_name': True, - 'hidden_by': None, 
- 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 7 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000007_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.14 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 8 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_8_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.15 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_8_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 8 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000008_poe', - 'unit_of_measurement': None, - }) -# --- # name: test_poe_switches.2 StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -294,141 +143,3 @@ 'unit_of_measurement': None, }) # --- -# name: test_poe_switches.4 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 3 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_3_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.5 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_3_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 3 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000003_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.6 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 4 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.7 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 4 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000004_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.8 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 5 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_5_poe', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.9 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_5_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 5 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000005_poe', - 'unit_of_measurement': None, - }) -# --- diff --git a/tests/components/traccar/test_init.py b/tests/components/traccar/test_init.py index feacbb7b13f..49127aec347 100644 --- a/tests/components/traccar/test_init.py +++ b/tests/components/traccar/test_init.py @@ -45,7 +45,7 @@ async def traccar_client( @pytest.fixture(autouse=True) -async def setup_zones(hass): +async def setup_zones(hass: HomeAssistant) -> None: """Set up Zone config in HA.""" assert await async_setup_component( hass, @@ -63,7 +63,7 @@ async def setup_zones(hass): @pytest.fixture(name="webhook_id") -async def webhook_id_fixture(hass, client): +async def webhook_id_fixture(hass: HomeAssistant, client: TestClient) -> str: """Initialize the Traccar component and get the webhook_id.""" await async_process_ha_core_config( hass, diff --git a/tests/components/traccar_server/conftest.py b/tests/components/traccar_server/conftest.py index 6a8e428e7a2..0013b3249bd 100644 --- a/tests/components/traccar_server/conftest.py +++ b/tests/components/traccar_server/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Traccar Server tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from 
pytraccar import ApiClient, SubscriptionStatus -from typing_extensions import Generator from homeassistant.components.traccar_server.const import ( CONF_CUSTOM_ATTRIBUTES, diff --git a/tests/components/traccar_server/test_config_flow.py b/tests/components/traccar_server/test_config_flow.py index 5da6f592957..62f39f00dc1 100644 --- a/tests/components/traccar_server/test_config_flow.py +++ b/tests/components/traccar_server/test_config_flow.py @@ -1,11 +1,11 @@ """Test the Traccar Server config flow.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock import pytest from pytraccar import TraccarException -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.traccar.device_tracker import PLATFORM_SCHEMA diff --git a/tests/components/traccar_server/test_diagnostics.py b/tests/components/traccar_server/test_diagnostics.py index 15d74ef9ef5..738fea1a45d 100644 --- a/tests/components/traccar_server/test_diagnostics.py +++ b/tests/components/traccar_server/test_diagnostics.py @@ -1,9 +1,9 @@ """Test Traccar Server diagnostics.""" +from collections.abc import Generator from unittest.mock import AsyncMock from syrupy import SnapshotAssertion -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index 92ba2c67020..b0b982d4825 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -9,11 +9,11 @@ from unittest.mock import patch import pytest from pytest_unordered import unordered -from homeassistant.bootstrap import async_setup_component from homeassistant.components.trace.const import DEFAULT_STORED_TRACES from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import Context, 
CoreState, HomeAssistant, callback from homeassistant.helpers.typing import UNDEFINED +from homeassistant.setup import async_setup_component from homeassistant.util.uuid import random_uuid_hex from tests.common import load_fixture @@ -207,7 +207,7 @@ async def test_get_trace( _assert_raw_config(domain, sun_config, trace) assert trace["blueprint_inputs"] is None assert trace["context"] - assert trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == "error" assert trace["item_id"] == "sun" @@ -899,7 +899,7 @@ async def test_list_traces( assert len(_find_traces(response["result"], domain, "sun")) == 1 trace = _find_traces(response["result"], domain, "sun")[0] assert trace["last_step"] == last_step[0].format(prefix=prefix) - assert trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == script_execution[0] assert trace["timestamp"] @@ -1639,7 +1639,7 @@ async def test_trace_blueprint_automation( assert trace["config"]["id"] == "sun" assert trace["blueprint_inputs"] == sun_config assert trace["context"] - assert trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == "error" assert trace["item_id"] == "sun" diff --git a/tests/components/tractive/conftest.py b/tests/components/tractive/conftest.py index 9a17a557c49..7f319a87b5b 100644 --- a/tests/components/tractive/conftest.py +++ b/tests/components/tractive/conftest.py @@ -1,12 +1,12 @@ """Common fixtures for the Tractive tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiotractive.trackable_object import TrackableObject from aiotractive.tracker import 
Tracker import pytest -from typing_extensions import Generator from homeassistant.components.tractive.const import DOMAIN, SERVER_UNAVAILABLE from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/tractive/snapshots/test_binary_sensor.ambr b/tests/components/tractive/snapshots/test_binary_sensor.ambr index c6d50fb0fbb..4b610e927d5 100644 --- a/tests/components/tractive/snapshots/test_binary_sensor.ambr +++ b/tests/components/tractive/snapshots/test_binary_sensor.ambr @@ -46,50 +46,3 @@ 'state': 'on', }) # --- -# name: test_sensor[binary_sensor.test_pet_tracker_battery_charging-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_pet_tracker_battery_charging', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tracker battery charging', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_battery_charging', - 'unique_id': 'pet_id_123_battery_charging', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[binary_sensor.test_pet_tracker_battery_charging-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery_charging', - 'friendly_name': 'Test Pet Tracker battery charging', - }), - 'context': , - 'entity_id': 'binary_sensor.test_pet_tracker_battery_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/tractive/snapshots/test_device_tracker.ambr b/tests/components/tractive/snapshots/test_device_tracker.ambr index 3a145a48b5a..4e7c5bfe173 100644 --- 
a/tests/components/tractive/snapshots/test_device_tracker.ambr +++ b/tests/components/tractive/snapshots/test_device_tracker.ambr @@ -50,54 +50,3 @@ 'state': 'not_home', }) # --- -# name: test_sensor[device_tracker.test_pet_tracker-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'device_tracker', - 'entity_category': , - 'entity_id': 'device_tracker.test_pet_tracker', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker', - 'unique_id': 'pet_id_123', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[device_tracker.test_pet_tracker-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'battery_level': 88, - 'friendly_name': 'Test Pet Tracker', - 'gps_accuracy': 99, - 'latitude': 22.333, - 'longitude': 44.555, - 'source_type': , - }), - 'context': , - 'entity_id': 'device_tracker.test_pet_tracker', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_home', - }) -# --- diff --git a/tests/components/tractive/snapshots/test_switch.ambr b/tests/components/tractive/snapshots/test_switch.ambr index ea9ea9d9e48..08e0c984d0c 100644 --- a/tests/components/tractive/snapshots/test_switch.ambr +++ b/tests/components/tractive/snapshots/test_switch.ambr @@ -1,142 +1,4 @@ # serializer version: 1 -# name: test_sensor[switch.test_pet_live_tracking-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 
'switch.test_pet_live_tracking', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Live tracking', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'live_tracking', - 'unique_id': 'pet_id_123_live_tracking', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_live_tracking-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Live tracking', - }), - 'context': , - 'entity_id': 'switch.test_pet_live_tracking', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.test_pet_tracker_buzzer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_pet_tracker_buzzer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker buzzer', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_buzzer', - 'unique_id': 'pet_id_123_buzzer', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_tracker_buzzer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Tracker buzzer', - }), - 'context': , - 'entity_id': 'switch.test_pet_tracker_buzzer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.test_pet_tracker_led-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_pet_tracker_led', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker LED', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_led', - 'unique_id': 'pet_id_123_led', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_tracker_led-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Tracker LED', - }), - 'context': , - 'entity_id': 'switch.test_pet_tracker_led', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_switch[switch.test_pet_live_tracking-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tractive/test_diagnostics.py b/tests/components/tractive/test_diagnostics.py index cc4fcdeba15..ce07b4d6e2a 100644 --- a/tests/components/tractive/test_diagnostics.py +++ b/tests/components/tractive/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -27,4 +28,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/tradfri/conftest.py b/tests/components/tradfri/conftest.py index 08afe77b4a3..4b0b742850b 100644 --- a/tests/components/tradfri/conftest.py +++ b/tests/components/tradfri/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import 
Callable, Generator import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch @@ -12,7 +12,6 @@ from pytradfri.command import Command from pytradfri.const import ATTR_FIRMWARE_VERSION, ATTR_GATEWAY_ID from pytradfri.device import Device from pytradfri.gateway import Gateway -from typing_extensions import Generator from homeassistant.components.tradfri.const import DOMAIN diff --git a/tests/components/tradfri/test_fan.py b/tests/components/tradfri/test_fan.py index 2abe03d629a..4f72e4709e9 100644 --- a/tests/components/tradfri/test_fan.py +++ b/tests/components/tradfri/test_fan.py @@ -52,7 +52,7 @@ async def test_fan_available( assert state.attributes[ATTR_PERCENTAGE_STEP] == pytest.approx(2.040816) assert state.attributes[ATTR_PRESET_MODES] == ["Auto"] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == 9 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 57 await command_store.trigger_observe_callback( hass, device, {ATTR_REACHABLE_STATE: 0} @@ -172,7 +172,7 @@ async def test_services( assert state.attributes[ATTR_PERCENTAGE_STEP] == pytest.approx(2.040816) assert state.attributes[ATTR_PRESET_MODES] == ["Auto"] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == 9 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 57 await hass.services.async_call( FAN_DOMAIN, diff --git a/tests/components/trafikverket_camera/conftest.py b/tests/components/trafikverket_camera/conftest.py index 61eebb623b2..cef85af2228 100644 --- a/tests/components/trafikverket_camera/conftest.py +++ b/tests/components/trafikverket_camera/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime from unittest.mock import patch import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN from homeassistant.config_entries import 
SOURCE_USER @@ -21,7 +21,9 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(name="load_int") async def load_integration_from_entry( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, get_camera: CameraInfo + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + get_camera: CameraInfoModel, ) -> MockConfigEntry: """Set up the Trafikverket Camera integration in Home Assistant.""" aioclient_mock.get( @@ -51,10 +53,10 @@ async def load_integration_from_entry( @pytest.fixture(name="get_camera") -def fixture_get_camera() -> CameraInfo: +def fixture_get_camera() -> CameraInfoModel: """Construct Camera Mock.""" - return CameraInfo( + return CameraInfoModel( camera_name="Test Camera", camera_id="1234", active=True, @@ -72,10 +74,10 @@ def fixture_get_camera() -> CameraInfo: @pytest.fixture(name="get_camera2") -def fixture_get_camera2() -> CameraInfo: +def fixture_get_camera2() -> CameraInfoModel: """Construct Camera Mock 2.""" - return CameraInfo( + return CameraInfoModel( camera_name="Test Camera2", camera_id="5678", active=True, @@ -93,11 +95,11 @@ def fixture_get_camera2() -> CameraInfo: @pytest.fixture(name="get_cameras") -def fixture_get_cameras() -> CameraInfo: +def fixture_get_cameras() -> CameraInfoModel: """Construct Camera Mock with multiple cameras.""" return [ - CameraInfo( + CameraInfoModel( camera_name="Test Camera", camera_id="1234", active=True, @@ -112,7 +114,7 @@ def fixture_get_cameras() -> CameraInfo: status="Running", camera_type="Road", ), - CameraInfo( + CameraInfoModel( camera_name="Test Camera2", camera_id="5678", active=True, @@ -131,10 +133,10 @@ def fixture_get_cameras() -> CameraInfo: @pytest.fixture(name="get_camera_no_location") -def fixture_get_camera_no_location() -> CameraInfo: +def fixture_get_camera_no_location() -> CameraInfoModel: """Construct Camera Mock.""" - return CameraInfo( + return CameraInfoModel( camera_name="Test Camera", camera_id="1234", active=True, diff --git 
a/tests/components/trafikverket_camera/test_binary_sensor.py b/tests/components/trafikverket_camera/test_binary_sensor.py index 6c694f76233..6750c05772b 100644 --- a/tests/components/trafikverket_camera/test_binary_sensor.py +++ b/tests/components/trafikverket_camera/test_binary_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_ON @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, - get_camera: CameraInfo, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera binary sensor.""" diff --git a/tests/components/trafikverket_camera/test_camera.py b/tests/components/trafikverket_camera/test_camera.py index 1bf742b5f08..51d4563c19b 100644 --- a/tests/components/trafikverket_camera/test_camera.py +++ b/tests/components/trafikverket_camera/test_camera.py @@ -7,7 +7,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.components.camera import async_get_image from homeassistant.config_entries import ConfigEntry @@ -24,7 +24,7 @@ async def test_camera( freezer: FrozenDateTimeFactory, monkeypatch: pytest.MonkeyPatch, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfo, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera sensor.""" state1 = hass.states.get("camera.test_camera") diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index 8162db076fa..2e9e34f4c35 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ 
b/tests/components/trafikverket_camera/test_config_flow.py @@ -6,7 +6,7 @@ from unittest.mock import patch import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoCameraFound, UnknownError -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant import config_entries from homeassistant.components.trafikverket_camera.const import DOMAIN @@ -17,7 +17,7 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -async def test_form(hass: HomeAssistant, get_camera: CameraInfo) -> None: +async def test_form(hass: HomeAssistant, get_camera: CameraInfoModel) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -56,7 +56,9 @@ async def test_form(hass: HomeAssistant, get_camera: CameraInfo) -> None: async def test_form_multiple_cameras( - hass: HomeAssistant, get_cameras: list[CameraInfo], get_camera2: CameraInfo + hass: HomeAssistant, + get_cameras: list[CameraInfoModel], + get_camera2: CameraInfoModel, ) -> None: """Test we get the form with multiple cameras.""" @@ -108,7 +110,7 @@ async def test_form_multiple_cameras( async def test_form_no_location_data( - hass: HomeAssistant, get_camera_no_location: CameraInfo + hass: HomeAssistant, get_camera_no_location: CameraInfoModel ) -> None: """Test we get the form.""" diff --git a/tests/components/trafikverket_camera/test_coordinator.py b/tests/components/trafikverket_camera/test_coordinator.py index 3f37ad05575..f50ab56724e 100644 --- a/tests/components/trafikverket_camera/test_coordinator.py +++ b/tests/components/trafikverket_camera/test_coordinator.py @@ -11,9 +11,9 @@ from pytrafikverket.exceptions import ( NoCameraFound, UnknownError, ) +from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN -from homeassistant.components.trafikverket_camera.coordinator import CameraData from 
homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -28,7 +28,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_coordinator( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraData, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera coordinator.""" aioclient_mock.get( @@ -86,7 +86,7 @@ async def test_coordinator( async def test_coordinator_failed_update( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraData, + get_camera: CameraInfoModel, sideeffect: str, p_error: Exception, entry_state: str, @@ -123,7 +123,7 @@ async def test_coordinator_failed_update( async def test_coordinator_failed_get_image( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraData, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera coordinator.""" aioclient_mock.get( diff --git a/tests/components/trafikverket_camera/test_init.py b/tests/components/trafikverket_camera/test_init.py index f21d36fda27..aaa4c3cfed7 100644 --- a/tests/components/trafikverket_camera/test_init.py +++ b/tests/components/trafikverket_camera/test_init.py @@ -7,7 +7,7 @@ from unittest.mock import patch import pytest from pytrafikverket.exceptions import UnknownError -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera import async_migrate_entry from homeassistant.components.trafikverket_camera.const import DOMAIN @@ -23,7 +23,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_setup_entry( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, ) -> None: """Test setup entry.""" @@ -55,7 +55,7 @@ async def test_setup_entry( async def test_unload_entry( hass: 
HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, ) -> None: """Test unload an entry.""" @@ -89,7 +89,7 @@ async def test_unload_entry( async def test_migrate_entry( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, ) -> None: """Test migrate entry to version 2.""" @@ -136,7 +136,7 @@ async def test_migrate_entry( ) async def test_migrate_entry_fails_with_error( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, version: int, unique_id: str, @@ -205,7 +205,7 @@ async def test_migrate_entry_fails_no_id( ) entry.add_to_hass(hass) - _camera = CameraInfo( + _camera = CameraInfoModel( camera_name="Test_camera", camera_id=None, active=True, @@ -236,7 +236,7 @@ async def test_migrate_entry_fails_no_id( async def test_no_migration_needed( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, ) -> None: """Test migrate entry fails, camera returns no id.""" diff --git a/tests/components/trafikverket_camera/test_recorder.py b/tests/components/trafikverket_camera/test_recorder.py index 23ebd3f2189..d9778ab851a 100644 --- a/tests/components/trafikverket_camera/test_recorder.py +++ b/tests/components/trafikverket_camera/test_recorder.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states @@ -22,7 +22,7 @@ async def test_exclude_attributes( load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfo, + get_camera: CameraInfoModel, ) -> None: """Test camera has description and location excluded from recording.""" state1 = 
hass.states.get("camera.test_camera") diff --git a/tests/components/trafikverket_camera/test_sensor.py b/tests/components/trafikverket_camera/test_sensor.py index 18ccbe56070..0f4ef02a850 100644 --- a/tests/components/trafikverket_camera/test_sensor.py +++ b/tests/components/trafikverket_camera/test_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, - get_camera: CameraInfo, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera sensor.""" diff --git a/tests/components/trafikverket_ferry/conftest.py b/tests/components/trafikverket_ferry/conftest.py index 3491b8474af..99f3ad10636 100644 --- a/tests/components/trafikverket_ferry/conftest.py +++ b/tests/components/trafikverket_ferry/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime, timedelta from unittest.mock import patch import pytest -from pytrafikverket.trafikverket_ferry import FerryStop +from pytrafikverket.models import FerryStopModel from homeassistant.components.trafikverket_ferry.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -20,7 +20,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="load_int") async def load_integration_from_entry( - hass: HomeAssistant, get_ferries: list[FerryStop] + hass: HomeAssistant, get_ferries: list[FerryStopModel] ) -> MockConfigEntry: """Set up the Trafikverket Ferry integration in Home Assistant.""" config_entry = MockConfigEntry( @@ -44,40 +44,51 @@ async def load_integration_from_entry( @pytest.fixture(name="get_ferries") -def fixture_get_ferries() -> list[FerryStop]: +def fixture_get_ferries() -> list[FerryStopModel]: """Construct FerryStop 
Mock.""" - depart1 = FerryStop( - "13", - False, - datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC), - [""], - "0", - datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - "Harbor 1", - "Harbor 2", + depart1 = FerryStopModel( + ferry_stop_id="13", + ferry_stop_name="Harbor1lane", + short_name="Harle", + deleted=False, + departure_time=datetime( + dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC + ), + other_information=[""], + deviation_id="0", + modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + from_harbor_name="Harbor 1", + to_harbor_name="Harbor 2", + type_name="Turnaround", ) - depart2 = FerryStop( - "14", - False, - datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + depart2 = FerryStopModel( + ferry_stop_id="14", + ferry_stop_name="Harbor1lane", + short_name="Harle", + deleted=False, + departure_time=datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), - [""], - "0", - datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - "Harbor 1", - "Harbor 2", + other_information=[""], + deviation_id="0", + modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + from_harbor_name="Harbor 1", + to_harbor_name="Harbor 2", + type_name="Turnaround", ) - depart3 = FerryStop( - "15", - False, - datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + depart3 = FerryStopModel( + ferry_stop_id="15", + ferry_stop_name="Harbor1lane", + short_name="Harle", + deleted=False, + departure_time=datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), - [""], - "0", - datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - "Harbor 1", - "Harbor 2", + other_information=[""], + deviation_id="0", + modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + from_harbor_name="Harbor 1", + to_harbor_name="Harbor 2", + type_name="Turnaround", ) return [depart1, depart2, 
depart3] diff --git a/tests/components/trafikverket_ferry/test_coordinator.py b/tests/components/trafikverket_ferry/test_coordinator.py index ef6329bfd82..ae9a8fc3626 100644 --- a/tests/components/trafikverket_ferry/test_coordinator.py +++ b/tests/components/trafikverket_ferry/test_coordinator.py @@ -8,7 +8,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoFerryFound -from pytrafikverket.trafikverket_ferry import FerryStop +from pytrafikverket.models import FerryStopModel from homeassistant.components.trafikverket_ferry.const import DOMAIN from homeassistant.components.trafikverket_ferry.coordinator import next_departuredate @@ -27,7 +27,7 @@ async def test_coordinator( hass: HomeAssistant, freezer: FrozenDateTimeFactory, monkeypatch: pytest.MonkeyPatch, - get_ferries: list[FerryStop], + get_ferries: list[FerryStopModel], ) -> None: """Test the Trafikverket Ferry coordinator.""" entry = MockConfigEntry( diff --git a/tests/components/trafikverket_ferry/test_init.py b/tests/components/trafikverket_ferry/test_init.py index 22ada7e0f40..827711363ff 100644 --- a/tests/components/trafikverket_ferry/test_init.py +++ b/tests/components/trafikverket_ferry/test_init.py @@ -4,7 +4,7 @@ from __future__ import annotations from unittest.mock import patch -from pytrafikverket.trafikverket_ferry import FerryStop +from pytrafikverket.models import FerryStopModel from homeassistant.components.trafikverket_ferry.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntryState @@ -15,7 +15,9 @@ from . 
import ENTRY_CONFIG from tests.common import MockConfigEntry -async def test_setup_entry(hass: HomeAssistant, get_ferries: list[FerryStop]) -> None: +async def test_setup_entry( + hass: HomeAssistant, get_ferries: list[FerryStopModel] +) -> None: """Test setup entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -37,7 +39,9 @@ async def test_setup_entry(hass: HomeAssistant, get_ferries: list[FerryStop]) -> assert len(mock_tvt_ferry.mock_calls) == 1 -async def test_unload_entry(hass: HomeAssistant, get_ferries: list[FerryStop]) -> None: +async def test_unload_entry( + hass: HomeAssistant, get_ferries: list[FerryStopModel] +) -> None: """Test unload an entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/trafikverket_ferry/test_sensor.py b/tests/components/trafikverket_ferry/test_sensor.py index fc8fa557714..bc5510b0b1d 100644 --- a/tests/components/trafikverket_ferry/test_sensor.py +++ b/tests/components/trafikverket_ferry/test_sensor.py @@ -6,7 +6,7 @@ from datetime import timedelta from unittest.mock import patch import pytest -from pytrafikverket.trafikverket_ferry import FerryStop +from pytrafikverket.models import FerryStopModel from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -19,7 +19,7 @@ async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, - get_ferries: list[FerryStop], + get_ferries: list[FerryStopModel], ) -> None: """Test the Trafikverket Ferry sensor.""" state1 = hass.states.get("sensor.harbor1_departure_from") diff --git a/tests/components/trafikverket_train/conftest.py b/tests/components/trafikverket_train/conftest.py index 7221d96bae2..14671d27252 100644 --- a/tests/components/trafikverket_train/conftest.py +++ b/tests/components/trafikverket_train/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime, timedelta from unittest.mock import patch import pytest -from pytrafikverket.trafikverket_train import TrainStop +from 
pytrafikverket.models import TrainStopModel from homeassistant.components.trafikverket_train.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -21,8 +21,8 @@ from tests.common import MockConfigEntry @pytest.fixture(name="load_int") async def load_integration_from_entry( hass: HomeAssistant, - get_trains: list[TrainStop], - get_train_stop: TrainStop, + get_trains: list[TrainStopModel], + get_train_stop: TrainStopModel, ) -> MockConfigEntry: """Set up the Trafikverket Train integration in Home Assistant.""" @@ -38,7 +38,7 @@ async def load_integration_from_entry( return_value=get_train_stop, ), patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), ): await hass.config_entries.async_setup(config_entry_id) @@ -69,11 +69,11 @@ async def load_integration_from_entry( @pytest.fixture(name="get_trains") -def fixture_get_trains() -> list[TrainStop]: +def fixture_get_trains() -> list[TrainStopModel]: """Construct TrainStop Mock.""" - depart1 = TrainStop( - id=13, + depart1 = TrainStopModel( + train_stop_id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), estimated_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), @@ -83,8 +83,8 @@ def fixture_get_trains() -> list[TrainStop]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart2 = TrainStop( - id=14, + depart2 = TrainStopModel( + train_stop_id=14, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), @@ -95,8 +95,8 @@ def fixture_get_trains() -> list[TrainStop]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart3 = TrainStop( - id=15, + depart3 = TrainStopModel( + train_stop_id=15, canceled=False, 
advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), @@ -112,11 +112,11 @@ def fixture_get_trains() -> list[TrainStop]: @pytest.fixture(name="get_trains_next") -def fixture_get_trains_next() -> list[TrainStop]: +def fixture_get_trains_next() -> list[TrainStopModel]: """Construct TrainStop Mock.""" - depart1 = TrainStop( - id=13, + depart1 = TrainStopModel( + train_stop_id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC), estimated_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC), @@ -126,8 +126,8 @@ def fixture_get_trains_next() -> list[TrainStop]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart2 = TrainStop( - id=14, + depart2 = TrainStopModel( + train_stop_id=14, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), @@ -138,8 +138,8 @@ def fixture_get_trains_next() -> list[TrainStop]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart3 = TrainStop( - id=15, + depart3 = TrainStopModel( + train_stop_id=15, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), @@ -155,11 +155,11 @@ def fixture_get_trains_next() -> list[TrainStop]: @pytest.fixture(name="get_train_stop") -def fixture_get_train_stop() -> TrainStop: +def fixture_get_train_stop() -> TrainStopModel: """Construct TrainStop Mock.""" - return TrainStop( - id=13, + return TrainStopModel( + train_stop_id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 11, 0, tzinfo=dt_util.UTC), estimated_time_at_location=None, diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index a6ba82a85bc..83cc5a89016 100644 --- a/tests/components/trafikverket_train/test_config_flow.py 
+++ b/tests/components/trafikverket_train/test_config_flow.py @@ -12,7 +12,7 @@ from pytrafikverket.exceptions import ( NoTrainStationFound, UnknownError, ) -from pytrafikverket.trafikverket_train import TrainStop +from pytrafikverket.models import TrainStopModel from homeassistant import config_entries from homeassistant.components.trafikverket_train.const import ( @@ -479,8 +479,8 @@ async def test_reauth_flow_error_departures( async def test_options_flow( hass: HomeAssistant, - get_trains: list[TrainStop], - get_train_stop: TrainStop, + get_trains: list[TrainStopModel], + get_train_stop: TrainStopModel, ) -> None: """Test a reauthentication flow.""" entry = MockConfigEntry( @@ -499,7 +499,7 @@ async def test_options_flow( with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_train/test_init.py b/tests/components/trafikverket_train/test_init.py index 329d8d716d0..c8fea174e83 100644 --- a/tests/components/trafikverket_train/test_init.py +++ b/tests/components/trafikverket_train/test_init.py @@ -5,7 +5,7 @@ from __future__ import annotations from unittest.mock import patch from pytrafikverket.exceptions import InvalidAuthentication, NoTrainStationFound -from pytrafikverket.trafikverket_train import TrainStop +from pytrafikverket.models import TrainStopModel from syrupy.assertion import SnapshotAssertion from homeassistant.components.trafikverket_train.const import DOMAIN @@ -18,7 +18,9 @@ from . 
import ENTRY_CONFIG, OPTIONS_CONFIG from tests.common import MockConfigEntry -async def test_unload_entry(hass: HomeAssistant, get_trains: list[TrainStop]) -> None: +async def test_unload_entry( + hass: HomeAssistant, get_trains: list[TrainStopModel] +) -> None: """Test unload an entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -32,7 +34,7 @@ async def test_unload_entry(hass: HomeAssistant, get_trains: list[TrainStop]) -> with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", @@ -52,7 +54,7 @@ async def test_unload_entry(hass: HomeAssistant, get_trains: list[TrainStop]) -> async def test_auth_failed( hass: HomeAssistant, - get_trains: list[TrainStop], + get_trains: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test authentication failed.""" @@ -67,7 +69,7 @@ async def test_auth_failed( entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", side_effect=InvalidAuthentication, ): await hass.config_entries.async_setup(entry.entry_id) @@ -82,7 +84,7 @@ async def test_auth_failed( async def test_no_stations( hass: HomeAssistant, - get_trains: list[TrainStop], + get_trains: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test stations are missing.""" @@ -97,7 +99,7 @@ async def test_no_stations( entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", side_effect=NoTrainStationFound, ): await 
hass.config_entries.async_setup(entry.entry_id) @@ -108,7 +110,7 @@ async def test_no_stations( async def test_migrate_entity_unique_id( hass: HomeAssistant, - get_trains: list[TrainStop], + get_trains: list[TrainStopModel], snapshot: SnapshotAssertion, entity_registry: EntityRegistry, ) -> None: @@ -133,7 +135,7 @@ async def test_migrate_entity_unique_id( with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_train/test_sensor.py b/tests/components/trafikverket_train/test_sensor.py index f21561dd287..f4da3526cb2 100644 --- a/tests/components/trafikverket_train/test_sensor.py +++ b/tests/components/trafikverket_train/test_sensor.py @@ -8,7 +8,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoTrainAnnouncementFound -from pytrafikverket.trafikverket_train import TrainStop +from pytrafikverket.models import TrainStopModel from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry @@ -23,8 +23,8 @@ async def test_sensor_next( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], - get_train_stop: TrainStop, + get_trains_next: list[TrainStopModel], + get_train_stop: TrainStopModel, snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor.""" @@ -70,7 +70,7 @@ async def test_sensor_single_stop( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], + get_trains_next: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor.""" 
@@ -86,7 +86,7 @@ async def test_sensor_update_auth_failure( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], + get_trains_next: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with authentication update failure.""" @@ -119,7 +119,7 @@ async def test_sensor_update_failure( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], + get_trains_next: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with update failure.""" @@ -149,7 +149,7 @@ async def test_sensor_update_failure_no_state( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], + get_trains_next: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with update failure from empty state.""" diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index e1d9d973f25..1331f441940 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -2,20 +2,20 @@ from __future__ import annotations +from collections.abc import Generator from http import HTTPStatus from pathlib import Path from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.tts import ( CONF_LANG, DOMAIN as TTS_DOMAIN, - PLATFORM_SCHEMA, + PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, Provider, TextToSpeechEntity, TtsAudioType, @@ -184,7 +184,7 @@ class MockTTSEntity(BaseProvider, TextToSpeechEntity): class MockTTS(MockPlatform): """A mock TTS platform.""" - PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( + PLATFORM_SCHEMA = TTS_PLATFORM_SCHEMA.extend( {vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES)} ) diff 
--git a/tests/components/tts/conftest.py b/tests/components/tts/conftest.py index b8abb086260..d9a4499f544 100644 --- a/tests/components/tts/conftest.py +++ b/tests/components/tts/conftest.py @@ -3,11 +3,11 @@ From http://doc.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures """ +from collections.abc import Generator from pathlib import Path from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigFlow diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index e0354170b06..7a54ecc26b0 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -47,15 +47,8 @@ ORIG_WRITE_TAGS = tts.SpeechManager.write_tags class DefaultEntity(tts.TextToSpeechEntity): """Test entity.""" - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return SUPPORT_LANGUAGES - - @property - def default_language(self) -> str: - """Return the default language.""" - return DEFAULT_LANG + _attr_supported_languages = SUPPORT_LANGUAGES + _attr_default_language = DEFAULT_LANG async def test_default_entity_attributes() -> None: @@ -523,10 +516,7 @@ class MockProviderWithDefaults(MockProvider): class MockEntityWithDefaults(MockTTSEntity): """Mock entity with default options.""" - @property - def default_options(self): - """Return a mapping with the default options.""" - return {"voice": "alex"} + _attr_default_options = {"voice": "alex"} @pytest.mark.parametrize( @@ -1026,7 +1016,7 @@ class MockProviderBoom(MockProvider): ) -> tts.TtsAudioType: """Load TTS dat.""" # This should not be called, data should be fetched from cache - raise Exception("Boom!") # pylint: disable=broad-exception-raised + raise Exception("Boom!") # noqa: TRY002 class MockEntityBoom(MockTTSEntity): @@ -1037,7 +1027,7 @@ class 
MockEntityBoom(MockTTSEntity): ) -> tts.TtsAudioType: """Load TTS dat.""" # This should not be called, data should be fetched from cache - raise Exception("Boom!") # pylint: disable=broad-exception-raised + raise Exception("Boom!") # noqa: TRY002 @pytest.mark.parametrize("mock_provider", [MockProviderBoom(DEFAULT_LANG)]) @@ -1054,9 +1044,7 @@ async def test_setup_legacy_cache_dir( mock_tts_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_test.mp3" ) - with open(cache_file, "wb") as voice_file: - voice_file.write(tts_data) - + await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) await mock_setup(hass, mock_provider) await hass.services.async_call( @@ -1090,9 +1078,7 @@ async def test_setup_cache_dir( "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" ) - with open(cache_file, "wb") as voice_file: - voice_file.write(tts_data) - + await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) await mock_config_entry_setup(hass, mock_tts_entity) await hass.services.async_call( @@ -1195,9 +1181,7 @@ async def test_load_cache_legacy_retrieve_without_mem_cache( mock_tts_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_test.mp3" ) - with open(cache_file, "wb") as voice_file: - voice_file.write(tts_data) - + await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) await mock_setup(hass, mock_provider) client = await hass_client() @@ -1221,9 +1205,7 @@ async def test_load_cache_retrieve_without_mem_cache( "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" ) - with open(cache_file, "wb") as voice_file: - voice_file.write(tts_data) - + await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) await mock_config_entry_setup(hass, mock_tts_entity) client = await hass_client() @@ -1766,3 +1748,93 @@ async def test_async_convert_audio_error(hass: HomeAssistant) -> None: with pytest.raises(RuntimeError): # Simulate a bad WAV file await tts.async_convert_audio(hass, 
"wav", bytes(0), "mp3") + + +async def test_ttsentity_subclass_properties( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test for errors when subclasses of the TextToSpeechEntity are missing required properties.""" + + class TestClass1(tts.TextToSpeechEntity): + _attr_default_language = DEFAULT_LANG + _attr_supported_languages = SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass1()) + + class TestClass2(tts.TextToSpeechEntity): + @property + def default_language(self) -> str: + return DEFAULT_LANG + + @property + def supported_languages(self) -> list[str]: + return SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass2()) + + assert all(record.exc_info is None for record in caplog.records) + + caplog.clear() + + class TestClass3(tts.TextToSpeechEntity): + _attr_default_language = DEFAULT_LANG + + await mock_config_entry_setup(hass, TestClass3()) + + assert ( + "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() + + class TestClass4(tts.TextToSpeechEntity): + _attr_supported_languages = SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass4()) + + assert ( + "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() + + class TestClass5(tts.TextToSpeechEntity): + @property + def default_language(self) -> str: + return DEFAULT_LANG + + await mock_config_entry_setup(hass, TestClass5()) + + assert ( + "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() 
+ + class TestClass6(tts.TextToSpeechEntity): + @property + def supported_languages(self) -> list[str]: + return SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass6()) + + assert ( + "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) diff --git a/tests/components/tts/test_legacy.py b/tests/components/tts/test_legacy.py index 05bb6dec10f..0d7f99e8cd1 100644 --- a/tests/components/tts/test_legacy.py +++ b/tests/components/tts/test_legacy.py @@ -123,7 +123,7 @@ async def test_platform_setup_with_error( discovery_info: DiscoveryInfoType | None = None, ) -> Provider: """Raise exception during platform setup.""" - raise Exception("Setup error") # pylint: disable=broad-exception-raised + raise Exception("Setup error") # noqa: TRY002 mock_integration(hass, MockModule(domain="bad_tts")) mock_platform(hass, "bad_tts.tts", BadPlatform(mock_provider)) diff --git a/tests/components/tuya/conftest.py b/tests/components/tuya/conftest.py index 981e12ecceb..4fffb3ae389 100644 --- a/tests/components/tuya/conftest.py +++ b/tests/components/tuya/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from unittest.mock import AsyncMock, MagicMock, patch +from collections.abc import Generator +from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.tuya.const import CONF_APP_TYPE, CONF_USER_CODE, DOMAIN @@ -35,7 +35,7 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[None]: """Mock setting up a config entry.""" with patch("homeassistant.components.tuya.async_setup_entry", return_value=True): yield diff --git a/tests/components/twentemilieu/conftest.py b/tests/components/twentemilieu/conftest.py index 7b157572824..7ecf1657ce9 
100644 --- a/tests/components/twentemilieu/conftest.py +++ b/tests/components/twentemilieu/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from datetime import date from unittest.mock import MagicMock, patch import pytest from twentemilieu import WasteType -from typing_extensions import Generator from homeassistant.components.twentemilieu.const import ( CONF_HOUSE_LETTER, diff --git a/tests/components/twentemilieu/snapshots/test_calendar.ambr b/tests/components/twentemilieu/snapshots/test_calendar.ambr index 78b2d56afca..1df4beb4232 100644 --- a/tests/components/twentemilieu/snapshots/test_calendar.ambr +++ b/tests/components/twentemilieu/snapshots/test_calendar.ambr @@ -99,8 +99,10 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/twentemilieu/snapshots/test_sensor.ambr b/tests/components/twentemilieu/snapshots/test_sensor.ambr index a0f3b75da57..86ffc171082 100644 --- a/tests/components/twentemilieu/snapshots/test_sensor.ambr +++ b/tests/components/twentemilieu/snapshots/test_sensor.ambr @@ -68,8 +68,10 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -145,8 +147,10 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -222,8 +226,10 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -299,8 +305,10 @@ }), 
'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -376,8 +384,10 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/twinkly/test_diagnostics.py b/tests/components/twinkly/test_diagnostics.py index 5cb9fc1fe9e..f9cf0bc562c 100644 --- a/tests/components/twinkly/test_diagnostics.py +++ b/tests/components/twinkly/test_diagnostics.py @@ -3,6 +3,7 @@ from collections.abc import Awaitable, Callable from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -26,4 +27,6 @@ async def test_diagnostics( await setup_integration() entry = hass.config_entries.async_entries(DOMAIN)[0] - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/twitch/__init__.py b/tests/components/twitch/__init__.py index 0238bbdadba..2d70aaf9649 100644 --- a/tests/components/twitch/__init__.py +++ b/tests/components/twitch/__init__.py @@ -1,10 +1,9 @@ """Tests for the Twitch component.""" -from collections.abc import AsyncIterator +from collections.abc import AsyncGenerator, AsyncIterator from typing import Any, Generic, TypeVar from twitchAPI.object.base import TwitchObject -from typing_extensions import AsyncGenerator from homeassistant.components.twitch import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/twitch/conftest.py b/tests/components/twitch/conftest.py index 6c243a8dbbf..25e443c2778 100644 --- a/tests/components/twitch/conftest.py +++ 
b/tests/components/twitch/conftest.py @@ -1,11 +1,11 @@ """Configure tests for the Twitch integration.""" +from collections.abc import Generator import time from unittest.mock import AsyncMock, patch import pytest from twitchAPI.object.api import FollowedChannel, Stream, TwitchUser, UserSubscription -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/twitch/fixtures/get_streams.json b/tests/components/twitch/fixtures/get_streams.json index 3714d97aaef..53330c9c82e 100644 --- a/tests/components/twitch/fixtures/get_streams.json +++ b/tests/components/twitch/fixtures/get_streams.json @@ -2,6 +2,7 @@ { "game_name": "Good game", "title": "Title", - "thumbnail_url": "stream-medium.png" + "thumbnail_url": "stream-medium.png", + "started_at": "2021-03-10T03:18:11Z" } ] diff --git a/tests/components/twitch/test_sensor.py b/tests/components/twitch/test_sensor.py index e5cddf8e192..8ce146adf07 100644 --- a/tests/components/twitch/test_sensor.py +++ b/tests/components/twitch/test_sensor.py @@ -3,6 +3,7 @@ from datetime import datetime from unittest.mock import AsyncMock +from dateutil.tz import tzutc from twitchAPI.object.api import FollowedChannel, Stream, UserSubscription from twitchAPI.type import TwitchResourceNotFound @@ -41,6 +42,9 @@ async def test_streaming( assert sensor_state.attributes["entity_picture"] == "stream-medium.png" assert sensor_state.attributes["game"] == "Good game" assert sensor_state.attributes["title"] == "Title" + assert sensor_state.attributes["started_at"] == datetime( + year=2021, month=3, day=10, hour=3, minute=18, second=11, tzinfo=tzutc() + ) async def test_oauth_without_sub_and_follow( diff --git a/tests/components/ukraine_alarm/test_config_flow.py b/tests/components/ukraine_alarm/test_config_flow.py index 58b5dde2bac..de9bdd618de 100644 --- a/tests/components/ukraine_alarm/test_config_flow.py +++ 
b/tests/components/ukraine_alarm/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Ukraine Alarm config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from aiohttp import ClientConnectionError, ClientError, ClientResponseError, RequestInfo import pytest -from typing_extensions import Generator from yarl import URL from homeassistant import config_entries diff --git a/tests/components/unifi/conftest.py b/tests/components/unifi/conftest.py index 4a7d86eea38..798b613b18d 100644 --- a/tests/components/unifi/conftest.py +++ b/tests/components/unifi/conftest.py @@ -3,21 +3,19 @@ from __future__ import annotations import asyncio -from collections.abc import Callable +from collections.abc import Callable, Coroutine, Generator from datetime import timedelta from types import MappingProxyType -from typing import Any +from typing import Any, Protocol from unittest.mock import AsyncMock, patch from aiounifi.models.message import MessageKey import orjson import pytest -from typing_extensions import Generator from homeassistant.components.unifi import STORAGE_KEY, STORAGE_VERSION from homeassistant.components.unifi.const import CONF_SITE_ID, DOMAIN as UNIFI_DOMAIN from homeassistant.components.unifi.hub.websocket import RETRY_TIMER -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -53,6 +51,20 @@ CONTROLLER_HOST = { "uptime": 1562600160, } +type ConfigEntryFactoryType = Callable[[], Coroutine[Any, Any, MockConfigEntry]] + + +class WebsocketMessageMock(Protocol): + """Fixture to mock websocket message.""" + + def __call__( + self, + *, + message: MessageKey | None = None, + data: list[dict[str, Any]] | dict[str, Any] | None = None, + ) -> None: + """Send websocket message.""" + @pytest.fixture(autouse=True, name="mock_discovery") def fixture_discovery(): @@ -97,7 +109,7 @@ def fixture_config_entry( hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any], 
config_entry_options: MappingProxyType[str, Any], -) -> ConfigEntry: +) -> MockConfigEntry: """Define a config entry fixture.""" config_entry = MockConfigEntry( domain=UNIFI_DOMAIN, @@ -161,6 +173,7 @@ def fixture_request( dpi_app_payload: list[dict[str, Any]], dpi_group_payload: list[dict[str, Any]], port_forward_payload: list[dict[str, Any]], + traffic_rule_payload: list[dict[str, Any]], site_payload: list[dict[str, Any]], system_information_payload: list[dict[str, Any]], wlan_payload: list[dict[str, Any]], @@ -171,9 +184,16 @@ def fixture_request( url = f"https://{host}:{DEFAULT_PORT}" def mock_get_request(path: str, payload: list[dict[str, Any]]) -> None: + # APIV2 request respoonses have `meta` and `data` automatically appended + json = {} + if path.startswith("/v2"): + json = payload + else: + json = {"meta": {"rc": "OK"}, "data": payload} + aioclient_mock.get( f"{url}{path}", - json={"meta": {"rc": "OK"}, "data": payload}, + json=json, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -183,6 +203,7 @@ def fixture_request( json={"data": "login successful", "meta": {"rc": "ok"}}, headers={"content-type": CONTENT_TYPE_JSON}, ) + mock_get_request("/api/self/sites", site_payload) mock_get_request(f"/api/s/{site_id}/stat/sta", client_payload) mock_get_request(f"/api/s/{site_id}/rest/user", clients_all_payload) @@ -192,6 +213,7 @@ def fixture_request( mock_get_request(f"/api/s/{site_id}/rest/portforward", port_forward_payload) mock_get_request(f"/api/s/{site_id}/stat/sysinfo", system_information_payload) mock_get_request(f"/api/s/{site_id}/rest/wlanconf", wlan_payload) + mock_get_request(f"/v2/api/site/{site_id}/trafficrules", traffic_rule_payload) return __mock_requests @@ -263,6 +285,12 @@ def fixture_system_information_data() -> list[dict[str, Any]]: ] +@pytest.fixture(name="traffic_rule_payload") +def traffic_rule_payload_data() -> list[dict[str, Any]]: + """Traffic rule data.""" + return [] + + @pytest.fixture(name="wlan_payload") def fixture_wlan_data() -> 
list[dict[str, Any]]: """WLAN data.""" @@ -280,12 +308,12 @@ def fixture_default_requests( @pytest.fixture(name="config_entry_factory") async def fixture_config_entry_factory( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: MockConfigEntry, mock_requests: Callable[[str, str], None], -) -> Callable[[], ConfigEntry]: +) -> ConfigEntryFactoryType: """Fixture factory that can set up UniFi network integration.""" - async def __mock_setup_config_entry() -> ConfigEntry: + async def __mock_setup_config_entry() -> MockConfigEntry: mock_requests(config_entry.data[CONF_HOST], config_entry.data[CONF_SITE_ID]) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -296,8 +324,8 @@ async def fixture_config_entry_factory( @pytest.fixture(name="config_entry_setup") async def fixture_config_entry_setup( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] -) -> ConfigEntry: + config_entry_factory: ConfigEntryFactoryType, +) -> MockConfigEntry: """Fixture providing a set up instance of UniFi network integration.""" return await config_entry_factory() @@ -367,13 +395,15 @@ def fixture_aiounifi_websocket_state( @pytest.fixture(name="mock_websocket_message") -def fixture_aiounifi_websocket_message(_mock_websocket: AsyncMock): +def fixture_aiounifi_websocket_message( + _mock_websocket: AsyncMock, +) -> WebsocketMessageMock: """No real websocket allowed.""" def make_websocket_call( *, message: MessageKey | None = None, - data: list[dict] | dict | None = None, + data: list[dict[str, Any]] | dict[str, Any] | None = None, ) -> None: """Generate a websocket call.""" message_handler = _mock_websocket.call_args[0][0] diff --git a/tests/components/unifi/snapshots/test_button.ambr b/tests/components/unifi/snapshots/test_button.ambr new file mode 100644 index 00000000000..de305aee7eb --- /dev/null +++ b/tests/components/unifi/snapshots/test_button.ambr @@ -0,0 +1,142 @@ +# serializer version: 1 +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.ssid_1_regenerate_password-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.ssid_1_regenerate_password', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Regenerate Password', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'regenerate_password-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.ssid_1_regenerate_password-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'update', + 'friendly_name': 'SSID 1 Regenerate Password', + }), + 'context': , + 'entity_id': 'button.ssid_1_regenerate_password', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_port_1_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.switch_port_1_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': None, + 'unique_id': 'power_cycle-00:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_port_1_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'switch Port 1 Power Cycle', + }), + 'context': , + 'entity_id': 'button.switch_port_1_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.switch_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_restart-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'switch Restart', + }), + 'context': , + 'entity_id': 'button.switch_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/unifi/snapshots/test_device_tracker.ambr b/tests/components/unifi/snapshots/test_device_tracker.ambr new file mode 100644 index 00000000000..3debd512050 --- /dev/null +++ b/tests/components/unifi/snapshots/test_device_tracker.ambr @@ -0,0 +1,149 
@@ +# serializer version: 1 +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.switch_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.switch_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.switch_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch 1', + 'ip': '10.0.1.1', + 'mac': '00:00:00:00:01:01', + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.switch_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'home', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.wd_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.wd_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'wd_client_1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
None, + 'unique_id': 'site_id-00:00:00:00:00:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.wd_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'wd_client_1', + 'host_name': 'wd_client_1', + 'mac': '00:00:00:00:00:02', + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.wd_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.ws_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.ws_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'ws_client_1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'site_id-00:00:00:00:00:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.ws_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'ws_client_1', + 'host_name': 'ws_client_1', + 'ip': '10.0.0.1', + 'mac': '00:00:00:00:00:01', + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.ws_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- diff --git a/tests/components/unifi/snapshots/test_image.ambr b/tests/components/unifi/snapshots/test_image.ambr index 83d76688ea3..0922320ed4d 100644 --- 
a/tests/components/unifi/snapshots/test_image.ambr +++ b/tests/components/unifi/snapshots/test_image.ambr @@ -1,9 +1,51 @@ # serializer version: 1 -# name: test_wlan_qr_code - b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x84\x00\x00\x00\x84\x01\x00\x00\x00\x00y?\xbe\n\x00\x00\x00\xcaIDATx\xda\xedV[\n\xc30\x0c\x13\xbb\x80\xef\x7fK\xdd\xc0\x93\x94\xfd\xac\x1fcL\xfbl(\xc4\x04*\xacG\xdcb/\x8b\xb8O\xdeO\x00\xccP\x95\x8b\xe5\x03\xd7\xf5\xcd\x89pF\xcf\x8c \\48\x08\nS\x948\x03p\xfe\x80C\xa8\x9d\x16\xc7P\xabvJ}\xe2\xd7\x84[\xe5W\xfc7\xbbS\xfd\xde\xcfB\xf115\xa2\xe3%\x99\xad\x93\xa0:\xbf6\xbeS\xec\x1a^\xb4\xed\xfb\xb2\xab\xd1\x99\xc9\xcdAjx\x89\x0e\xc5\xea\xf4T\xf9\xee\xe40m58\xb6<\x1b\xab~\xf4\xban\xd7:\xceu\x9e\x05\xc4I\xa6\xbb\xfb%q<7:\xbf\xa2\x90wo\xf5, + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'image', + 'entity_category': , + 'entity_id': 'image.ssid_1_qr_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'QR Code', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'qr_code-012345678910111213141516', + 'unit_of_measurement': None, + }) # --- -# name: test_wlan_qr_code.1 - b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x94\x00\x00\x00\x94\x01\x00\x00\x00\x00]G=y\x00\x00\x00\xfdIDATx\xda\xedV1\x8e\x041\x0cB\xf7\x01\xff\xff\x97\xfc\xc0\x0bd\xb6\xda\xe6\xeeB\xb9V\xa4dR \xc7`<\xd8\x8f \xbew\x7f\xb9\x030\x98!\xb5\xe9\xb8\xfc\xc1g\xfc\xf6Nx\xa3%\x9c\x84\xbf\xae\xf1\x84\xb5 
\xe796\xf0\\\npjx~1[xZ\\\xbfy+\xf5\xc3\x9b\x8c\xe9\xf0\xeb\xd0k]\xbe\xa3\xa1\xeb\xfaI\x850\xa2Ex\x9f\x1f-\xeb\xe46!\xba\xc0G\x18\xde\xb0|\x8f\x07e8\xca\xd0\xc0,\xd4/\xed&PA\x1a\xf5\xbe~R2m\x07\x8fa\\\xe3\x9d\xc4DnG\x7f\xb0F&\xc4L\xa3~J\xcciy\xdfF\xff\x9a`i\xda$w\xfcom\xcc\x02Kw\x14\xf4\xc2\xd3fn\xba-\xf0A&A\xe2\x0c\x92\x8e\xbfL<\xcb.\xd8\xf1?0~o\xc14\xfcy\xdc\xc48\xa6\xd0\x98\x1f\x99\xbd\xfb\xd0\xd3\x98o\xd1tFR\x07\x8f\xe95lo\xbeE\x88`\x8f\xdf\x8c`lE\x7f\xdf\xff\xc4\x7f\xde\xbd\x00\xfc\xb3\x80\x95k\x06#\x19\x00\x00\x00\x00IEND\xaeB`\x82' +# name: test_entity_and_device_data[site_payload0-wlan_payload0][image.ssid_1_qr_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1', + 'entity_picture': '/api/image_proxy/image.ssid_1_qr_code?token=1', + 'friendly_name': 'SSID 1 QR Code', + }), + 'context': , + 'entity_id': 'image.ssid_1_qr_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T01:01:00+00:00', + }) # --- # name: test_wlan_qr_code[wlan_payload0] b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x84\x00\x00\x00\x84\x01\x00\x00\x00\x00y?\xbe\n\x00\x00\x00\xcaIDATx\xda\xedV[\n\xc30\x0c\x13\xbb\x80\xef\x7fK\xdd\xc0\x93\x94\xfd\xac\x1fcL\xfbl(\xc4\x04*\xacG\xdcb/\x8b\xb8O\xdeO\x00\xccP\x95\x8b\xe5\x03\xd7\xf5\xcd\x89pF\xcf\x8c \\48\x08\nS\x948\x03p\xfe\x80C\xa8\x9d\x16\xc7P\xabvJ}\xe2\xd7\x84[\xe5W\xfc7\xbbS\xfd\xde\xcfB\xf115\xa2\xe3%\x99\xad\x93\xa0:\xbf6\xbeS\xec\x1a^\xb4\xed\xfb\xb2\xab\xd1\x99\xc9\xcdAjx\x89\x0e\xc5\xea\xf4T\xf9\xee\xe40m58\xb6<\x1b\xab~\xf4\xban\xd7:\xceu\x9e\x05\xc4I\xa6\xbb\xfb%q<7:\xbf\xa2\x90wo\xf5 -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].3 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].4 - '1234.0' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' -# --- -# name: 
test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].3 - 'Wired client RX' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].6 - '1234.0' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].2 - 'timestamp' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].3 - 'Wired client Uptime' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].4 - None -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].5 - None -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].6 - '2020-09-14T14:41:45+00:00' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: 
test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].3 - 'Wired client RX' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].6 - '1234.0' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0] - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].1 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].2 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].3 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].4 - '5678.0' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].3 - 'Wired client TX' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].6 - '5678.0' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' -# --- -# name: 
test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].3 - 'Wired client TX' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].6 - '5678.0' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].2 - 'timestamp' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].3 - 'Wired client Uptime' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].4 - None -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].5 - None -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].6 - '2020-09-14T14:41:45+00:00' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:02' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: 
test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].3 - 'Wireless client RX' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].6 - '2345.0' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].3 - 'Wireless client RX' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].6 - '2345.0' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:02' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].3 - 'Wireless client TX' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].5 - -# --- -# name: 
test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].6 - '6789.0' -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].3 - 'Wireless client TX' -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].6 - '6789.0' -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].2 - 'timestamp' -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].3 - 'Wireless client Uptime' -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].4 - None -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].5 - None -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].6 - '2021-01-01T01:00:00+00:00' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_clients-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Clients', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.device_clients', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 
'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Device State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.device_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_temperature-20:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Device Temperature', + 'unit_of_measurement': , + }), + 'context': , + 
'entity_id': 'sensor.device_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Device Uptime', + }), + 'context': , + 'entity_id': 'sensor.device_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T01:00:00+00:00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_budget-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_budget', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': 
dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC Power Budget', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ac_power_budget-01:02:03:04:05:ff', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_budget-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro AC Power Budget', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_budget', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1875.000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC Power Consumption', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ac_power_conumption-01:02:03:04:05:ff', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_consumption-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro AC Power Consumption', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '201.683', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_clients-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro Clients', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_clients', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_cpu_utilization-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_cpu_utilization', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU utilization', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cpu_utilization-01:02:03:04:05:ff', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_cpu_utilization-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro CPU utilization', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_cpu_utilization', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.4', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_memory_utilization-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_memory_utilization', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Memory utilization', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'memory_utilization-01:02:03:04:05:ff', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_memory_utilization-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro Memory utilization', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_memory_utilization', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.9', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_outlet_2_outlet_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_outlet_2_outlet_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2 Outlet Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet_power-01:02:03:04:05:ff_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_outlet_2_outlet_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2 Outlet Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_outlet_2_outlet_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '73.827', + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Dummy USP-PDU-Pro State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Dummy USP-PDU-Pro Uptime', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020-12-18T05:36:58+00:00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_clients-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock-name Clients', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_clients', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_cloudflare_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloudflare WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cloudflare_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Cloudflare WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_cloudflare_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_cloudflare_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloudflare WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cloudflare_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Cloudflare WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_cloudflare_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_google_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Google WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': None, + 'unique_id': 'google_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Google WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_google_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_google_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Google WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'google_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Google WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_google_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '53', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_microsoft_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Microsoft WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'microsoft_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Microsoft WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_microsoft_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_microsoft_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ 
+ }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Microsoft WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'microsoft_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Microsoft WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_microsoft_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 1 PoE 
Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.56', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 1 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 1 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 1 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 1 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 2 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 2 PoE Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.56', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 2 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 2 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 2 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 2 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_3_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 3 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 3 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_3_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_3_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 3 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 3 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_3_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 4 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 4 PoE Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , 
+ 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 4 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 4 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 4 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'port_tx-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 4 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'mock-name State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 
'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.mock_name_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock-name Uptime', + }), + 'context': , + 'entity_id': 'sensor.mock_name_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ssid_1', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan_clients-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SSID 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wired_client_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'rx-00:00:00:00:00:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wired client RX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wired_client_rx', 
+ 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1234.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wired_client_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'tx-00:00:00:00:00:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wired client TX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wired_client_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5678.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wired_client_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uptime-00:00:00:00:00:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Wired client Uptime', + }), + 'context': , + 'entity_id': 'sensor.wired_client_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020-09-14T14:41:45+00:00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'rx-00:00:00:00:00:02', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wireless client RX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wireless_client_rx', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '2345.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'tx-00:00:00:00:00:02', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wireless client TX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wireless_client_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6789.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uptime-00:00:00:00:00:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Wireless client Uptime', + }), + 'context': , + 'entity_id': 'sensor.wireless_client_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T01:00:00+00:00', + }) # --- diff --git a/tests/components/unifi/snapshots/test_switch.ambr b/tests/components/unifi/snapshots/test_switch.ambr new file mode 100644 index 00000000000..04b15f329fd --- /dev/null +++ b/tests/components/unifi/snapshots/test_switch.ambr @@ -0,0 +1,2473 @@ +# serializer version: 1 +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_1_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'restart', + 'friendly_name': 'mock-name Port 1 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_1_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_2_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 2 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Port 2 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_2_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_4_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 4 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Port 4 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_4_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_restart-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Restart', + }), + 'context': , + 'entity_id': 'button.mock_name_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': 
None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 
'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 
'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 
'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, 
+ 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'block-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Block Client 1', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.block_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 
'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ 
+ }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 
'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', 
+ 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 
'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 
'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , 
+ 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'block-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Block Client 1', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.block_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 
'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 
'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_test_traffic_rule-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_test_traffic_rule', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:security-network', + 'original_name': 'Test Traffic Rule', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'traffic_rule-6452cd9b859d5b11aa002ea1', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_test_traffic_rule-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network Test Traffic Rule', + 'icon': 'mdi:security-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_test_traffic_rule', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/unifi/snapshots/test_update.ambr b/tests/components/unifi/snapshots/test_update.ambr new file mode 100644 index 00000000000..99a403a8f21 --- /dev/null +++ b/tests/components/unifi/snapshots/test_update.ambr @@ -0,0 +1,229 @@ +# serializer version: 1 +# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.device_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'device_update-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', + 'friendly_name': 'Device 1', + 'in_progress': False, + 'installed_version': '4.0.42.10433', + 
'latest_version': '4.3.17.11279', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.device_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.device_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'device_update-00:00:00:00:01:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', + 'friendly_name': 'Device 2', + 'in_progress': False, + 'installed_version': '4.0.42.10433', + 'latest_version': '4.0.42.10433', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.device_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.device_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'device_update-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', + 'friendly_name': 'Device 1', + 'in_progress': False, + 'installed_version': '4.0.42.10433', + 'latest_version': '4.3.17.11279', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.device_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.device_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'device_update-00:00:00:00:01:02', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload1-device_payload0][update.device_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', + 'friendly_name': 'Device 2', + 'in_progress': False, + 'installed_version': '4.0.42.10433', + 'latest_version': '4.0.42.10433', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.device_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/unifi/test_button.py b/tests/components/unifi/test_button.py index b58d01e7724..fc3aeccea9f 100644 --- a/tests/components/unifi/test_button.py +++ b/tests/components/unifi/test_button.py @@ -1,27 +1,35 @@ """UniFi Network button platform tests.""" +from copy import deepcopy from datetime import timedelta from typing import Any from unittest.mock import patch +from aiounifi.models.message import MessageKey import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.unifi.const import CONF_SITE_ID -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( - ATTR_DEVICE_CLASS, CONF_HOST, CONTENT_TYPE_JSON, STATE_UNAVAILABLE, - EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + 
WebsocketStateManager, +) + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker RANDOM_TOKEN = "random_token" @@ -119,33 +127,44 @@ WLAN_REGENERATE_PASSWORD = [ ] -async def _test_button_entity( +@pytest.mark.parametrize("device_payload", [DEVICE_RESTART + DEVICE_POWER_CYCLE_POE]) +@pytest.mark.parametrize("wlan_payload", [WLAN_REGENERATE_PASSWORD]) +@pytest.mark.parametrize( + "site_payload", + [ + [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], + [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_entity_and_device_data( hass: HomeAssistant, entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + site_payload: dict[str, Any], + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.BUTTON]): + config_entry = await config_entry_factory() + if site_payload[0]["role"] == "admin": + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + else: + assert len(hass.states.async_entity_ids(BUTTON_DOMAIN)) == 0 + + +async def _test_button_entity( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_state, - config_entry: ConfigEntry, - entity_count: int, + mock_websocket_state: WebsocketStateManager, + config_entry: MockConfigEntry, entity_id: str, - unique_id: str, - device_class: ButtonDeviceClass, request_method: str, request_path: str, request_data: dict[str, Any], call: dict[str, str], ) -> None: """Test button entity.""" - assert len(hass.states.async_entity_ids(BUTTON_DOMAIN)) == entity_count - - ent_reg_entry = entity_registry.async_get(entity_id) - assert ent_reg_entry.unique_id == unique_id - assert ent_reg_entry.entity_category is 
EntityCategory.CONFIG - - # Validate state object - button = hass.states.get(entity_id) - assert button is not None - assert button.attributes.get(ATTR_DEVICE_CLASS) == device_class - # Send and validate device command aioclient_mock.clear_requests() aioclient_mock.request( @@ -175,10 +194,7 @@ async def _test_button_entity( @pytest.mark.parametrize( ( "device_payload", - "entity_count", "entity_id", - "unique_id", - "device_class", "request_method", "request_path", "call", @@ -186,10 +202,7 @@ async def _test_button_entity( [ ( DEVICE_RESTART, - 1, "button.switch_restart", - "device_restart-00:00:00:00:01:01", - ButtonDeviceClass.RESTART, "post", "/cmd/devmgr", { @@ -200,10 +213,7 @@ async def _test_button_entity( ), ( DEVICE_POWER_CYCLE_POE, - 2, "button.switch_port_1_power_cycle", - "power_cycle-00:00:00:00:01:01_1", - ButtonDeviceClass.RESTART, "post", "/cmd/devmgr", { @@ -216,14 +226,10 @@ async def _test_button_entity( ) async def test_device_button_entities( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, - mock_websocket_state, - entity_count: int, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateManager, entity_id: str, - unique_id: str, - device_class: ButtonDeviceClass, request_method: str, request_path: str, call: dict[str, str], @@ -231,14 +237,10 @@ async def test_device_button_entities( """Test button entities based on device sources.""" await _test_button_entity( hass, - entity_registry, aioclient_mock, mock_websocket_state, config_entry_setup, - entity_count, entity_id, - unique_id, - device_class, request_method, request_path, {}, @@ -249,10 +251,7 @@ async def test_device_button_entities( @pytest.mark.parametrize( ( "wlan_payload", - "entity_count", "entity_id", - "unique_id", - "device_class", "request_method", "request_path", "request_data", @@ -261,10 +260,7 @@ async def test_device_button_entities( [ ( WLAN_REGENERATE_PASSWORD, - 1, 
"button.ssid_1_regenerate_password", - "regenerate_password-012345678910111213141516", - ButtonDeviceClass.UPDATE, "put", f"/rest/wlanconf/{WLAN_REGENERATE_PASSWORD[0]["_id"]}", { @@ -279,12 +275,9 @@ async def test_wlan_button_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, - mock_websocket_state, - entity_count: int, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateManager, entity_id: str, - unique_id: str, - device_class: ButtonDeviceClass, request_method: str, request_path: str, request_data: dict[str, Any], @@ -306,16 +299,42 @@ async def test_wlan_button_entities( await _test_button_entity( hass, - entity_registry, aioclient_mock, mock_websocket_state, config_entry_setup, - entity_count, entity_id, - unique_id, - device_class, request_method, request_path, request_data, call, ) + + +@pytest.mark.parametrize("device_payload", [DEVICE_POWER_CYCLE_POE]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_power_cycle_availability( + hass: HomeAssistant, + mock_websocket_message: WebsocketMessageMock, + device_payload: dict[str, Any], +) -> None: + """Verify that disabling PoE marks entity as unavailable.""" + entity_id = "button.switch_port_1_power_cycle" + + assert hass.states.get(entity_id).state != STATE_UNAVAILABLE + + # PoE disabled + + device_1 = deepcopy(device_payload[0]) + device_1["port_table"][0]["poe_enable"] = False + mock_websocket_message(message=MessageKey.DEVICE, data=device_1) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # PoE enabled + device_1 = deepcopy(device_payload[0]) + device_1["port_table"][0]["poe_enable"] = True + mock_websocket_message(message=MessageKey.DEVICE, data=device_1) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state != STATE_UNAVAILABLE diff --git a/tests/components/unifi/test_config_flow.py 
b/tests/components/unifi/test_config_flow.py index 7b37437cd1d..1d745511dc5 100644 --- a/tests/components/unifi/test_config_flow.py +++ b/tests/components/unifi/test_config_flow.py @@ -24,7 +24,7 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_WIRED_CLIENTS, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry +from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -35,8 +35,9 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import ConfigEntryFactoryType + from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker CLIENTS = [{"mac": "00:00:00:00:00:01"}] @@ -136,9 +137,7 @@ async def test_flow_works(hass: HomeAssistant, mock_discovery) -> None: } -async def test_flow_works_negative_discovery( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_flow_works_negative_discovery(hass: HomeAssistant) -> None: """Test config flow with a negative outcome of async_discovery_unifi.""" result = await hass.config_entries.flow.async_init( UNIFI_DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -298,7 +297,7 @@ async def test_flow_fails_hub_unavailable(hass: HomeAssistant) -> None: async def test_reauth_flow_update_configuration( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Verify reauth flow can update hub configuration.""" config_entry = config_entry_setup @@ -338,12 +337,50 @@ async def test_reauth_flow_update_configuration( assert config_entry.data[CONF_PASSWORD] == "new_pass" +async def test_reauth_flow_update_configuration_on_not_loaded_entry( + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType +) -> None: + """Verify reauth flow can update hub configuration on a not 
loaded entry.""" + with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError): + config_entry = await config_entry_factory() + + result = await hass.config_entries.flow.async_init( + UNIFI_DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": config_entry.unique_id, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "1.2.3.4", + CONF_USERNAME: "new_name", + CONF_PASSWORD: "new_pass", + CONF_PORT: 1234, + CONF_VERIFY_SSL: True, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert config_entry.data[CONF_HOST] == "1.2.3.4" + assert config_entry.data[CONF_USERNAME] == "new_name" + assert config_entry.data[CONF_PASSWORD] == "new_pass" + + @pytest.mark.parametrize("client_payload", [CLIENTS]) @pytest.mark.parametrize("device_payload", [DEVICES]) @pytest.mark.parametrize("wlan_payload", [WLANS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_advanced_option_flow( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test advanced config flow options.""" config_entry = config_entry_setup @@ -427,7 +464,7 @@ async def test_advanced_option_flow( @pytest.mark.parametrize("client_payload", [CLIENTS]) async def test_simple_option_flow( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test simple config flow options.""" config_entry = config_entry_setup @@ -496,9 +533,8 @@ async def test_form_ssdp(hass: HomeAssistant) -> None: } -async def test_form_ssdp_aborts_if_host_already_exists( - hass: HomeAssistant, config_entry: ConfigEntry -) -> None: +@pytest.mark.usefixtures("config_entry") +async 
def test_form_ssdp_aborts_if_host_already_exists(hass: HomeAssistant) -> None: """Test we abort if the host is already configured.""" result = await hass.config_entries.flow.async_init( UNIFI_DOMAIN, @@ -518,9 +554,8 @@ async def test_form_ssdp_aborts_if_host_already_exists( assert result["reason"] == "already_configured" -async def test_form_ssdp_aborts_if_serial_already_exists( - hass: HomeAssistant, config_entry: ConfigEntry -) -> None: +@pytest.mark.usefixtures("config_entry") +async def test_form_ssdp_aborts_if_serial_already_exists(hass: HomeAssistant) -> None: """Test we abort if the serial is already configured.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/unifi/test_device_tracker.py b/tests/components/unifi/test_device_tracker.py index 984fe50753f..c653370656d 100644 --- a/tests/components/unifi/test_device_tracker.py +++ b/tests/components/unifi/test_device_tracker.py @@ -1,20 +1,20 @@ """The tests for the UniFi Network device tracker platform.""" -from collections.abc import Callable from datetime import timedelta from types import MappingProxyType from typing import Any +from unittest.mock import patch from aiounifi.models.event import EventKey from aiounifi.models.message import MessageKey from freezegun.api import FrozenDateTimeFactory, freeze_time import pytest +from syrupy import SnapshotAssertion from homeassistant.components.device_tracker import DOMAIN as TRACKER_DOMAIN from homeassistant.components.unifi.const import ( CONF_BLOCK_CLIENT, CONF_CLIENT_SOURCE, - CONF_DETECTION_TIME, CONF_IGNORE_WIRED_BUG, CONF_SSID_FILTER, CONF_TRACK_CLIENTS, @@ -23,13 +23,18 @@ from homeassistant.components.unifi.const import ( DEFAULT_DETECTION_TIME, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE +from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE, Platform from homeassistant.core 
import HomeAssistant, State from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform WIRED_CLIENT_1 = { "hostname": "wd_client_1", @@ -85,6 +90,25 @@ SWITCH_1 = { } +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT_1, WIRELESS_CLIENT_1]]) +@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) +@pytest.mark.parametrize( + "site_payload", + [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], +) +@pytest.mark.usefixtures("mock_device_registry") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.DEVICE_TRACKER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + @pytest.mark.parametrize( "client_payload", [[WIRELESS_CLIENT_1, WIRED_BUG_CLIENT, UNSEEN_CLIENT]] ) @@ -92,8 +116,8 @@ SWITCH_1 = { @pytest.mark.usefixtures("mock_device_registry") async def test_client_state_update( hass: HomeAssistant, - mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + mock_websocket_message: WebsocketMessageMock, + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Verify tracking of wireless clients.""" @@ -145,7 +169,7 @@ async def test_client_state_update( async def test_client_state_from_event_source( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, client_payload: 
list[dict[str, Any]], ) -> None: """Verify update state of client based on event source.""" @@ -213,67 +237,40 @@ async def test_client_state_from_event_source( assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME +@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) +@pytest.mark.usefixtures("mock_device_registry") @pytest.mark.parametrize( - "device_payload", + ("state", "interval", "expected"), [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device 1", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "version": "4.0.42.10433", - }, - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "ip": "10.0.1.2", - "mac": "00:00:00:00:01:02", - "model": "US16P150", - "name": "Device 2", - "next_interval": 20, - "state": 0, - "type": "usw", - "version": "4.0.42.10433", - }, - ] + # Start home, new signal but still home, heartbeat timer triggers away + (1, 20, (STATE_HOME, STATE_HOME, STATE_NOT_HOME)), + # Start away, new signal but still home, heartbeat time do not trigger + (0, 40, (STATE_NOT_HOME, STATE_HOME, STATE_HOME)), ], ) -@pytest.mark.usefixtures("config_entry_setup") -@pytest.mark.usefixtures("mock_device_registry") -async def test_tracked_devices( +async def test_tracked_device_state_change( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_websocket_message, + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], + state: int, + interval: int, + expected: list[str], ) -> None: """Test the update_items function with some devices.""" - assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 - assert hass.states.get("device_tracker.device_1").state == STATE_HOME - assert hass.states.get("device_tracker.device_2").state == 
STATE_NOT_HOME + device_payload[0] = device_payload[0] | {"state": state} + await config_entry_factory() + assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 + assert hass.states.get("device_tracker.switch_1").state == expected[0] # State change signalling work - device_1 = device_payload[0] - device_1["next_interval"] = 20 - device_2 = device_payload[1] - device_2["state"] = 1 - device_2["next_interval"] = 50 - mock_websocket_message(message=MessageKey.DEVICE, data=[device_1, device_2]) + switch_1 = device_payload[0] | {"state": 1, "next_interval": interval} + mock_websocket_message(message=MessageKey.DEVICE, data=[switch_1]) await hass.async_block_till_done() - assert hass.states.get("device_tracker.device_1").state == STATE_HOME - assert hass.states.get("device_tracker.device_2").state == STATE_HOME + # Too little time has passed + assert hass.states.get("device_tracker.switch_1").state == expected[1] # Change of time can mark device not_home outside of expected reporting interval new_time = dt_util.utcnow() + timedelta(seconds=90) @@ -281,23 +278,24 @@ async def test_tracked_devices( async_fire_time_changed(hass, new_time) await hass.async_block_till_done() - assert hass.states.get("device_tracker.device_1").state == STATE_NOT_HOME - assert hass.states.get("device_tracker.device_2").state == STATE_HOME + # Heartbeat to update state is interval + 60 seconds + assert hass.states.get("device_tracker.switch_1").state == expected[2] # Disabled device is unavailable - device_1["disabled"] = True - mock_websocket_message(message=MessageKey.DEVICE, data=device_1) + switch_1["disabled"] = True + mock_websocket_message(message=MessageKey.DEVICE, data=switch_1) await hass.async_block_till_done() - assert hass.states.get("device_tracker.device_1").state == STATE_UNAVAILABLE - assert hass.states.get("device_tracker.device_2").state == STATE_HOME + assert hass.states.get("device_tracker.switch_1").state == STATE_UNAVAILABLE 
@pytest.mark.parametrize("client_payload", [[WIRELESS_CLIENT_1, WIRED_CLIENT_1]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("mock_device_registry") async def test_remove_clients( - hass: HomeAssistant, mock_websocket_message, client_payload: list[dict[str, Any]] + hass: HomeAssistant, + mock_websocket_message: WebsocketMessageMock, + client_payload: list[dict[str, Any]], ) -> None: """Test the remove_items function with some clients.""" assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 @@ -313,68 +311,35 @@ async def test_remove_clients( assert hass.states.get("device_tracker.wd_client_1") -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "essid": "ssid", - "hostname": "client", - "is_wired": False, - "last_seen": 1562600145, - "mac": "00:00:00:00:00:01", - } - ] - ], -) -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "version": "4.0.42.10433", - } - ] - ], -) +@pytest.mark.parametrize("client_payload", [[WIRELESS_CLIENT_1]]) +@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("mock_device_registry") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, + mock_websocket_state: WebsocketStateManager, +) -> None: """Verify entities state reflect on hub connection becoming unavailable.""" assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 - assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME - assert hass.states.get("device_tracker.device").state == STATE_HOME + assert 
hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.switch_1").state == STATE_HOME # Controller unavailable await mock_websocket_state.disconnect() - assert hass.states.get("device_tracker.client").state == STATE_UNAVAILABLE - assert hass.states.get("device_tracker.device").state == STATE_UNAVAILABLE + assert hass.states.get("device_tracker.ws_client_1").state == STATE_UNAVAILABLE + assert hass.states.get("device_tracker.switch_1").state == STATE_UNAVAILABLE # Controller available await mock_websocket_state.reconnect() - assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME - assert hass.states.get("device_tracker.device").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.switch_1").state == STATE_HOME @pytest.mark.usefixtures("mock_device_registry") async def test_option_ssid_filter( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test the SSID filter works. @@ -383,13 +348,7 @@ async def test_option_ssid_filter( Client on SSID2 will be removed on change of options. 
""" client_payload += [ - { - "essid": "ssid", - "hostname": "client", - "is_wired": False, - "last_seen": dt_util.as_timestamp(dt_util.utcnow()), - "mac": "00:00:00:00:00:01", - }, + WIRELESS_CLIENT_1 | {"last_seen": dt_util.as_timestamp(dt_util.utcnow())}, { "essid": "ssid2", "hostname": "client_on_ssid2", @@ -401,7 +360,7 @@ async def test_option_ssid_filter( config_entry = await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 - assert hass.states.get("device_tracker.client").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME assert hass.states.get("device_tracker.client_on_ssid2").state == STATE_NOT_HOME # Setting SSID filter will remove clients outside of filter @@ -411,33 +370,29 @@ async def test_option_ssid_filter( await hass.async_block_till_done() # Not affected by SSID filter - assert hass.states.get("device_tracker.client").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Removed due to SSID filter assert not hass.states.get("device_tracker.client_on_ssid2") # Roams to SSID outside of filter - client = client_payload[0] - client["essid"] = "other_ssid" - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1 = client_payload[0] | {"essid": "other_ssid"} + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) # Data update while SSID filter is in effect shouldn't create the client - client_on_ssid2 = client_payload[1] - client_on_ssid2["last_seen"] = dt_util.as_timestamp(dt_util.utcnow()) + client_on_ssid2 = client_payload[1] | { + "last_seen": dt_util.as_timestamp(dt_util.utcnow()) + } mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) await hass.async_block_till_done() - new_time = dt_util.utcnow() + timedelta( - seconds=( - config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) + 1 - ) - ) + new_time = dt_util.utcnow() + 
timedelta(seconds=(DEFAULT_DETECTION_TIME + 1)) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() # SSID filter marks client as away - assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME # SSID still outside of filter assert not hass.states.get("device_tracker.client_on_ssid2") @@ -446,25 +401,23 @@ async def test_option_ssid_filter( hass.config_entries.async_update_entry(config_entry, options={CONF_SSID_FILTER: []}) await hass.async_block_till_done() - client["last_seen"] += 1 + ws_client_1["last_seen"] += 1 client_on_ssid2["last_seen"] += 1 - mock_websocket_message(message=MessageKey.CLIENT, data=[client, client_on_ssid2]) + mock_websocket_message( + message=MessageKey.CLIENT, data=[ws_client_1, client_on_ssid2] + ) await hass.async_block_till_done() - assert hass.states.get("device_tracker.client").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME assert hass.states.get("device_tracker.client_on_ssid2").state == STATE_HOME # Time pass to mark client as away - new_time += timedelta( - seconds=( - config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) + 1 - ) - ) + new_time += timedelta(seconds=(DEFAULT_DETECTION_TIME + 1)) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() - assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME client_on_ssid2["last_seen"] += 1 mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) @@ -478,9 +431,7 @@ async def test_option_ssid_filter( mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) await hass.async_block_till_done() - new_time += timedelta( - seconds=(config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME)) - ) + 
new_time += timedelta(seconds=DEFAULT_DETECTION_TIME) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() @@ -492,7 +443,7 @@ async def test_option_ssid_filter( async def test_wireless_client_go_wired_issue( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test the solution to catch wireless device go wired UniFi issue. @@ -500,64 +451,51 @@ async def test_wireless_client_go_wired_issue( UniFi Network has a known issue that when a wireless device goes away it sometimes gets marked as wired. """ client_payload.append( - { - "essid": "ssid", - "hostname": "client", - "ip": "10.0.0.1", - "is_wired": False, - "last_seen": dt_util.as_timestamp(dt_util.utcnow()), - "mac": "00:00:00:00:00:01", - } + WIRELESS_CLIENT_1 | {"last_seen": dt_util.as_timestamp(dt_util.utcnow())} ) - config_entry = await config_entry_factory() + await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 # Client is wireless - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Trigger wired bug - client = client_payload[0] - client["last_seen"] = dt_util.as_timestamp(dt_util.utcnow()) - client["is_wired"] = True - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1 = client_payload[0] | { + "last_seen": dt_util.as_timestamp(dt_util.utcnow()), + "is_wired": True, + } + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Wired bug fix keeps client marked as wireless - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Pass time - new_time = 
dt_util.utcnow() + timedelta( - seconds=(config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME)) - ) + new_time = dt_util.utcnow() + timedelta(seconds=DEFAULT_DETECTION_TIME) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() # Marked as home according to the timer - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME # Try to mark client as connected - client["last_seen"] += 1 - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1["last_seen"] += 1 + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Make sure it don't go online again until wired bug disappears - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME # Make client wireless - client["last_seen"] += 1 - client["is_wired"] = False - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1["last_seen"] += 1 + ws_client_1["is_wired"] = False + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Client is no longer affected by wired bug and can be marked online - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME @pytest.mark.parametrize("config_entry_options", [{CONF_IGNORE_WIRED_BUG: True}]) @@ -565,69 +503,54 @@ async def test_wireless_client_go_wired_issue( async def test_option_ignore_wired_bug( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test option to ignore wired 
bug.""" client_payload.append( - { - "ap_mac": "00:00:00:00:02:01", - "essid": "ssid", - "hostname": "client", - "ip": "10.0.0.1", - "is_wired": False, - "last_seen": dt_util.as_timestamp(dt_util.utcnow()), - "mac": "00:00:00:00:00:01", - } + WIRELESS_CLIENT_1 | {"last_seen": dt_util.as_timestamp(dt_util.utcnow())} ) - config_entry = await config_entry_factory() + await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 # Client is wireless - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Trigger wired bug - client = client_payload[0] - client["is_wired"] = True - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1 = client_payload[0] + ws_client_1["is_wired"] = True + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Wired bug in effect - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME - # pass time - new_time = dt_util.utcnow() + timedelta( - seconds=config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) - ) + # Pass time + new_time = dt_util.utcnow() + timedelta(seconds=DEFAULT_DETECTION_TIME) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() # Timer marks client as away - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME # Mark client as connected again - client["last_seen"] += 1 - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1["last_seen"] += 1 + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Ignoring wired bug allows 
client to go home again even while affected - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Make client wireless - client["last_seen"] += 1 - client["is_wired"] = False - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1["last_seen"] += 1 + ws_client_1["is_wired"] = False + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Client is wireless and still connected - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME @pytest.mark.parametrize( @@ -657,8 +580,8 @@ async def test_option_ignore_wired_bug( async def test_restoring_client( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry: ConfigEntry, - config_entry_factory: Callable[[], ConfigEntry], + config_entry: MockConfigEntry, + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], clients_all_payload: list[dict[str, Any]], ) -> None: @@ -731,10 +654,10 @@ async def test_restoring_client( @pytest.mark.usefixtures("mock_device_registry") async def test_config_entry_options_track( hass: HomeAssistant, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, config_entry_options: MappingProxyType[str, Any], counts: tuple[int], - expected: dict[tuple[bool | None]], + expected: tuple[tuple[bool | None, ...], ...], ) -> None: """Test the different config entry options. 
diff --git a/tests/components/unifi/test_diagnostics.py b/tests/components/unifi/test_diagnostics.py index fcaba59cbad..80359a9c75c 100644 --- a/tests/components/unifi/test_diagnostics.py +++ b/tests/components/unifi/test_diagnostics.py @@ -2,15 +2,16 @@ import pytest from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.unifi.const import ( CONF_ALLOW_BANDWIDTH_SENSORS, CONF_ALLOW_UPTIME_SENSORS, CONF_BLOCK_CLIENT, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -121,11 +122,10 @@ DPI_GROUP_DATA = [ async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry_setup) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry_setup + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/unifi/test_hub.py b/tests/components/unifi/test_hub.py index 0d75a83c5f5..af134c7449b 100644 --- a/tests/components/unifi/test_hub.py +++ b/tests/components/unifi/test_hub.py @@ -1,6 +1,5 @@ """Test UniFi Network.""" -from collections.abc import Callable from http import HTTPStatus from types import MappingProxyType from typing import Any @@ -12,18 +11,21 @@ import pytest from homeassistant.components.unifi.const import DOMAIN as UNIFI_DOMAIN from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect from homeassistant.components.unifi.hub import get_unifi_api -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from 
homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr import homeassistant.util.dt as dt_util +from .conftest import ConfigEntryFactoryType, WebsocketStateManager + +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker async def test_hub_setup( device_registry: dr.DeviceRegistry, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, ) -> None: """Successful setup.""" with patch( @@ -54,7 +56,7 @@ async def test_hub_setup( async def test_reset_after_successful_setup( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Calling reset when the entry has been setup.""" assert config_entry_setup.state is ConfigEntryState.LOADED @@ -64,7 +66,7 @@ async def test_reset_after_successful_setup( async def test_reset_fails( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Calling reset when the entry has been setup can return false.""" assert config_entry_setup.state is ConfigEntryState.LOADED @@ -80,8 +82,8 @@ async def test_reset_fails( @pytest.mark.usefixtures("mock_device_registry") async def test_connection_state_signalling( hass: HomeAssistant, - mock_websocket_state, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_state: WebsocketStateManager, client_payload: list[dict[str, Any]], ) -> None: """Verify connection statesignalling and connection state are working.""" @@ -110,8 +112,8 @@ async def test_connection_state_signalling( async def test_reconnect_mechanism( aioclient_mock: AiohttpClientMocker, - mock_websocket_state, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, + 
mock_websocket_state: WebsocketStateManager, ) -> None: """Verify reconnect prints only on first reconnection try.""" aioclient_mock.clear_requests() @@ -140,7 +142,10 @@ async def test_reconnect_mechanism( ], ) @pytest.mark.usefixtures("config_entry_setup") -async def test_reconnect_mechanism_exceptions(mock_websocket_state, exception) -> None: +async def test_reconnect_mechanism_exceptions( + mock_websocket_state: WebsocketStateManager, + exception: Exception, +) -> None: """Verify async_reconnect calls expected methods.""" with ( patch("aiounifi.Controller.login", side_effect=exception), @@ -170,8 +175,8 @@ async def test_reconnect_mechanism_exceptions(mock_websocket_state, exception) - ) async def test_get_unifi_api_fails_to_connect( hass: HomeAssistant, - side_effect, - raised_exception, + side_effect: Exception, + raised_exception: Exception, config_entry_data: MappingProxyType[str, Any], ) -> None: """Check that get_unifi_api can handle UniFi Network being unavailable.""" diff --git a/tests/components/unifi/test_image.py b/tests/components/unifi/test_image.py index 75d2f02900d..dc37d7cb8b7 100644 --- a/tests/components/unifi/test_image.py +++ b/tests/components/unifi/test_image.py @@ -3,22 +3,41 @@ from copy import deepcopy from datetime import timedelta from http import HTTPStatus +from typing import Any +from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY -from homeassistant.const import STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.util import dt as dt_util -from 
tests.common import async_fire_time_changed +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import async_fire_time_changed, snapshot_platform from tests.typing import ClientSessionGenerator + +@pytest.fixture(autouse=True) +def mock_getrandbits(): + """Mock image access token which normally is randomized.""" + with patch( + "homeassistant.components.image.SystemRandom.getrandbits", + return_value=1, + ): + yield + + WLAN = { "_id": "012345678910111213141516", "bc_filter_enabled": False, @@ -56,6 +75,32 @@ WLAN = { } +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.parametrize( + "site_payload", + [ + [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], + [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.freeze_time("2021-01-01 01:01:00") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + site_payload: dict[str, Any], + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.IMAGE]): + config_entry = await config_entry_factory() + if site_payload[0]["role"] == "admin": + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + else: + assert len(hass.states.async_entity_ids(IMAGE_DOMAIN)) == 0 + + @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") async def test_wlan_qr_code( @@ -63,16 +108,13 @@ async def test_wlan_qr_code( entity_registry: er.EntityRegistry, hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, - mock_websocket_message, - mock_websocket_state, + mock_websocket_message: WebsocketMessageMock, ) -> None: """Test the 
update_clients function when no clients are found.""" assert len(hass.states.async_entity_ids(IMAGE_DOMAIN)) == 0 ent_reg_entry = entity_registry.async_get("image.ssid_1_qr_code") - assert ent_reg_entry.unique_id == "qr_code-012345678910111213141516" assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -84,10 +126,6 @@ async def test_wlan_qr_code( ) await hass.async_block_till_done() - # Validate state object - image_state_1 = hass.states.get("image.ssid_1_qr_code") - assert image_state_1.name == "SSID 1 QR Code" - # Validate image client = await hass_client() resp = await client.get("/api/image_proxy/image.ssid_1_qr_code") @@ -96,8 +134,8 @@ async def test_wlan_qr_code( assert body == snapshot # Update state object - same password - no change to state + image_state_1 = hass.states.get("image.ssid_1_qr_code") mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=WLAN) - await hass.async_block_till_done() image_state_2 = hass.states.get("image.ssid_1_qr_code") assert image_state_1.state == image_state_2.state @@ -105,7 +143,6 @@ async def test_wlan_qr_code( data = deepcopy(WLAN) data["x_passphrase"] = "new password" mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=data) - await hass.async_block_till_done() image_state_3 = hass.states.get("image.ssid_1_qr_code") assert image_state_1.state != image_state_3.state @@ -116,25 +153,41 @@ async def test_wlan_qr_code( body = await resp.read() assert body == snapshot - # Availability signalling - # Controller disconnects +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: + """Verify entities state reflect on hub becoming unavailable.""" 
+ assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE + + # Controller unavailable await mock_websocket_state.disconnect() assert hass.states.get("image.ssid_1_qr_code").state == STATE_UNAVAILABLE - # Controller reconnects + # Controller available await mock_websocket_state.reconnect() assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE + +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_source_availability( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: + """Verify entities state reflect on source becoming unavailable.""" + assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE + # WLAN gets disabled wlan_1 = deepcopy(WLAN) wlan_1["enabled"] = False mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) - await hass.async_block_till_done() assert hass.states.get("image.ssid_1_qr_code").state == STATE_UNAVAILABLE # WLAN gets re-enabled wlan_1["enabled"] = True mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) - await hass.async_block_till_done() assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE diff --git a/tests/components/unifi/test_init.py b/tests/components/unifi/test_init.py index 7cd203ab8fd..68f80555cd6 100644 --- a/tests/components/unifi/test_init.py +++ b/tests/components/unifi/test_init.py @@ -1,6 +1,5 @@ """Test UniFi Network integration setup process.""" -from collections.abc import Callable from typing import Any from unittest.mock import patch @@ -13,29 +12,25 @@ from homeassistant.components.unifi.const import ( CONF_ALLOW_UPTIME_SENSORS, CONF_TRACK_CLIENTS, CONF_TRACK_DEVICES, - DOMAIN as UNIFI_DOMAIN, ) from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect -from homeassistant.config_entries import ConfigEntry, ConfigEntryState 
+from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from .conftest import DEFAULT_CONFIG_ENTRY_ID +from .conftest import ( + DEFAULT_CONFIG_ENTRY_ID, + ConfigEntryFactoryType, + WebsocketMessageMock, +) from tests.common import flush_store -from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator -async def test_setup_with_no_config(hass: HomeAssistant) -> None: - """Test that we do not discover anything or try to set up a hub.""" - assert await async_setup_component(hass, UNIFI_DOMAIN, {}) is True - assert UNIFI_DOMAIN not in hass.data - - async def test_setup_entry_fails_config_entry_not_ready( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Failed authentication trigger a reauthentication flow.""" with patch( @@ -48,7 +43,7 @@ async def test_setup_entry_fails_config_entry_not_ready( async def test_setup_entry_fails_trigger_reauth_flow( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Failed authentication trigger a reauthentication flow.""" with ( @@ -86,7 +81,7 @@ async def test_setup_entry_fails_trigger_reauth_flow( async def test_wireless_clients( hass: HomeAssistant, hass_storage: dict[str, Any], - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, ) -> None: """Verify wireless clients class.""" hass_storage[unifi.STORAGE_KEY] = { @@ -170,13 +165,11 @@ async def test_wireless_clients( ) async def test_remove_config_entry_device( hass: HomeAssistant, - hass_storage: dict[str, Any], - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_factory: Callable[[], ConfigEntry], + 
config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], device_payload: list[dict[str, Any]], - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, hass_ws_client: WebSocketGenerator, ) -> None: """Verify removing a device manually.""" diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index 960a5d3e529..afa256c087e 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1,6 +1,5 @@ """UniFi Network sensor platform tests.""" -from collections.abc import Callable from copy import deepcopy from datetime import datetime, timedelta from types import MappingProxyType @@ -11,14 +10,12 @@ from aiounifi.models.device import DeviceState from aiounifi.models.message import MessageKey from freezegun.api import FrozenDateTimeFactory, freeze_time import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from homeassistant.components.sensor import ( - ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN, SCAN_INTERVAL, SensorDeviceClass, - SensorStateClass, ) from homeassistant.components.unifi.const import ( CONF_ALLOW_BANDWIDTH_SENSORS, @@ -29,20 +26,39 @@ from homeassistant.components.unifi.const import ( DEFAULT_DETECTION_TIME, DEVICE_STATES, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_FRIENDLY_NAME, - ATTR_UNIT_OF_MEASUREMENT, - STATE_UNAVAILABLE, - EntityCategory, -) +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY +from homeassistant.const import ATTR_DEVICE_CLASS, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import ( + 
ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +WIRED_CLIENT = { + "hostname": "Wired client", + "is_wired": True, + "mac": "00:00:00:00:00:01", + "oui": "Producer", + "wired-rx_bytes-r": 1234000000, + "wired-tx_bytes-r": 5678000000, + "uptime": 1600094505, +} +WIRELESS_CLIENT = { + "is_wired": False, + "mac": "00:00:00:00:00:02", + "name": "Wireless client", + "oui": "Producer", + "rx_bytes-r": 2345000000.0, + "tx_bytes-r": 6789000000.0, + "uptime": 60, +} DEVICE_1 = { "board_rev": 2, @@ -316,6 +332,114 @@ PDU_OUTLETS_UPDATE_DATA = [ ] +@pytest.mark.parametrize( + "config_entry_options", + [ + { + CONF_ALLOW_BANDWIDTH_SENSORS: True, + CONF_ALLOW_UPTIME_SENSORS: True, + } + ], +) +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) +@pytest.mark.parametrize( + "device_payload", + [ + [ + DEVICE_1, + PDU_DEVICE_1, + { # Temperature + "board_rev": 3, + "device_id": "mock-id", + "general_temperature": 30, + "has_fan": True, + "has_temperature": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "20:00:00:00:01:01", + "model": "US16P150", + "name": "Device", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "uptime": 60, + "version": "4.0.42.10433", + }, + { # Latency monitors + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "uptime_stats": { + "WAN": { + "availability": 100.0, + "latency_average": 39, + "monitors": [ + { + "availability": 100.0, + "latency_average": 56, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 53, + "target": "google.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 30, + "target": "1.1.1.1", + 
"type": "icmp", + }, + ], + }, + "WAN2": { + "monitors": [ + { + "availability": 0.0, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 0.0, + "target": "google.com", + "type": "icmp", + }, + {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, + ], + }, + }, + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + }, + ] + ], +) +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.freeze_time("2021-01-01 01:01:00") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory, + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SENSOR]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + @pytest.mark.parametrize( "config_entry_options", [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}], @@ -337,64 +461,17 @@ async def test_no_clients(hass: HomeAssistant) -> None: } ], ) -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "hostname": "Wired client", - "is_wired": True, - "mac": "00:00:00:00:00:01", - "oui": "Producer", - "wired-rx_bytes-r": 1234000000, - "wired-tx_bytes-r": 5678000000, - }, - { - "is_wired": False, - "mac": "00:00:00:00:00:02", - "name": "Wireless client", - "oui": "Producer", - "rx_bytes-r": 2345000000.0, - "tx_bytes-r": 6789000000.0, - }, - ] - ], -) +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) async def test_bandwidth_sensors( hass: HomeAssistant, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, config_entry_options: MappingProxyType[str, Any], - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify that bandwidth sensors are 
working as expected.""" - assert len(hass.states.async_all()) == 5 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 4 - - # Verify sensor attributes and state - - wrx_sensor = hass.states.get("sensor.wired_client_rx") - assert wrx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert wrx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert wrx_sensor.state == "1234.0" - - wtx_sensor = hass.states.get("sensor.wired_client_tx") - assert wtx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert wtx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert wtx_sensor.state == "5678.0" - - wlrx_sensor = hass.states.get("sensor.wireless_client_rx") - assert wlrx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert wlrx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert wlrx_sensor.state == "2345.0" - - wltx_sensor = hass.states.get("sensor.wireless_client_tx") - assert wltx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert wltx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert wltx_sensor.state == "6789.0" - # Verify state update - wireless_client = client_payload[1] + wireless_client = deepcopy(client_payload[1]) wireless_client["rx_bytes-r"] = 3456000000 wireless_client["tx_bytes-r"] = 7891000000 @@ -459,138 +536,17 @@ async def test_bandwidth_sensors( assert hass.states.get("sensor.wired_client_tx") -@pytest.mark.parametrize( - "config_entry_options", - [ - { - CONF_ALLOW_BANDWIDTH_SENSORS: False, - CONF_ALLOW_UPTIME_SENSORS: True, - CONF_TRACK_CLIENTS: False, - CONF_TRACK_DEVICES: False, - } - ], -) -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "mac": "00:00:00:00:00:01", - "name": "client1", - "oui": "Producer", - "uptime": 0, - } - ] - ], -) -@pytest.mark.parametrize( - ("initial_uptime", "event_uptime", 
"new_uptime"), - [ - # Uptime listed in epoch time should never change - (1609462800, 1609462800, 1612141200), - # Uptime counted in seconds increases with every event - (60, 64, 60), - ], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_uptime_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - mock_websocket_message, - config_entry_options: MappingProxyType[str, Any], - config_entry_factory: Callable[[], ConfigEntry], - client_payload: list[dict[str, Any]], - initial_uptime, - event_uptime, - new_uptime, -) -> None: - """Verify that uptime sensors are working as expected.""" - uptime_client = client_payload[0] - uptime_client["uptime"] = initial_uptime - freezer.move_to(datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC)) - config_entry = await config_entry_factory() - - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 - assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00" - assert ( - entity_registry.async_get("sensor.client1_uptime").entity_category - is EntityCategory.DIAGNOSTIC - ) - - # Verify normal new event doesn't change uptime - # 4 seconds has passed - uptime_client["uptime"] = event_uptime - now = datetime(2021, 1, 1, 1, 1, 4, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.CLIENT, data=uptime_client) - await hass.async_block_till_done() - - assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00" - - # Verify new event change uptime - # 1 month has passed - uptime_client["uptime"] = new_uptime - now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.CLIENT, data=uptime_client) - await hass.async_block_till_done() - - assert hass.states.get("sensor.client1_uptime").state == "2021-02-01T01:00:00+00:00" - - # Disable 
option - options = deepcopy(config_entry_options) - options[CONF_ALLOW_UPTIME_SENSORS] = False - hass.config_entries.async_update_entry(config_entry, options=options) - await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 0 - assert hass.states.get("sensor.client1_uptime") is None - - # Enable option - options = deepcopy(config_entry_options) - options[CONF_ALLOW_UPTIME_SENSORS] = True - with patch("homeassistant.util.dt.now", return_value=now): - hass.config_entries.async_update_entry(config_entry, options=options) - await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 - assert hass.states.get("sensor.client1_uptime") - - @pytest.mark.parametrize( "config_entry_options", [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}], ) -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "hostname": "Wired client", - "is_wired": True, - "mac": "00:00:00:00:00:01", - "oui": "Producer", - "wired-rx_bytes": 1234000000, - "wired-tx_bytes": 5678000000, - "uptime": 1600094505, - }, - { - "is_wired": False, - "mac": "00:00:00:00:00:02", - "name": "Wireless client", - "oui": "Producer", - "rx_bytes": 2345000000, - "tx_bytes": 6789000000, - "uptime": 60, - }, - ] - ], -) +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_remove_sensors( - hass: HomeAssistant, mock_websocket_message, client_payload: list[dict[str, Any]] + hass: HomeAssistant, + mock_websocket_message: WebsocketMessageMock, + client_payload: list[dict[str, Any]], ) -> None: """Verify removing of clients work as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 @@ -619,15 +575,14 @@ async def test_remove_sensors( async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, 
- mock_websocket_state, + mock_websocket_message: WebsocketMessageMock, + mock_websocket_state: WebsocketStateManager, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 ent_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_poe_power") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -692,10 +647,9 @@ async def test_poe_port_switches( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) async def test_wlan_client_sensors( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_websocket_message, - mock_websocket_state, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + mock_websocket_state: WebsocketStateManager, client_payload: list[dict[str, Any]], ) -> None: """Verify that WLAN client sensors are working as expected.""" @@ -725,14 +679,8 @@ async def test_wlan_client_sensors( assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 - ent_reg_entry = entity_registry.async_get("sensor.ssid_1") - assert ent_reg_entry.unique_id == "wlan_clients-012345678910111213141516" - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - # Validate state object - ssid_1 = hass.states.get("sensor.ssid_1") - assert ssid_1 is not None - assert ssid_1.state == "1" + assert hass.states.get("sensor.ssid_1").state == "1" # Verify state update - increasing number wireless_client_1 = client_payload[0] @@ -801,7 +749,6 @@ async def test_wlan_client_sensors( @pytest.mark.parametrize( ( "entity_id", - "expected_unique_id", "expected_value", "changed_data", "expected_update_value", @@ -809,21 +756,18 @@ async def test_wlan_client_sensors( [ ( "dummy_usp_pdu_pro_outlet_2_outlet_power", - "outlet_power-01:02:03:04:05:ff_2", "73.827", 
{"outlet_table": PDU_OUTLETS_UPDATE_DATA}, "123.45", ), ( "dummy_usp_pdu_pro_ac_power_budget", - "ac_power_budget-01:02:03:04:05:ff", "1875.000", None, None, ), ( "dummy_usp_pdu_pro_ac_power_consumption", - "ac_power_conumption-01:02:03:04:05:ff", "201.683", {"outlet_ac_power_consumption": "456.78"}, "456.78", @@ -834,26 +778,18 @@ async def test_wlan_client_sensors( @pytest.mark.usefixtures("config_entry_setup") async def test_outlet_power_readings( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], entity_id: str, - expected_unique_id: str, - expected_value: any, - changed_data: dict | None, - expected_update_value: any, + expected_value: str, + changed_data: dict[str, Any] | None, + expected_update_value: str | None, ) -> None: """Test the outlet power reporting on PDU devices.""" assert len(hass.states.async_all()) == 13 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 7 - ent_reg_entry = entity_registry.async_get(f"sensor.{entity_id}") - assert ent_reg_entry.unique_id == expected_unique_id - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - - sensor_data = hass.states.get(f"sensor.{entity_id}") - assert sensor_data.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.POWER - assert sensor_data.state == expected_value + assert hass.states.get(f"sensor.{entity_id}").state == expected_value if changed_data is not None: updated_device_data = deepcopy(device_payload[0]) @@ -862,73 +798,7 @@ async def test_outlet_power_readings( mock_websocket_message(message=MessageKey.DEVICE, data=updated_device_data) await hass.async_block_till_done() - sensor_data = hass.states.get(f"sensor.{entity_id}") - assert sensor_data.state == expected_update_value - - -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 
1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "uptime": 60, - "version": "4.0.42.10433", - } - ] - ], -) -async def test_device_uptime( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], - device_payload: list[dict[str, Any]], -) -> None: - """Verify that uptime sensors are working as expected.""" - now = datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - await config_entry_factory() - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - assert hass.states.get("sensor.device_uptime").state == "2021-01-01T01:00:00+00:00" - - assert ( - entity_registry.async_get("sensor.device_uptime").entity_category - is EntityCategory.DIAGNOSTIC - ) - - # Verify normal new event doesn't change uptime - # 4 seconds has passed - device = device_payload[0] - device["uptime"] = 64 - now = datetime(2021, 1, 1, 1, 1, 4, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.DEVICE, data=device) - - assert hass.states.get("sensor.device_uptime").state == "2021-01-01T01:00:00+00:00" - - # Verify new event change uptime - # 1 month has passed - - device["uptime"] = 60 - now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.DEVICE, data=device) - - assert hass.states.get("sensor.device_uptime").state == "2021-02-01T01:00:00+00:00" + assert hass.states.get(f"sensor.{entity_id}").state == expected_update_value @pytest.mark.parametrize( @@ -961,17 +831,12 @@ async def test_device_uptime( @pytest.mark.usefixtures("config_entry_setup") async def test_device_temperature( hass: HomeAssistant, - entity_registry: er.EntityRegistry, 
- mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that temperature sensors are working as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 assert hass.states.get("sensor.device_temperature").state == "30" - assert ( - entity_registry.async_get("sensor.device_temperature").entity_category - is EntityCategory.DIAGNOSTIC - ) # Verify new event change temperature device = device_payload[0] @@ -1011,15 +876,11 @@ async def test_device_temperature( async def test_device_state( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that state sensors are working as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 - assert ( - entity_registry.async_get("sensor.device_state").entity_category - is EntityCategory.DIAGNOSTIC - ) device = device_payload[0] for i in list(map(int, DeviceState)): @@ -1047,8 +908,7 @@ async def test_device_state( @pytest.mark.usefixtures("config_entry_setup") async def test_device_system_stats( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that device stats sensors are working as expected.""" @@ -1058,16 +918,6 @@ async def test_device_system_stats( assert hass.states.get("sensor.device_cpu_utilization").state == "5.8" assert hass.states.get("sensor.device_memory_utilization").state == "31.1" - assert ( - entity_registry.async_get("sensor.device_cpu_utilization").entity_category - is EntityCategory.DIAGNOSTIC - ) - - assert ( - entity_registry.async_get("sensor.device_memory_utilization").entity_category - is EntityCategory.DIAGNOSTIC - ) - # Verify new event change system-stats device = device_payload[0] device["system-stats"] = {"cpu": 7.7, 
"mem": 33.3, "uptime": 7316} @@ -1143,9 +993,9 @@ async def test_device_system_stats( async def test_bandwidth_port_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, config_entry_options: MappingProxyType[str, Any], + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that port bandwidth sensors are working as expected.""" @@ -1154,11 +1004,9 @@ async def test_bandwidth_port_sensors( p1rx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_rx") assert p1rx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert p1rx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC p1tx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_tx") assert p1tx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert p1tx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -1185,26 +1033,11 @@ async def test_bandwidth_port_sensors( assert len(hass.states.async_all()) == 9 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 - # Verify sensor attributes and state - p1rx_sensor = hass.states.get("sensor.mock_name_port_1_rx") - assert p1rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert p1rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert p1rx_sensor.state == "0.00921" - - p1tx_sensor = hass.states.get("sensor.mock_name_port_1_tx") - assert p1tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert p1tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert p1tx_sensor.state == "0.04089" - - p2rx_sensor = hass.states.get("sensor.mock_name_port_2_rx") - assert p2rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert p2rx_sensor.attributes.get(ATTR_STATE_CLASS) == 
SensorStateClass.MEASUREMENT - assert p2rx_sensor.state == "0.01229" - - p2tx_sensor = hass.states.get("sensor.mock_name_port_2_tx") - assert p2tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert p2tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert p2tx_sensor.state == "0.02892" + # Verify sensor state + assert hass.states.get("sensor.mock_name_port_1_rx").state == "0.00921" + assert hass.states.get("sensor.mock_name_port_1_tx").state == "0.04089" + assert hass.states.get("sensor.mock_name_port_2_rx").state == "0.01229" + assert hass.states.get("sensor.mock_name_port_2_tx").state == "0.02892" # Verify state update device_1 = device_payload[0] @@ -1260,9 +1093,9 @@ async def test_bandwidth_port_sensors( async def test_device_client_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory, - mock_websocket_message, - client_payload, + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + client_payload: dict[str, Any], ) -> None: """Verify that WLAN client sensors are working as expected.""" client_payload += [ @@ -1298,13 +1131,9 @@ async def test_device_client_sensors( ent_reg_entry = entity_registry.async_get("sensor.wired_device_clients") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - assert ent_reg_entry.unique_id == "device_clients-01:00:00:00:00:00" ent_reg_entry = entity_registry.async_get("sensor.wireless_device_clients") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - assert ent_reg_entry.unique_id == "device_clients-02:00:00:00:00:00" # Enable entity entity_registry.async_update_entity( @@ -1341,67 +1170,514 @@ async def test_device_client_sensors( assert hass.states.get("sensor.wireless_device_clients").state == "0" -WIRED_CLIENT = { - 
"hostname": "Wired client", - "is_wired": True, - "mac": "00:00:00:00:00:01", - "oui": "Producer", - "wired-rx_bytes-r": 1234000000, - "wired-tx_bytes-r": 5678000000, - "uptime": 1600094505, -} -WIRELESS_CLIENT = { - "is_wired": False, - "mac": "00:00:00:00:00:01", - "name": "Wireless client", - "oui": "Producer", - "rx_bytes-r": 2345000000.0, - "tx_bytes-r": 6789000000.0, - "uptime": 60, -} +async def _test_uptime_entity( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_websocket_message: WebsocketMessageMock, + config_entry_factory: ConfigEntryFactoryType, + payload: dict[str, Any], + entity_id: str, + message_key: MessageKey, + initial_uptime: int, + event_uptime: int, + small_variation_uptime: int, + new_uptime: int, +) -> None: + """Verify that uptime entities are working as expected.""" + payload["uptime"] = initial_uptime + freezer.move_to(datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC)) + config_entry = await config_entry_factory() + + assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" + + # Verify normal new event doesn't change uptime + # 4 minutes have passed + + payload["uptime"] = event_uptime + now = datetime(2021, 1, 1, 1, 4, 0, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=message_key, data=payload) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" + + # Verify small variation of uptime (<120 seconds) is ignored + # 15 seconds variation after 8 minutes + + payload["uptime"] = small_variation_uptime + now = datetime(2021, 1, 1, 1, 8, 15, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=message_key, data=payload) + + assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" + + # Verify new event change uptime + # 1 month has passed + + payload["uptime"] = new_uptime + now = datetime(2021, 2, 1, 1, 1, 0, 
tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=message_key, data=payload) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == "2021-02-01T01:00:00+00:00" + + return config_entry + + +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_UPTIME_SENSORS: True}]) +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT]]) +@pytest.mark.parametrize( + ("initial_uptime", "event_uptime", "small_variation_uptime", "new_uptime"), + [ + # Uptime listed in epoch time should never change + (1609462800, 1609462800, 1609462800, 1612141200), + # Uptime counted in seconds increases with every event + (60, 240, 480, 60), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_client_uptime( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry_options: MappingProxyType[str, Any], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + client_payload: list[dict[str, Any]], + initial_uptime, + event_uptime, + small_variation_uptime, + new_uptime, +) -> None: + """Verify that client uptime sensors are working as expected.""" + config_entry = await _test_uptime_entity( + hass, + freezer, + mock_websocket_message, + config_entry_factory, + payload=client_payload[0], + entity_id="sensor.wired_client_uptime", + message_key=MessageKey.CLIENT, + initial_uptime=initial_uptime, + event_uptime=event_uptime, + small_variation_uptime=small_variation_uptime, + new_uptime=new_uptime, + ) + + # Disable option + options = deepcopy(config_entry_options) + options[CONF_ALLOW_UPTIME_SENSORS] = False + hass.config_entries.async_update_entry(config_entry, options=options) + await hass.async_block_till_done() + + assert hass.states.get("sensor.wired_client_uptime") is None + + # Enable option + options = deepcopy(config_entry_options) + options[CONF_ALLOW_UPTIME_SENSORS] = True + 
hass.config_entries.async_update_entry(config_entry, options=options) + await hass.async_block_till_done() + + assert hass.states.get("sensor.wired_client_uptime") + + +@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) +async def test_device_uptime( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + device_payload: list[dict[str, Any]], +) -> None: + """Verify that device uptime sensors are working as expected.""" + await _test_uptime_entity( + hass, + freezer, + mock_websocket_message, + config_entry_factory, + payload=device_payload[0], + entity_id="sensor.mock_name_uptime", + message_key=MessageKey.DEVICE, + initial_uptime=60, + event_uptime=240, + small_variation_uptime=480, + new_uptime=60, + ) @pytest.mark.parametrize( - "config_entry_options", + "device_payload", [ - { - CONF_ALLOW_BANDWIDTH_SENSORS: True, - CONF_ALLOW_UPTIME_SENSORS: True, - CONF_TRACK_CLIENTS: False, - CONF_TRACK_DEVICES: False, - } + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "uptime_stats": { + "WAN": { + "availability": 100.0, + "latency_average": 39, + "monitors": [ + { + "availability": 100.0, + "latency_average": 56, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 53, + "target": "google.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 30, + "target": "1.1.1.1", + "type": "icmp", + }, + ], + }, + "WAN2": { + "monitors": [ + { + "availability": 0.0, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 0.0, + "target": "google.com", + "type": "icmp", + }, + {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, + ], + }, + }, + "state": 1, + "type": "usw", + "version": 
"4.0.42.10433", + } + ] ], ) @pytest.mark.parametrize( - ("client_payload", "entity_id", "unique_id_prefix"), + ("monitor_id", "state", "updated_state", "index_to_update"), [ - ([WIRED_CLIENT], "sensor.wired_client_rx", "rx-"), - ([WIRED_CLIENT], "sensor.wired_client_tx", "tx-"), - ([WIRED_CLIENT], "sensor.wired_client_uptime", "uptime-"), - ([WIRELESS_CLIENT], "sensor.wireless_client_rx", "rx-"), - ([WIRELESS_CLIENT], "sensor.wireless_client_tx", "tx-"), - ([WIRELESS_CLIENT], "sensor.wireless_client_uptime", "uptime-"), + # Microsoft + ("microsoft_wan", "56", "20", 0), + # Google + ("google_wan", "53", "90", 1), + # Cloudflare + ("cloudflare_wan", "30", "80", 2), ], ) @pytest.mark.usefixtures("config_entry_setup") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.freeze_time("2021-01-01 01:01:00") -async def test_sensor_sources( +async def test_wan_monitor_latency( hass: HomeAssistant, entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - entity_id: str, - unique_id_prefix: str, + mock_websocket_message: WebsocketMessageMock, + device_payload: list[dict[str, Any]], + monitor_id: str, + state: str, + updated_state: str, + index_to_update: int, ) -> None: - """Test sensor sources and the entity description.""" - ent_reg_entry = entity_registry.async_get(entity_id) - assert ent_reg_entry.unique_id.startswith(unique_id_prefix) - assert ent_reg_entry.unique_id == snapshot - assert ent_reg_entry.entity_category == snapshot + """Verify that wan latency sensors are working as expected.""" + entity_id = f"sensor.mock_name_{monitor_id}_latency" - state = hass.states.get(entity_id) - assert state.attributes.get(ATTR_DEVICE_CLASS) == snapshot - assert state.attributes.get(ATTR_FRIENDLY_NAME) == snapshot - assert state.attributes.get(ATTR_STATE_CLASS) == snapshot - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == snapshot - assert state.state == snapshot + assert len(hass.states.async_all()) == 6 + assert 
len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get(entity_id) + assert latency_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + + # Enable entity + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + + await hass.async_block_till_done() + + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 7 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 + + # Verify sensor state + assert hass.states.get(entity_id).state == state + + # Verify state update + device = device_payload[0] + device["uptime_stats"]["WAN"]["monitors"][index_to_update]["latency_average"] = ( + updated_state + ) + + mock_websocket_message(message=MessageKey.DEVICE, data=device) + + assert hass.states.get(entity_id).state == updated_state + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "uptime_stats": { + "WAN": { + "monitors": [ + { + "availability": 100.0, + "latency_average": 30, + "target": "1.2.3.4", + "type": "icmp", + }, + ], + }, + "WAN2": { + "monitors": [ + { + "availability": 0.0, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 0.0, + "target": "google.com", + "type": "icmp", + }, + {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, + ], + }, + }, + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_wan_monitor_latency_with_no_entries( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that wan latency sensors is not created if there is no data.""" + + assert len(hass.states.async_all()) == 6 + assert 
len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") + assert latency_entry is None + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_wan_monitor_latency_with_no_uptime( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that wan latency sensors is not created if there is no data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") + assert latency_entry is None + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "Device", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "uptime": 60, + "version": "4.0.42.10433", + "temperatures": [ + {"name": "CPU", "type": "cpu", "value": 66.0}, + {"name": "Local", "type": "board", "value": 48.75}, + {"name": "PHY", "type": "board", "value": 50.25}, + ], + } + ] + ], +) +@pytest.mark.parametrize( + ("temperature_id", "state", "updated_state", "index_to_update"), + [ + ("device_cpu", "66.0", "20", 0), + ("device_local", "48.75", "90.64", 1), + ("device_phy", "50.25", "80", 2), + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_device_temperatures( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_websocket_message, + device_payload: list[dict[str, 
Any]], + temperature_id: str, + state: str, + updated_state: str, + index_to_update: int, +) -> None: + """Verify that device temperatures sensors are working as expected.""" + + entity_id = f"sensor.device_{temperature_id}_temperature" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + temperature_entity = entity_registry.async_get(entity_id) + assert temperature_entity.disabled_by == RegistryEntryDisabler.INTEGRATION + + # Enable entity + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + + await hass.async_block_till_done() + + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 7 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 + + # Verify sensor state + assert hass.states.get(entity_id).state == state + + # # Verify state update + device = device_payload[0] + device["temperatures"][index_to_update]["value"] = updated_state + + mock_websocket_message(message=MessageKey.DEVICE, data=device) + + assert hass.states.get(entity_id).state == updated_state + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_device_with_no_temperature( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that device temperature sensors is not created if there is no data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + temperature_entity = entity_registry.async_get( + "sensor.device_device_cpu_temperature" + ) + + assert 
temperature_entity is None + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + "temperatures": [ + {"name": "MEM", "type": "mem", "value": 66.0}, + ], + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_device_with_no_matching_temperatures( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that device temperature sensors is not created if there is no matching data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + temperature_entity = entity_registry.async_get( + "sensor.device_device_cpu_temperature" + ) + + assert temperature_entity is None diff --git a/tests/components/unifi/test_services.py b/tests/components/unifi/test_services.py index e3b03bc868d..a7968a92e22 100644 --- a/tests/components/unifi/test_services.py +++ b/tests/components/unifi/test_services.py @@ -10,11 +10,11 @@ from homeassistant.components.unifi.services import ( SERVICE_RECONNECT_CLIENT, SERVICE_REMOVE_CLIENTS, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_DEVICE_ID, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -25,7 +25,7 @@ async def test_reconnect_client( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify call to reconnect client is performed as expected.""" @@ -69,7 +69,7 @@ async def test_reconnect_device_without_mac( 
hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify no call is made if device does not have a known mac.""" aioclient_mock.clear_requests() @@ -95,7 +95,7 @@ async def test_reconnect_client_hub_unavailable( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify no call is made if hub is unavailable.""" @@ -127,7 +127,7 @@ async def test_reconnect_client_unknown_mac( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify no call is made if trying to reconnect a mac unknown to hub.""" aioclient_mock.clear_requests() @@ -152,7 +152,7 @@ async def test_reconnect_wired_client( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify no call is made if client is wired.""" @@ -204,7 +204,7 @@ async def test_reconnect_wired_client( async def test_remove_clients( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify removing different variations of clients work.""" aioclient_mock.clear_requests() @@ -288,8 +288,8 @@ async def test_services_handle_unloaded_config_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_setup: ConfigEntry, - clients_all_payload, + config_entry_setup: MockConfigEntry, + clients_all_payload: dict[str, Any], ) -> None: """Verify no call is made if config entry is unloaded.""" await 
hass.config_entries.async_unload(config_entry_setup.entry_id) diff --git a/tests/components/unifi/test_switch.py b/tests/components/unifi/test_switch.py index b0ae8bde445..ef93afa7e3e 100644 --- a/tests/components/unifi/test_switch.py +++ b/tests/components/unifi/test_switch.py @@ -1,18 +1,18 @@ """UniFi Network switch platform tests.""" -from collections.abc import Callable from copy import deepcopy from datetime import timedelta from typing import Any +from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest +from syrupy import SnapshotAssertion from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON, - SwitchDeviceClass, ) from homeassistant.components.unifi.const import ( CONF_BLOCK_CLIENT, @@ -22,24 +22,28 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_DEVICES, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( - ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, CONF_HOST, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.util import dt as dt_util -from .conftest import CONTROLLER_HOST +from .conftest import ( + CONTROLLER_HOST, + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) -from tests.common import async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker CLIENT_1 = { @@ -774,6 +778,65 @@ PORT_FORWARD_PLEX = { "src": "any", } +TRAFFIC_RULE = { + "_id": "6452cd9b859d5b11aa002ea1", + "action": "BLOCK", + "app_category_ids": [], + "app_ids": [], + "bandwidth_limit": { + 
"download_limit_kbps": 1024, + "enabled": False, + "upload_limit_kbps": 1024, + }, + "description": "Test Traffic Rule", + "name": "Test Traffic Rule", + "domains": [], + "enabled": True, + "ip_addresses": [], + "ip_ranges": [], + "matching_target": "INTERNET", + "network_ids": [], + "regions": [], + "schedule": { + "date_end": "2023-05-10", + "date_start": "2023-05-03", + "mode": "ALWAYS", + "repeat_on_days": [], + "time_all_day": False, + "time_range_end": "12:00", + "time_range_start": "09:00", + }, + "target_devices": [{"client_mac": CLIENT_1["mac"], "type": "CLIENT"}], +} + + +@pytest.mark.parametrize( + "config_entry_options", [{CONF_BLOCK_CLIENT: [BLOCKED["mac"]]}] +) +@pytest.mark.parametrize("client_payload", [[BLOCKED]]) +@pytest.mark.parametrize("device_payload", [[DEVICE_1, OUTLET_UP1, PDU_DEVICE_1]]) +@pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) +@pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) +@pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.parametrize( + "site_payload", + [[{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}]], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + site_payload: dict[str, Any], + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + @pytest.mark.parametrize("client_payload", [[CONTROLLER_HOST]]) @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @@ -784,18 +847,6 @@ async def 
test_hub_not_client(hass: HomeAssistant) -> None: assert hass.states.get("switch.cloud_key") is None -@pytest.mark.parametrize("client_payload", [[CLIENT_1]]) -@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) -@pytest.mark.parametrize( - "site_payload", - [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_not_admin(hass: HomeAssistant) -> None: - """Test that switch platform only work on an admin account.""" - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 - - @pytest.mark.parametrize( "config_entry_options", [ @@ -806,41 +857,17 @@ async def test_not_admin(hass: HomeAssistant) -> None: } ], ) -@pytest.mark.parametrize("client_payload", [[CLIENT_4]]) @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED, CLIENT_1]]) @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) -@pytest.mark.usefixtures("config_entry_setup") async def test_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 3 - switch_4 = hass.states.get("switch.poe_client_4") - assert switch_4 is None - - blocked = hass.states.get("switch.block_client_1") - assert blocked is not None - assert blocked.state == "off" - - unblocked = hass.states.get("switch.block_client_2") - assert unblocked is not None - assert unblocked.state == "on" - - dpi_switch = hass.states.get("switch.block_media_streaming") - assert dpi_switch is not None - assert dpi_switch.state == "on" - assert dpi_switch.attributes["icon"] == "mdi:network" - - for entry_id in ("switch.block_client_1", "switch.block_media_streaming"): - assert ( - entity_registry.async_get(entry_id).entity_category is 
EntityCategory.CONFIG - ) - # Block and unblock client aioclient_mock.clear_requests() aioclient_mock.post( @@ -899,7 +926,9 @@ async def test_switches( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") -async def test_remove_switches(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_remove_switches( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 @@ -936,8 +965,8 @@ async def test_remove_switches(hass: HomeAssistant, mock_websocket_message) -> N async def test_block_switches( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 @@ -996,14 +1025,13 @@ async def test_block_switches( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") -async def test_dpi_switches(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_dpi_switches( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - dpi_switch = hass.states.get("switch.block_media_streaming") - assert dpi_switch is not None - assert dpi_switch.state == STATE_ON - assert dpi_switch.attributes["icon"] == "mdi:network" + assert hass.states.get("switch.block_media_streaming").state == STATE_ON mock_websocket_message(data=DPI_APP_DISABLED_EVENT) await hass.async_block_till_done() @@ -1022,7 +1050,7 @@ 
async def test_dpi_switches(hass: HomeAssistant, mock_websocket_message) -> None @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") async def test_dpi_switches_add_second_app( - hass: HomeAssistant, mock_websocket_message + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1072,6 +1100,60 @@ async def test_dpi_switches_add_second_app( assert hass.states.get("switch.block_media_streaming").state == STATE_ON +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) +async def test_traffic_rules( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + config_entry_setup: MockConfigEntry, + traffic_rule_payload: list[dict[str, Any]], +) -> None: + """Test control of UniFi traffic rules.""" + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 + + # Validate state object + assert hass.states.get("switch.unifi_network_test_traffic_rule").state == STATE_ON + + traffic_rule = deepcopy(traffic_rule_payload[0]) + + # Disable traffic rule + aioclient_mock.put( + f"https://{config_entry_setup.data[CONF_HOST]}:1234" + f"/v2/api/site/{config_entry_setup.data[CONF_SITE_ID]}" + f"/trafficrules/{traffic_rule['_id']}", + ) + + call_count = aioclient_mock.call_count + + await hass.services.async_call( + SWITCH_DOMAIN, + "turn_off", + {"entity_id": "switch.unifi_network_test_traffic_rule"}, + blocking=True, + ) + # Updating the value for traffic rules will make another call to retrieve the values + assert aioclient_mock.call_count == call_count + 2 + expected_disable_call = deepcopy(traffic_rule) + expected_disable_call["enabled"] = False + + assert aioclient_mock.mock_calls[call_count][2] == expected_disable_call + + call_count = aioclient_mock.call_count + + # Enable traffic rule + await hass.services.async_call( + SWITCH_DOMAIN, + "turn_on", + 
{"entity_id": "switch.unifi_network_test_traffic_rule"}, + blocking=True, + ) + + expected_enable_call = deepcopy(traffic_rule) + expected_enable_call["enabled"] = True + + assert aioclient_mock.call_count == call_count + 2 + assert aioclient_mock.mock_calls[call_count][2] == expected_enable_call + + @pytest.mark.parametrize( ("device_payload", "entity_id", "outlet_index", "expected_switches"), [ @@ -1083,8 +1165,8 @@ async def test_dpi_switches_add_second_app( async def test_outlet_switches( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, device_payload: list[dict[str, Any]], entity_id: str, outlet_index: int, @@ -1094,10 +1176,7 @@ async def test_outlet_switches( assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == expected_switches # Validate state object - switch_1 = hass.states.get(f"switch.{entity_id}") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.OUTLET + assert hass.states.get(f"switch.{entity_id}").state == STATE_ON # Update state object device_1 = deepcopy(device_payload[0]) @@ -1156,15 +1235,6 @@ async def test_outlet_switches( await hass.async_block_till_done() assert hass.states.get(f"switch.{entity_id}").state == STATE_OFF - # Unload config entry - await hass.config_entries.async_unload(config_entry_setup.entry_id) - assert hass.states.get(f"switch.{entity_id}").state == STATE_UNAVAILABLE - - # Remove config entry - await hass.config_entries.async_remove(config_entry_setup.entry_id) - await hass.async_block_till_done() - assert hass.states.get(f"switch.{entity_id}") is None - @pytest.mark.parametrize( "config_entry_options", @@ -1179,7 +1249,7 @@ async def test_outlet_switches( ) @pytest.mark.usefixtures("config_entry_setup") async def test_new_client_discovered_on_block_control( - hass: 
HomeAssistant, mock_websocket_message + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock ) -> None: """Test if 2nd update has a new client.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 @@ -1197,7 +1267,9 @@ async def test_new_client_discovered_on_block_control( ) @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED]]) async def test_option_block_clients( - hass: HomeAssistant, config_entry_setup: ConfigEntry, clients_all_payload + hass: HomeAssistant, + config_entry_setup: MockConfigEntry, + clients_all_payload: list[dict[str, Any]], ) -> None: """Test the changes to option reflects accordingly.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1245,7 +1317,7 @@ async def test_option_block_clients( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_option_remove_switches( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test removal of DPI switch when options updated.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1263,8 +1335,8 @@ async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Test PoE port entities work.""" @@ -1272,7 +1344,6 @@ async def test_poe_port_switches( ent_reg_entry = entity_registry.async_get("switch.mock_name_port_1_poe") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.CONFIG # Enable entity entity_registry.async_update_entity( @@ -1289,10 +1360,7 @@ async def test_poe_port_switches( await hass.async_block_till_done() # Validate state object - switch_1 
= hass.states.get("switch.mock_name_port_1_poe") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.OUTLET + assert hass.states.get("switch.mock_name_port_1_poe").state == STATE_ON # Update state object device_1 = deepcopy(device_payload[0]) @@ -1360,24 +1428,16 @@ async def test_poe_port_switches( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) async def test_wlan_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, wlan_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi WLAN availability.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - ent_reg_entry = entity_registry.async_get("switch.ssid_1") - assert ent_reg_entry.unique_id == "wlan-012345678910111213141516" - assert ent_reg_entry.entity_category is EntityCategory.CONFIG - # Validate state object - switch_1 = hass.states.get("switch.ssid_1") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + assert hass.states.get("switch.ssid_1").state == STATE_ON # Update state object wlan = deepcopy(wlan_payload[0]) @@ -1416,24 +1476,16 @@ async def test_wlan_switches( @pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) async def test_port_forwarding_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, port_forward_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi port forwarding.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - ent_reg_entry = 
entity_registry.async_get("switch.unifi_network_plex") - assert ent_reg_entry.unique_id == "port_forward-5a32aa4ee4b0412345678911" - assert ent_reg_entry.entity_category is EntityCategory.CONFIG - # Validate state object - switch_1 = hass.states.get("switch.unifi_network_plex") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + assert hass.states.get("switch.unifi_network_plex").state == STATE_ON # Update state object data = port_forward_payload[0].copy() @@ -1517,9 +1569,9 @@ async def test_port_forwarding_switches( async def test_updating_unique_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory: Callable[[], ConfigEntry], - config_entry: ConfigEntry, - device_payload, + config_entry_factory: ConfigEntryFactoryType, + config_entry: MockConfigEntry, + device_payload: list[dict[str, Any]], ) -> None: """Verify outlet control and poe control unique ID update works.""" entity_registry.async_get_or_create( @@ -1552,10 +1604,13 @@ async def test_updating_unique_id( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: """Verify entities state reflect on hub connection becoming unavailable.""" entity_ids = ( "switch.block_client_2", @@ -1563,6 +1618,7 @@ async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> No "switch.plug_outlet_1", "switch.block_media_streaming", 
"switch.unifi_network_plex", + "switch.unifi_network_test_traffic_rule", "switch.ssid_1", ) for entity_id in entity_ids: diff --git a/tests/components/unifi/test_update.py b/tests/components/unifi/test_update.py index 3b1de6c4456..7bf4b9aec9d 100644 --- a/tests/components/unifi/test_update.py +++ b/tests/components/unifi/test_update.py @@ -1,9 +1,11 @@ """The tests for the UniFi Network update platform.""" from copy import deepcopy +from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest +from syrupy import SnapshotAssertion from yarl import URL from homeassistant.components.unifi.const import CONF_SITE_ID @@ -13,23 +15,28 @@ from homeassistant.components.update import ( ATTR_LATEST_VERSION, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, - UpdateDeviceClass, - UpdateEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, - ATTR_SUPPORTED_FEATURES, CONF_HOST, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker +# Device with new firmware available DEVICE_1 = { "board_rev": 3, "device_id": "mock-id", @@ -46,6 +53,7 @@ DEVICE_1 = { "upgrade_to_firmware": "4.3.17.11279", } +# Device without new firmware available DEVICE_2 = { "board_rev": 3, "device_id": "mock-id", @@ -61,43 +69,40 @@ DEVICE_2 = { @pytest.mark.parametrize("device_payload", [[DEVICE_1, DEVICE_2]]) +@pytest.mark.parametrize( + "site_payload", + [ + [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], + [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], + ], +) +async def test_entity_and_device_data( + hass: HomeAssistant, + 
entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.UPDATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") -async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_device_updates( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some devices.""" - assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 2 - - # Device with new firmware available - device_1_state = hass.states.get("update.device_1") assert device_1_state.state == STATE_ON - assert device_1_state.attributes[ATTR_INSTALLED_VERSION] == "4.0.42.10433" - assert device_1_state.attributes[ATTR_LATEST_VERSION] == "4.3.17.11279" assert device_1_state.attributes[ATTR_IN_PROGRESS] is False - assert device_1_state.attributes[ATTR_DEVICE_CLASS] == UpdateDeviceClass.FIRMWARE - assert ( - device_1_state.attributes[ATTR_SUPPORTED_FEATURES] - == UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL - ) - - # Device without new firmware available - - device_2_state = hass.states.get("update.device_2") - assert device_2_state.state == STATE_OFF - assert device_2_state.attributes[ATTR_INSTALLED_VERSION] == "4.0.42.10433" - assert device_2_state.attributes[ATTR_LATEST_VERSION] == "4.0.42.10433" - assert device_2_state.attributes[ATTR_IN_PROGRESS] is False - assert device_2_state.attributes[ATTR_DEVICE_CLASS] == UpdateDeviceClass.FIRMWARE - assert ( - device_2_state.attributes[ATTR_SUPPORTED_FEATURES] - == UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL - ) # Simulate start of update device_1 = 
deepcopy(DEVICE_1) device_1["state"] = 4 mock_websocket_message(message=MessageKey.DEVICE, data=device_1) - await hass.async_block_till_done() device_1_state = hass.states.get("update.device_1") assert device_1_state.state == STATE_ON @@ -112,7 +117,6 @@ async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> No device_1["upgradable"] = False del device_1["upgrade_to_firmware"] mock_websocket_message(message=MessageKey.DEVICE, data=device_1) - await hass.async_block_till_done() device_1_state = hass.states.get("update.device_1") assert device_1_state.state == STATE_OFF @@ -121,30 +125,13 @@ async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> No assert device_1_state.attributes[ATTR_IN_PROGRESS] is False -@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) -@pytest.mark.parametrize( - "site_payload", - [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_not_admin(hass: HomeAssistant) -> None: - """Test that the INSTALL feature is not available on a non-admin account.""" - assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 - device_state = hass.states.get("update.device_1") - assert device_state.state == STATE_ON - assert ( - device_state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature.PROGRESS - ) - - @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) async def test_install( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Test the device update install call.""" - assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 device_state = hass.states.get("update.device_1") assert device_state.state == STATE_ON @@ -174,9 +161,10 @@ async def test_install( @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") -async def test_hub_state_change(hass: 
HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: """Verify entities state reflect on hub becoming unavailable.""" - assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 assert hass.states.get("update.device_1").state == STATE_ON # Controller unavailable diff --git a/tests/components/unifiprotect/conftest.py b/tests/components/unifiprotect/conftest.py index 6366a4f9244..0bef1ff0eb9 100644 --- a/tests/components/unifiprotect/conftest.py +++ b/tests/components/unifiprotect/conftest.py @@ -29,6 +29,7 @@ from uiprotect.data import ( Viewer, WSSubscriptionMessage, ) +from uiprotect.websocket import WebsocketState from homeassistant.components.unifiprotect.const import DOMAIN from homeassistant.core import HomeAssistant @@ -148,7 +149,14 @@ def mock_entry( ufp.ws_subscription = ws_callback return Mock() + def subscribe_websocket_state( + ws_state_subscription: Callable[[WebsocketState], None], + ) -> Any: + ufp.ws_state_subscription = ws_state_subscription + return Mock() + ufp_client.subscribe_websocket = subscribe + ufp_client.subscribe_websocket_state = subscribe_websocket_state yield ufp diff --git a/tests/components/unifiprotect/test_binary_sensor.py b/tests/components/unifiprotect/test_binary_sensor.py index 42782d10429..af8ce015955 100644 --- a/tests/components/unifiprotect/test_binary_sensor.py +++ b/tests/components/unifiprotect/test_binary_sensor.py @@ -25,7 +25,6 @@ from homeassistant.components.unifiprotect.binary_sensor import ( LIGHT_SENSORS, MOUNTABLE_SENSE_SENSORS, SENSE_SENSORS, - SMART_EVENT_SENSORS, ) from homeassistant.components.unifiprotect.const import ( ATTR_EVENT_SCORE, @@ -453,7 +452,7 @@ async def test_binary_sensor_package_detected( doorbell.smart_detect_settings.object_types.append(SmartDetectObjectType.PACKAGE) _, entity_id = ids_from_device_description( - Platform.BINARY_SENSOR, doorbell, SMART_EVENT_SENSORS[4] + 
Platform.BINARY_SENSOR, doorbell, EVENT_SENSORS[6] ) event = Event( diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 444898fbd85..9fedb67fea4 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -4,10 +4,13 @@ from __future__ import annotations from unittest.mock import AsyncMock, Mock +from uiprotect.api import DEVICE_UPDATE_INTERVAL from uiprotect.data import Camera as ProtectCamera, CameraChannel, StateType from uiprotect.exceptions import NvrError +from uiprotect.websocket import WebsocketState from homeassistant.components.camera import ( + STATE_IDLE, CameraEntityFeature, async_get_image, async_get_stream_source, @@ -19,13 +22,13 @@ from homeassistant.components.unifiprotect.const import ( ATTR_HEIGHT, ATTR_WIDTH, DEFAULT_ATTRIBUTION, - DEFAULT_SCAN_INTERVAL, ) from homeassistant.components.unifiprotect.utils import get_camera_base_name from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, + STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant @@ -377,7 +380,7 @@ async def test_camera_interval_update( ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.update = AsyncMock(return_value=ufp.api.bootstrap) - await time_changed(hass, DEFAULT_SCAN_INTERVAL) + await time_changed(hass, DEVICE_UPDATE_INTERVAL) state = hass.states.get(entity_id) assert state and state.state == "recording" @@ -397,19 +400,46 @@ async def test_camera_bad_interval_update( # update fails ufp.api.update = AsyncMock(side_effect=NvrError) - await time_changed(hass, DEFAULT_SCAN_INTERVAL) + await time_changed(hass, DEVICE_UPDATE_INTERVAL) state = hass.states.get(entity_id) assert state and state.state == "unavailable" # next update succeeds ufp.api.update = AsyncMock(return_value=ufp.api.bootstrap) - await time_changed(hass, DEFAULT_SCAN_INTERVAL) + await time_changed(hass, DEVICE_UPDATE_INTERVAL) 
state = hass.states.get(entity_id) assert state and state.state == "idle" +async def test_camera_websocket_disconnected( + hass: HomeAssistant, ufp: MockUFPFixture, camera: ProtectCamera +) -> None: + """Test the websocket gets disconnected and reconnected.""" + + await init_entry(hass, ufp, [camera]) + assert_entity_counts(hass, Platform.CAMERA, 2, 1) + entity_id = "camera.test_camera_high_resolution_channel" + + state = hass.states.get(entity_id) + assert state and state.state == STATE_IDLE + + # websocket disconnects + ufp.ws_state_subscription(WebsocketState.DISCONNECTED) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state and state.state == STATE_UNAVAILABLE + + # websocket reconnects + ufp.ws_state_subscription(WebsocketState.CONNECTED) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state and state.state == STATE_IDLE + + async def test_camera_ws_update( hass: HomeAssistant, ufp: MockUFPFixture, camera: ProtectCamera ) -> None: diff --git a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py new file mode 100644 index 00000000000..9d1a701fe39 --- /dev/null +++ b/tests/components/unifiprotect/test_event.py @@ -0,0 +1,154 @@ +"""Test the UniFi Protect event platform.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from unittest.mock import Mock + +from uiprotect.data import Camera, Event, EventType, ModelType, SmartDetectObjectType + +from homeassistant.components.unifiprotect.const import ( + ATTR_EVENT_ID, + DEFAULT_ATTRIBUTION, +) +from homeassistant.components.unifiprotect.event import EVENT_DESCRIPTIONS +from homeassistant.const import ATTR_ATTRIBUTION, Platform +from homeassistant.core import Event as HAEvent, HomeAssistant, callback +from homeassistant.helpers.event import async_track_state_change_event + +from .utils import ( + MockUFPFixture, + adopt_devices, + assert_entity_counts, + 
ids_from_device_description, + init_entry, + remove_entities, +) + + +async def test_camera_remove( + hass: HomeAssistant, ufp: MockUFPFixture, doorbell: Camera, unadopted_camera: Camera +) -> None: + """Test removing and re-adding a camera device.""" + + ufp.api.bootstrap.nvr.system_info.ustorage = None + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 1, 1) + await remove_entities(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 0, 0) + await adopt_devices(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 1, 1) + + +async def test_doorbell_ring( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell ring event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 1, 1) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[0] + ) + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.RING, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.copy() + new_camera.last_ring_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + timestamp = state.state + assert state.attributes[ATTR_ATTRIBUTION] == 
DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.RING, + start=fixed_now - timedelta(seconds=1), + end=fixed_now + timedelta(seconds=1), + score=50, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.copy() + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + # Event is already seen and has end, should now be off + state = hass.states.get(entity_id) + assert state + assert state.state == timestamp + + # Now send an event that has an end right away + event = Event( + model=ModelType.EVENT, + id="new_event_id", + type=EventType.RING, + start=fixed_now - timedelta(seconds=1), + end=fixed_now + timedelta(seconds=1), + score=80, + smart_detect_types=[SmartDetectObjectType.PACKAGE], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.copy() + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + + ufp.ws_msg(mock_msg) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state == timestamp + unsub() diff --git a/tests/components/unifiprotect/test_init.py b/tests/components/unifiprotect/test_init.py index 3b75afaace8..46e57c62101 100644 --- a/tests/components/unifiprotect/test_init.py +++ b/tests/components/unifiprotect/test_init.py @@ -5,12 +5,12 @@ from __future__ import annotations from unittest.mock import AsyncMock, patch from uiprotect import NotAuthorized, NvrError, ProtectApiClient +from uiprotect.api import DEVICE_UPDATE_INTERVAL from uiprotect.data import NVR, 
Bootstrap, CloudAccount, Light from homeassistant.components.unifiprotect.const import ( AUTH_RETRIES, CONF_DISABLE_RTSP, - DEFAULT_SCAN_INTERVAL, DOMAIN, ) from homeassistant.config_entries import ConfigEntry, ConfigEntryState @@ -116,12 +116,12 @@ async def test_setup_too_old( old_bootstrap = ufp.api.bootstrap.copy() old_bootstrap.nvr = old_nvr - ufp.api.get_bootstrap.return_value = old_bootstrap + ufp.api.update.return_value = old_bootstrap + ufp.api.bootstrap = old_bootstrap await hass.config_entries.async_setup(ufp.entry.entry_id) await hass.async_block_till_done() assert ufp.entry.state is ConfigEntryState.SETUP_ERROR - assert not ufp.api.update.called async def test_setup_cloud_account( @@ -179,13 +179,13 @@ async def test_setup_failed_update_reauth( # to verify it is not transient ufp.api.update = AsyncMock(side_effect=NotAuthorized) for _ in range(AUTH_RETRIES): - await time_changed(hass, DEFAULT_SCAN_INTERVAL) + await time_changed(hass, DEVICE_UPDATE_INTERVAL) assert len(hass.config_entries.flow._progress) == 0 assert ufp.api.update.call_count == AUTH_RETRIES assert ufp.entry.state is ConfigEntryState.LOADED - await time_changed(hass, DEFAULT_SCAN_INTERVAL) + await time_changed(hass, DEVICE_UPDATE_INTERVAL) assert ufp.api.update.call_count == AUTH_RETRIES + 1 assert len(hass.config_entries.flow._progress) == 1 @@ -193,18 +193,17 @@ async def test_setup_failed_update_reauth( async def test_setup_failed_error(hass: HomeAssistant, ufp: MockUFPFixture) -> None: """Test setup of unifiprotect entry with generic error.""" - ufp.api.get_bootstrap = AsyncMock(side_effect=NvrError) + ufp.api.update = AsyncMock(side_effect=NvrError) await hass.config_entries.async_setup(ufp.entry.entry_id) await hass.async_block_till_done() assert ufp.entry.state is ConfigEntryState.SETUP_RETRY - assert not ufp.api.update.called async def test_setup_failed_auth(hass: HomeAssistant, ufp: MockUFPFixture) -> None: """Test setup of unifiprotect entry with unauthorized error after 
multiple retries.""" - ufp.api.get_bootstrap = AsyncMock(side_effect=NotAuthorized) + ufp.api.update = AsyncMock(side_effect=NotAuthorized) await hass.config_entries.async_setup(ufp.entry.entry_id) assert ufp.entry.state is ConfigEntryState.SETUP_RETRY @@ -215,7 +214,6 @@ async def test_setup_failed_auth(hass: HomeAssistant, ufp: MockUFPFixture) -> No await hass.config_entries.async_reload(ufp.entry.entry_id) assert ufp.entry.state is ConfigEntryState.SETUP_ERROR - assert not ufp.api.update.called async def test_setup_starts_discovery( diff --git a/tests/components/unifiprotect/test_switch.py b/tests/components/unifiprotect/test_switch.py index 6e5c83ef237..9e0e9efa0ce 100644 --- a/tests/components/unifiprotect/test_switch.py +++ b/tests/components/unifiprotect/test_switch.py @@ -35,15 +35,16 @@ CAMERA_SWITCHES_BASIC = [ for d in CAMERA_SWITCHES if ( not d.name.startswith("Detections:") - and d.name != "SSH enabled" - and d.name != "Color night vision" - and d.name != "Tracking: person" - and d.name != "HDR mode" + and d.name + not in {"SSH enabled", "Color night vision", "Tracking: person", "HDR mode"} ) - or d.name == "Detections: motion" - or d.name == "Detections: person" - or d.name == "Detections: vehicle" - or d.name == "Detections: animal" + or d.name + in { + "Detections: motion", + "Detections: person", + "Detections: vehicle", + "Detections: animal", + } ] CAMERA_SWITCHES_NO_EXTRA = [ d diff --git a/tests/components/unifiprotect/utils.py b/tests/components/unifiprotect/utils.py index ab3aefaa09d..25a9ddcbb92 100644 --- a/tests/components/unifiprotect/utils.py +++ b/tests/components/unifiprotect/utils.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import Callable, Sequence from dataclasses import dataclass from datetime import timedelta -from typing import Any from unittest.mock import Mock from uiprotect import ProtectApiClient @@ -20,6 +19,7 @@ from uiprotect.data import ( ) from uiprotect.data.bootstrap import 
ProtectDeviceRef from uiprotect.test_util.anonymize import random_hex +from uiprotect.websocket import WebsocketState from homeassistant.const import Platform from homeassistant.core import HomeAssistant, split_entity_id @@ -38,12 +38,13 @@ class MockUFPFixture: entry: MockConfigEntry api: ProtectApiClient ws_subscription: Callable[[WSSubscriptionMessage], None] | None = None + ws_state_subscription: Callable[[WebsocketState], None] | None = None - def ws_msg(self, msg: WSSubscriptionMessage) -> Any: + def ws_msg(self, msg: WSSubscriptionMessage) -> None: """Emit WS message for testing.""" if self.ws_subscription is not None: - return self.ws_subscription(msg) + self.ws_subscription(msg) def reset_objects(bootstrap: Bootstrap): diff --git a/tests/components/universal/test_media_player.py b/tests/components/universal/test_media_player.py index 814fa34a125..7c992814cfe 100644 --- a/tests/components/universal/test_media_player.py +++ b/tests/components/universal/test_media_player.py @@ -55,7 +55,7 @@ def validate_config(config): class MockMediaPlayer(media_player.MediaPlayerEntity): """Mock media player for testing.""" - def __init__(self, hass, name): + def __init__(self, hass: HomeAssistant, name: str) -> None: """Initialize the media player.""" self.hass = hass self._name = name @@ -220,7 +220,7 @@ class MockMediaPlayer(media_player.MediaPlayerEntity): @pytest.fixture -async def mock_states(hass): +async def mock_states(hass: HomeAssistant) -> Mock: """Set mock states used in tests.""" result = Mock() @@ -325,10 +325,10 @@ async def test_config_bad_children(hass: HomeAssistant) -> None: config_bad_children = {"name": "test", "children": {}, "platform": "universal"} config_no_children = validate_config(config_no_children) - assert [] == config_no_children["children"] + assert config_no_children["children"] == [] config_bad_children = validate_config(config_bad_children) - assert [] == config_bad_children["children"] + assert config_bad_children["children"] == [] 
async def test_config_bad_commands(hass: HomeAssistant) -> None: @@ -336,7 +336,7 @@ async def test_config_bad_commands(hass: HomeAssistant) -> None: config = {"name": "test", "platform": "universal"} config = validate_config(config) - assert {} == config["commands"] + assert config["commands"] == {} async def test_config_bad_attributes(hass: HomeAssistant) -> None: @@ -344,7 +344,7 @@ async def test_config_bad_attributes(hass: HomeAssistant) -> None: config = {"name": "test", "platform": "universal"} config = validate_config(config) - assert {} == config["attributes"] + assert config["attributes"] == {} async def test_config_bad_key(hass: HomeAssistant) -> None: @@ -1280,6 +1280,7 @@ async def test_master_state_with_template(hass: HomeAssistant) -> None: context = Context() hass.states.async_set("input_boolean.test", STATE_ON, context=context) await hass.async_block_till_done() + await hass.async_block_till_done() assert hass.states.get("media_player.tv").state == STATE_OFF assert events[0].context == context diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index d5d6d70bb68..efa6d60c344 100644 --- a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -1,7 +1,7 @@ """Test the UPB Control config flow.""" from asyncio import TimeoutError -from unittest.mock import MagicMock, PropertyMock, patch +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from homeassistant import config_entries from homeassistant.components.upb.const import DOMAIN @@ -15,11 +15,12 @@ def mocked_upb(sync_complete=True, config_ok=True): def _upb_lib_connect(callback): callback() - upb_mock = MagicMock() + upb_mock = AsyncMock() type(upb_mock).network_id = PropertyMock(return_value="42") type(upb_mock).config_ok = PropertyMock(return_value=config_ok) + type(upb_mock).disconnect = MagicMock() if sync_complete: - upb_mock.connect.side_effect = _upb_lib_connect + 
upb_mock.async_connect.side_effect = _upb_lib_connect return patch( "homeassistant.components.upb.config_flow.upb_lib.UpbPim", return_value=upb_mock ) diff --git a/tests/components/update/test_device_trigger.py b/tests/components/update/test_device_trigger.py index fa9af863f56..202b3d32509 100644 --- a/tests/components/update/test_device_trigger.py +++ b/tests/components/update/test_device_trigger.py @@ -21,7 +21,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -31,12 +30,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -182,7 +175,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -253,21 +246,21 @@ async def test_if_fires_on_state_change( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not calls + assert not service_calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "no_update device - update.update_available - on - off - None" ) hass.states.async_set("update.update_available", STATE_ON) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - 
calls[1].data["some"] + service_calls[1].data["some"] == "update_available device - update.update_available - off - on - None" ) @@ -276,7 +269,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -326,13 +319,13 @@ async def test_if_fires_on_state_change_legacy( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not calls + assert not service_calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "no_update device - update.update_available - on - off - None" ) @@ -341,7 +334,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for triggers firing with delay.""" @@ -392,16 +385,16 @@ async def test_if_fires_on_state_change_with_for( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not calls + assert not service_calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert not calls + assert not service_calls async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "turn_off device - update.update_available - on - off - 0:00:05" ) diff --git 
a/tests/components/update/test_init.py b/tests/components/update/test_init.py index b37abc2263a..7860c679f37 100644 --- a/tests/components/update/test_init.py +++ b/tests/components/update/test_init.py @@ -1,9 +1,9 @@ """The tests for the Update component.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.update import ( ATTR_BACKUP, diff --git a/tests/components/upnp/conftest.py b/tests/components/upnp/conftest.py index 00e8db124f0..1431ce2c9ef 100644 --- a/tests/components/upnp/conftest.py +++ b/tests/components/upnp/conftest.py @@ -2,13 +2,16 @@ from __future__ import annotations +from collections.abc import Generator import copy from datetime import datetime +import socket from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch from urllib.parse import urlparse +from async_upnp_client.aiohttp import AiohttpNotifyServer from async_upnp_client.client import UpnpDevice -from async_upnp_client.profiles.igd import IgdDevice, IgdState, StatusInfo +from async_upnp_client.profiles.igd import IgdDevice, IgdState import pytest from homeassistant.components import ssdp @@ -87,21 +90,35 @@ def mock_igd_device(mock_async_create_device) -> IgdDevice: bytes_sent=0, packets_received=0, packets_sent=0, - status_info=StatusInfo( - "Connected", - "", - 10, - ), + connection_status="Connected", + last_connection_error="", + uptime=10, external_ip_address="8.9.10.11", kibibytes_per_sec_received=None, kibibytes_per_sec_sent=None, packets_per_sec_received=None, packets_per_sec_sent=None, + port_mapping_number_of_entries=0, ) - with patch( - "homeassistant.components.upnp.device.IgdDevice.__new__", - return_value=mock_igd_device, + mock_igd_device.async_subscribe_services = AsyncMock() + + mock_notify_server = create_autospec(AiohttpNotifyServer) + mock_notify_server.event_handler = MagicMock() + + with ( + patch( + 
"homeassistant.components.upnp.device.async_get_local_ip", + return_value=(socket.AF_INET, "127.0.0.1"), + ), + patch( + "homeassistant.components.upnp.device.IgdDevice.__new__", + return_value=mock_igd_device, + ), + patch( + "homeassistant.components.upnp.device.AiohttpNotifyServer.__new__", + return_value=mock_notify_server, + ), ): yield mock_igd_device @@ -137,7 +154,7 @@ def mock_setup_entry(): @pytest.fixture(autouse=True) -async def silent_ssdp_scanner(hass): +def silent_ssdp_scanner() -> Generator[None]: """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -231,7 +248,7 @@ async def mock_config_entry( ssdp_instant_discovery, mock_igd_device: IgdDevice, mock_mac_address_from_host, -): +) -> MockConfigEntry: """Create an initialized integration.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/upnp/test_binary_sensor.py b/tests/components/upnp/test_binary_sensor.py index 3a800ca75b9..087cd9e9fb4 100644 --- a/tests/components/upnp/test_binary_sensor.py +++ b/tests/components/upnp/test_binary_sensor.py @@ -2,7 +2,7 @@ from datetime import datetime, timedelta -from async_upnp_client.profiles.igd import IgdDevice, IgdState, StatusInfo +from async_upnp_client.profiles.igd import IgdDevice, IgdState from homeassistant.components.upnp.const import DEFAULT_SCAN_INTERVAL from homeassistant.core import HomeAssistant @@ -27,16 +27,15 @@ async def test_upnp_binary_sensors( bytes_sent=0, packets_received=0, packets_sent=0, - status_info=StatusInfo( - "Disconnected", - "", - 40, - ), + connection_status="Disconnected", + last_connection_error="", + uptime=40, external_ip_address="8.9.10.11", kibibytes_per_sec_received=None, kibibytes_per_sec_sent=None, packets_per_sec_received=None, packets_per_sec_sent=None, + port_mapping_number_of_entries=0, ) async_fire_time_changed( diff --git a/tests/components/upnp/test_config_flow.py 
b/tests/components/upnp/test_config_flow.py index b8a08d3f592..8799e0faab3 100644 --- a/tests/components/upnp/test_config_flow.py +++ b/tests/components/upnp/test_config_flow.py @@ -9,6 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components import ssdp from homeassistant.components.upnp.const import ( + CONFIG_ENTRY_FORCE_POLL, CONFIG_ENTRY_HOST, CONFIG_ENTRY_LOCATION, CONFIG_ENTRY_MAC_ADDRESS, @@ -473,3 +474,28 @@ async def test_flow_ssdp_with_mismatched_udn(hass: HomeAssistant) -> None: CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, CONFIG_ENTRY_HOST: TEST_HOST, } + + +async def test_options_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that the options flow works.""" + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + user_input = { + CONFIG_ENTRY_FORCE_POLL: True, + } + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONFIG_ENTRY_FORCE_POLL: True, + } + assert mock_config_entry.options == { + CONFIG_ENTRY_FORCE_POLL: True, + } diff --git a/tests/components/upnp/test_init.py b/tests/components/upnp/test_init.py index 4b5e375f8e0..f87696b0bd1 100644 --- a/tests/components/upnp/test_init.py +++ b/tests/components/upnp/test_init.py @@ -5,10 +5,12 @@ from __future__ import annotations import copy from unittest.mock import AsyncMock, MagicMock, patch +from async_upnp_client.profiles.igd import IgdDevice import pytest from homeassistant.components import ssdp from homeassistant.components.upnp.const import ( + CONFIG_ENTRY_FORCE_POLL, CONFIG_ENTRY_LOCATION, CONFIG_ENTRY_MAC_ADDRESS, CONFIG_ENTRY_ORIGINAL_UDN, @@ -31,7 +33,9 @@ from tests.common import MockConfigEntry 
@pytest.mark.usefixtures("ssdp_instant_discovery", "mock_mac_address_from_host") -async def test_async_setup_entry_default(hass: HomeAssistant) -> None: +async def test_async_setup_entry_default( + hass: HomeAssistant, mock_igd_device: IgdDevice +) -> None: """Test async_setup_entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -43,12 +47,17 @@ async def test_async_setup_entry_default(hass: HomeAssistant) -> None: CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, ) # Load config_entry. entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) is True + mock_igd_device.async_subscribe_services.assert_called() + @pytest.mark.usefixtures("ssdp_instant_discovery", "mock_no_mac_address_from_host") async def test_async_setup_entry_default_no_mac_address(hass: HomeAssistant) -> None: @@ -63,6 +72,9 @@ async def test_async_setup_entry_default_no_mac_address(hass: HomeAssistant) -> CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: None, }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, ) # Load config_entry. @@ -91,6 +103,9 @@ async def test_async_setup_entry_multi_location( CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, ) # Load config_entry. @@ -119,6 +134,9 @@ async def test_async_setup_udn_mismatch( CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, ) # Set up device discovery callback. @@ -143,3 +161,34 @@ async def test_async_setup_udn_mismatch( # Ensure that the IPv4 location is used. 
mock_async_create_device.assert_called_once_with(TEST_LOCATION) + + +@pytest.mark.usefixtures( + "ssdp_instant_discovery", + "mock_get_source_ip", + "mock_mac_address_from_host", +) +async def test_async_setup_entry_force_poll( + hass: HomeAssistant, mock_igd_device: IgdDevice +) -> None: + """Test async_setup_entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_USN, + data={ + CONFIG_ENTRY_ST: TEST_ST, + CONFIG_ENTRY_UDN: TEST_UDN, + CONFIG_ENTRY_ORIGINAL_UDN: TEST_UDN, + CONFIG_ENTRY_LOCATION: TEST_LOCATION, + CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, + }, + options={ + CONFIG_ENTRY_FORCE_POLL: True, + }, + ) + + # Load config_entry. + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) is True + + mock_igd_device.async_subscribe_services.assert_not_called() diff --git a/tests/components/upnp/test_sensor.py b/tests/components/upnp/test_sensor.py index 7dfbb144b01..67a64b265d9 100644 --- a/tests/components/upnp/test_sensor.py +++ b/tests/components/upnp/test_sensor.py @@ -2,7 +2,7 @@ from datetime import datetime, timedelta -from async_upnp_client.profiles.igd import IgdDevice, IgdState, StatusInfo +from async_upnp_client.profiles.igd import IgdDevice, IgdState from homeassistant.components.upnp.const import DEFAULT_SCAN_INTERVAL from homeassistant.core import HomeAssistant @@ -35,16 +35,15 @@ async def test_upnp_sensors( bytes_sent=20480, packets_received=30, packets_sent=40, - status_info=StatusInfo( - "Disconnected", - "", - 40, - ), + connection_status="Disconnected", + last_connection_error="", + uptime=40, external_ip_address="", kibibytes_per_sec_received=10.0, kibibytes_per_sec_sent=20.0, packets_per_sec_received=30.0, packets_per_sec_sent=40.0, + port_mapping_number_of_entries=0, ) now = dt_util.utcnow() diff --git a/tests/components/uptime/conftest.py b/tests/components/uptime/conftest.py index 2fe96b91b63..008172dc35a 100644 --- a/tests/components/uptime/conftest.py +++ 
b/tests/components/uptime/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.uptime.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/uptime/snapshots/test_sensor.ambr b/tests/components/uptime/snapshots/test_sensor.ambr index 0e7ae6dceaa..561e4b83320 100644 --- a/tests/components/uptime/snapshots/test_sensor.ambr +++ b/tests/components/uptime/snapshots/test_sensor.ambr @@ -61,30 +61,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Uptime', 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_uptime_sensor.3 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'is_new': False, - 'manufacturer': None, - 'model': None, - 'name': 'Uptime', - 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/utility_meter/test_diagnostics.py b/tests/components/utility_meter/test_diagnostics.py index cefd17fc7e4..9ecabe813b1 100644 --- a/tests/components/utility_meter/test_diagnostics.py +++ b/tests/components/utility_meter/test_diagnostics.py @@ -4,6 +4,7 @@ from aiohttp.test_utils import TestClient from freezegun import freeze_time import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.auth.models import Credentials from homeassistant.components.utility_meter.const import DOMAIN @@ -45,11 +46,6 @@ def _get_test_client_generator( return auth_client -def limit_diagnostic_attrs(prop, path) -> bool: - """Mark attributes to exclude from diagnostic snapshot.""" - 
return prop in {"entry_id"} - - @freeze_time("2024-04-06 00:00:00+00:00") @pytest.mark.usefixtures("socket_enabled") async def test_diagnostics( @@ -125,4 +121,4 @@ async def test_diagnostics( hass, _get_test_client_generator(hass, aiohttp_client, new_token), config_entry ) - assert diag == snapshot(exclude=limit_diagnostic_attrs) + assert diag == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/v2c/conftest.py b/tests/components/v2c/conftest.py index 1803298be28..5c7db8bbab3 100644 --- a/tests/components/v2c/conftest.py +++ b/tests/components/v2c/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the V2C tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from pytrydan.models.trydan import TrydanData -from typing_extensions import Generator from homeassistant.components.v2c.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index a4f6cad4cc8..cc34cae87f8 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -18,7 +18,7 @@ 'unique_id': 'ABC123', 'version': 1, }), - 'data': "TrydanData(ID='ABC123', charge_state=, ready_state=, charge_power=1500.27, charge_energy=1.8, slave_error=, charge_time=4355, house_power=0.0, fv_power=0.0, battery_power=0.0, paused=, locked=, timer=, intensity=6, dynamic=, min_intensity=6, max_intensity=16, pause_dynamic=, dynamic_power_mode=, contracted_power=4600, firmware_version='2.1.7')", + 'data': "TrydanData(ID='ABC123', charge_state=, ready_state=, charge_power=1500.27, voltage_installation=None, charge_energy=1.8, slave_error=, charge_time=4355, house_power=0.0, fv_power=0.0, battery_power=0.0, paused=, locked=, timer=, intensity=6, dynamic=, min_intensity=6, max_intensity=16, pause_dynamic=, dynamic_power_mode=, 
contracted_power=4600, firmware_version='2.1.7', SSID=None, IP=None, signal_status=None)", 'host_status': 200, 'raw_data': '{"ID":"ABC123","ChargeState":2,"ReadyState":0,"ChargePower":1500.27,"ChargeEnergy":1.8,"SlaveError":4,"ChargeTime":4355,"HousePower":0.0,"FVPower":0.0,"BatteryPower":0.0,"Paused":0,"Locked":0,"Timer":0,"Intensity":6,"Dynamic":0,"MinIntensity":6,"MaxIntensity":16,"PauseDynamic":0,"FirmwareVersion":"2.1.7","DynamicPowerMode":2,"ContractedPower":4600}', }) diff --git a/tests/components/v2c/snapshots/test_sensor.ambr b/tests/components/v2c/snapshots/test_sensor.ambr index cc8077333cb..7b9ae4a9ff3 100644 --- a/tests/components/v2c/snapshots/test_sensor.ambr +++ b/tests/components/v2c/snapshots/test_sensor.ambr @@ -126,7 +126,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:ev-station', + 'original_icon': None, 'original_name': 'Charge power', 'platform': 'v2c', 'previous_unique_id': None, @@ -141,7 +141,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'EVSE 1.1.1.1 Charge power', - 'icon': 'mdi:ev-station', 'state_class': , 'unit_of_measurement': , }), @@ -255,6 +254,103 @@ 'state': '0.0', }) # --- +# name: test_sensor[sensor.evse_1_1_1_1_installation_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.evse_1_1_1_1_installation_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Installation voltage', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_installation', + 'unique_id': 
'da58ee91f38c2406c2a36d0a1a7f8569_voltage_installation', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_installation_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'EVSE 1.1.1.1 Installation voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_installation_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ip_address-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ip_address', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_ip_address', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ip_address-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 IP address', + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_sensor[sensor.evse_1_1_1_1_meter_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -304,7 +400,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.evse_1_1_1_1_meter_error', 'has_entity_name': True, 'hidden_by': None, @@ -428,3 +524,98 @@ 'state': '0.0', }) # --- 
+# name: test_sensor[sensor.evse_1_1_1_1_signal_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_signal_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Signal status', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'signal_status', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_signal_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_signal_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 Signal status', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_signal_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ssid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SSID', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ssid', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ssid-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 SSID', + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_ssid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/v2c/test_diagnostics.py b/tests/components/v2c/test_diagnostics.py index 770b00e988b..eafbd68e6fc 100644 --- a/tests/components/v2c/test_diagnostics.py +++ b/tests/components/v2c/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -24,7 +25,6 @@ async def test_entry_diagnostics( await init_integration(hass, mock_config_entry) - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) - == snapshot() - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/v2c/test_sensor.py b/tests/components/v2c/test_sensor.py index 9e7e3800767..430f91647dd 100644 --- a/tests/components/v2c/test_sensor.py +++ b/tests/components/v2c/test_sensor.py @@ -28,7 +28,7 @@ async def test_sensor( await init_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - assert [ + assert _METER_ERROR_OPTIONS == [ "no_error", "communication", "reading", @@ -64,4 +64,4 @@ async def test_sensor( "tcp_head_mismatch", "empty_message", "undefined_error", - ] == _METER_ERROR_OPTIONS + ] diff --git a/tests/components/vacuum/conftest.py b/tests/components/vacuum/conftest.py index 5167c868f9f..d298260c575 100644 --- a/tests/components/vacuum/conftest.py +++ b/tests/components/vacuum/conftest.py @@ -1,7 +1,8 @@ """Fixtures for Vacuum platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions 
import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/vacuum/test_device_condition.py b/tests/components/vacuum/test_device_condition.py index 5cc222a1833..9a2a67f7141 100644 --- a/tests/components/vacuum/test_device_condition.py +++ b/tests/components/vacuum/test_device_condition.py @@ -17,11 +17,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -29,12 +25,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -119,7 +109,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -181,30 +171,30 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_docked - event - test_event2" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_docked - event - test_event2" hass.states.async_set(entry.entity_id, STATE_CLEANING) 
hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_cleaning - event - test_event1" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_cleaning - event - test_event1" # Returning means it's still cleaning hass.states.async_set(entry.entity_id, STATE_RETURNING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_cleaning - event - test_event1" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_cleaning - event - test_event1" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -247,5 +237,5 @@ async def test_if_state_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_cleaning - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_cleaning - event - test_event1" diff --git a/tests/components/vacuum/test_device_trigger.py b/tests/components/vacuum/test_device_trigger.py index 56e351a6446..c186bd4d9eb 100644 --- a/tests/components/vacuum/test_device_trigger.py +++ b/tests/components/vacuum/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - 
"""Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -182,7 +175,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -247,18 +240,18 @@ async def test_if_fires_on_state_change( # Fake that the entity is cleaning hass.states.async_set(entry.entity_id, STATE_CLEANING) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"cleaning - device - {entry.entity_id} - docked - cleaning" ) # Fake that the entity is docked hass.states.async_set(entry.entity_id, STATE_DOCKED) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"docked - device - {entry.entity_id} - cleaning - docked" ) @@ -267,7 +260,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -313,9 +306,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is cleaning hass.states.async_set(entry.entity_id, STATE_CLEANING) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"cleaning - device - {entry.entity_id} - docked - cleaning" ) @@ -324,7 +317,7 @@ async def 
test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -370,16 +363,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_CLEANING) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - docked - cleaning - 0:00:05" ) diff --git a/tests/components/vallox/conftest.py b/tests/components/vallox/conftest.py index 9f65734b926..a6ea95944b3 100644 --- a/tests/components/vallox/conftest.py +++ b/tests/components/vallox/conftest.py @@ -112,9 +112,9 @@ def default_metrics(): "A_CYC_UUID5": 10, "A_CYC_UUID6": 11, "A_CYC_UUID7": 12, - "A_CYC_BOOST_TIMER": 30, - "A_CYC_FIREPLACE_TIMER": 30, - "A_CYC_EXTRA_TIMER": 30, + "A_CYC_BOOST_TIMER": 0, + "A_CYC_FIREPLACE_TIMER": 0, + "A_CYC_EXTRA_TIMER": 0, "A_CYC_MODE": 0, "A_CYC_STATE": 0, "A_CYC_FILTER_CHANGED_YEAR": 24, diff --git a/tests/components/vallox/test_sensor.py b/tests/components/vallox/test_sensor.py index d7af7bbb576..dd8d8026d06 100644 --- a/tests/components/vallox/test_sensor.py +++ b/tests/components/vallox/test_sensor.py @@ -135,3 +135,48 @@ async def test_cell_state_sensor( # Assert sensor = hass.states.get("sensor.vallox_cell_state") assert sensor.state == expected_state + + +@pytest.mark.parametrize( + ("metrics", "expected_state"), + [ + ( + {"A_CYC_STATE": 0}, + "unknown", + ), + ( 
+ {"A_CYC_STATE": 1}, + "unknown", + ), + ( + {"A_CYC_EXTRA_TIMER": 10}, + "10", + ), + ( + {"A_CYC_FIREPLACE_TIMER": 9}, + "9", + ), + ( + {"A_CYC_BOOST_TIMER": 8}, + "8", + ), + ], +) +async def test_profile_duration_sensor( + metrics, + expected_state, + mock_entry: MockConfigEntry, + hass: HomeAssistant, + setup_fetch_metric_data_mock, +) -> None: + """Test profile sensor in different states.""" + # Arrange + setup_fetch_metric_data_mock(metrics=metrics) + + # Act + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + # Assert + sensor = hass.states.get("sensor.vallox_profile_duration") + assert sensor.state == expected_state diff --git a/tests/components/valve/test_init.py b/tests/components/valve/test_init.py index 3ef3b1ff4b0..e4519bcef08 100644 --- a/tests/components/valve/test_init.py +++ b/tests/components/valve/test_init.py @@ -1,8 +1,9 @@ """The tests for Valve.""" +from collections.abc import Generator + import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.valve import ( DOMAIN, diff --git a/tests/components/velbus/conftest.py b/tests/components/velbus/conftest.py index 3d59ad615c6..402acb821be 100644 --- a/tests/components/velbus/conftest.py +++ b/tests/components/velbus/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Velbus tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.velbus.const import DOMAIN from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/velbus/test_config_flow.py b/tests/components/velbus/test_config_flow.py index 59effcae706..432fcea10db 100644 --- a/tests/components/velbus/test_config_flow.py +++ b/tests/components/velbus/test_config_flow.py @@ -1,10 +1,10 @@ """Tests for the Velbus config flow.""" +from collections.abc import Generator from unittest.mock 
import AsyncMock, MagicMock, patch import pytest import serial.tools.list_ports -from typing_extensions import Generator from velbusaio.exceptions import VelbusConnectionFailed from homeassistant.components import usb diff --git a/tests/components/velux/conftest.py b/tests/components/velux/conftest.py index 692216827b2..512b2a007ed 100644 --- a/tests/components/velux/conftest.py +++ b/tests/components/velux/conftest.py @@ -1,9 +1,9 @@ """Configuration for Velux tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/velux/test_config_flow.py b/tests/components/velux/test_config_flow.py index 8021ad52810..5f7932d358a 100644 --- a/tests/components/velux/test_config_flow.py +++ b/tests/components/velux/test_config_flow.py @@ -10,7 +10,7 @@ import pytest from pyvlx import PyVLXException from homeassistant.components.velux import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -69,22 +69,8 @@ async def test_user_errors( assert result["errors"] == {"base": error_name} -async def test_import_valid_config(hass: HomeAssistant) -> None: - """Test import initialized flow with valid config.""" - with patch(PYVLX_CONFIG_FLOW_CLASS_PATH, autospec=True): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=DUMMY_DATA, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DUMMY_DATA[CONF_HOST] - assert result["data"] == DUMMY_DATA - - -@pytest.mark.parametrize("flow_source", [SOURCE_IMPORT, SOURCE_USER]) -async def test_flow_duplicate_entry(hass: HomeAssistant, flow_source: str) -> None: - """Test import initialized flow 
with a duplicate entry.""" +async def test_flow_duplicate_entry(hass: HomeAssistant) -> None: + """Test initialized flow with a duplicate entry.""" with patch(PYVLX_CONFIG_FLOW_CLASS_PATH, autospec=True): conf_entry: MockConfigEntry = MockConfigEntry( domain=DOMAIN, title=DUMMY_DATA[CONF_HOST], data=DUMMY_DATA @@ -94,26 +80,8 @@ async def test_flow_duplicate_entry(hass: HomeAssistant, flow_source: str) -> No result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": flow_source}, + context={"source": SOURCE_USER}, data=DUMMY_DATA, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize(("error", "error_name"), error_types_to_test) -async def test_import_errors( - hass: HomeAssistant, error: Exception, error_name: str -) -> None: - """Test import initialized flow with exceptions.""" - with patch( - PYVLX_CONFIG_FLOW_CONNECT_FUNCTION_PATH, - side_effect=error, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=DUMMY_DATA, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error_name diff --git a/tests/components/venstar/__init__.py b/tests/components/venstar/__init__.py index 116a3be0925..6a40212b793 100644 --- a/tests/components/venstar/__init__.py +++ b/tests/components/venstar/__init__.py @@ -15,7 +15,7 @@ class VenstarColorTouchMock: pin=None, proto="http", SSLCert=False, - ): + ) -> None: """Initialize the Venstar library.""" self.status = {} self.model = "COLORTOUCH" diff --git a/tests/components/vera/common.py b/tests/components/vera/common.py index 5e0fac6c84a..c5e3a5d4931 100644 --- a/tests/components/vera/common.py +++ b/tests/components/vera/common.py @@ -83,7 +83,7 @@ def new_simple_controller_config( class ComponentFactory: """Factory class.""" - def __init__(self, vera_controller_class_mock): + def __init__(self, vera_controller_class_mock) -> None: """Initialize the 
factory.""" self.vera_controller_class_mock = vera_controller_class_mock diff --git a/tests/components/verisure/conftest.py b/tests/components/verisure/conftest.py index 03086ac2ead..5aafcda2bb3 100644 --- a/tests/components/verisure/conftest.py +++ b/tests/components/verisure/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.verisure.const import CONF_GIID, DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/vesync/snapshots/test_diagnostics.ambr b/tests/components/vesync/snapshots/test_diagnostics.ambr index fcb2cc7b286..54ed8acf2d7 100644 --- a/tests/components/vesync/snapshots/test_diagnostics.ambr +++ b/tests/components/vesync/snapshots/test_diagnostics.ambr @@ -38,13 +38,7 @@ 'setDisplay', 'setLevel', ]), - 'cid': 'abcdefghabcdefghabcdefghabcdefgh', - 'config': dict({ - 'auto_target_humidity': 60, - 'automatic_stop': True, - 'display': True, - }), - 'config_dict': dict({ + '_config_dict': dict({ 'features': list([ 'warm_mist', 'nightlight', @@ -71,6 +65,7 @@ 'LUH-A602S-WEUR', 'LUH-A602S-WEU', 'LUH-A602S-WJP', + 'LUH-A602S-WUSC', ]), 'module': 'VeSyncHumid200300S', 'warm_mist_levels': list([ @@ -80,6 +75,16 @@ 3, ]), }), + '_features': list([ + 'warm_mist', + 'nightlight', + ]), + 'cid': 'abcdefghabcdefghabcdefghabcdefgh', + 'config': dict({ + 'auto_target_humidity': 60, + 'automatic_stop': True, + 'display': True, + }), 'config_module': 'WFON_AHM_LUH-A602S-WUS_US', 'connection_status': 'online', 'connection_type': 'WiFi+BTOnboarding+BTNotify', @@ -105,10 +110,6 @@ 'device_type': 'LUH-A602S-WUS', 'enabled': False, 'extension': None, - 'features': list([ - 'warm_mist', - 'nightlight', - ]), 'mac_id': '**REDACTED**', 'manager': '**REDACTED**', 'mist_levels': list([ @@ -203,7 +204,7 @@ 'auto', 'sleep', ]), - 'supported_features': 
9, + 'supported_features': 57, }), 'entity_id': 'fan.fan', 'last_changed': str, diff --git a/tests/components/vesync/snapshots/test_fan.ambr b/tests/components/vesync/snapshots/test_fan.ambr index 59304e92d9d..21985afd7bf 100644 --- a/tests/components/vesync/snapshots/test_fan.ambr +++ b/tests/components/vesync/snapshots/test_fan.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', + 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -64,7 +66,7 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': 'air-purifier', 'unit_of_measurement': None, @@ -79,7 +81,7 @@ 'auto', 'sleep', ]), - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_131s', @@ -112,8 +114,10 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', + 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -153,7 +157,7 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': 'asd_sdfKIHG7IJHGwJGJ7GJ_ag5h3G55', 'unit_of_measurement': None, @@ -174,7 +178,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_200s', @@ -207,8 +211,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', + 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -249,7 +255,7 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 
'translation_key': None, 'unique_id': '400s-purifier', 'unit_of_measurement': None, @@ -271,7 +277,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_400s', @@ -304,8 +310,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', + 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -346,7 +354,7 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '600s-purifier', 'unit_of_measurement': None, @@ -368,7 +376,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_600s', @@ -401,8 +409,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', + 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -437,8 +447,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', + 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -489,8 +501,10 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', + 'model_id': None, 'name': 'Outlet', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -525,8 +539,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', + 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -561,8 +577,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', + 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': 
None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/vesync/snapshots/test_light.ambr b/tests/components/vesync/snapshots/test_light.ambr index 9990395a36c..36694ae3ef6 100644 --- a/tests/components/vesync/snapshots/test_light.ambr +++ b/tests/components/vesync/snapshots/test_light.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', + 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -58,8 +60,10 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', + 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -94,8 +98,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', + 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -130,8 +136,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', + 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -166,8 +174,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', + 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -254,8 +264,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', + 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -360,8 +372,10 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', + 'model_id': None, 'name': 'Outlet', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -396,8 +410,10 @@ }), 
'manufacturer': 'VeSync', 'model': 'ESL100CW', + 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -499,8 +515,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', + 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/vesync/snapshots/test_sensor.ambr b/tests/components/vesync/snapshots/test_sensor.ambr index 268718fb2fe..11d931e023a 100644 --- a/tests/components/vesync/snapshots/test_sensor.ambr +++ b/tests/components/vesync/snapshots/test_sensor.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', + 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -150,8 +152,10 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', + 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -234,8 +238,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', + 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -411,8 +417,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', + 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -588,8 +596,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', + 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -624,8 +634,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', + 'model_id': None, 
'name': 'Dimmer Switch', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -676,8 +688,10 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', + 'model_id': None, 'name': 'Outlet', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1006,8 +1020,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', + 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -1042,8 +1058,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', + 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/vesync/snapshots/test_switch.ambr b/tests/components/vesync/snapshots/test_switch.ambr index 3df26f74bcf..4b271ee55d9 100644 --- a/tests/components/vesync/snapshots/test_switch.ambr +++ b/tests/components/vesync/snapshots/test_switch.ambr @@ -22,8 +22,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', + 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -58,8 +60,10 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', + 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -94,8 +98,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', + 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -130,8 +136,10 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', + 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, + 
'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -166,8 +174,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', + 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -202,8 +212,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', + 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -254,8 +266,10 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', + 'model_id': None, 'name': 'Outlet', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -334,8 +348,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', + 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -370,8 +386,10 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', + 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/vicare/conftest.py b/tests/components/vicare/conftest.py index 6899839a0e1..372314d9fe2 100644 --- a/tests/components/vicare/conftest.py +++ b/tests/components/vicare/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import AsyncGenerator, Generator from dataclasses import dataclass from unittest.mock import AsyncMock, Mock, patch import pytest from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareService import ViCareDeviceAccessor, readFeature -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.vicare.const import DOMAIN from homeassistant.core import HomeAssistant diff --git 
a/tests/components/vicare/test_diagnostics.py b/tests/components/vicare/test_diagnostics.py index 815b39545a9..6adf4fe0edc 100644 --- a/tests/components/vicare/test_diagnostics.py +++ b/tests/components/vicare/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import MagicMock from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -21,4 +22,4 @@ async def test_diagnostics( hass, hass_client, mock_vicare_gas_boiler ) - assert diag == snapshot + assert diag == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/vicare/test_types.py b/tests/components/vicare/test_types.py new file mode 100644 index 00000000000..575e549f0d9 --- /dev/null +++ b/tests/components/vicare/test_types.py @@ -0,0 +1,87 @@ +"""Test ViCare diagnostics.""" + +import pytest + +from homeassistant.components.climate import PRESET_COMFORT, PRESET_SLEEP +from homeassistant.components.vicare.types import HeatingProgram, VentilationMode + + +@pytest.mark.parametrize( + ("vicare_program", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (HeatingProgram.COMFORT, PRESET_COMFORT), + (HeatingProgram.COMFORT_HEATING, PRESET_COMFORT), + ], +) +async def test_heating_program_to_ha_preset( + vicare_program: str | None, + expected_result: str | None, +) -> None: + """Testing ViCare HeatingProgram to HA Preset.""" + + assert HeatingProgram.to_ha_preset(vicare_program) == expected_result + + +@pytest.mark.parametrize( + ("ha_preset", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (PRESET_SLEEP, HeatingProgram.REDUCED), + ], +) +async def test_ha_preset_to_heating_program( + ha_preset: str | None, + expected_result: str | None, +) -> None: + """Testing HA Preset tp ViCare HeatingProgram.""" + + supported_programs = [ + HeatingProgram.COMFORT, + HeatingProgram.ECO, + HeatingProgram.NORMAL, + HeatingProgram.REDUCED, + ] + assert ( + 
HeatingProgram.from_ha_preset(ha_preset, supported_programs) == expected_result + ) + + +@pytest.mark.parametrize( + ("vicare_mode", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + ("sensorOverride", VentilationMode.SENSOR_OVERRIDE), + ], +) +async def test_ventilation_mode_to_ha_mode( + vicare_mode: str | None, + expected_result: str | None, +) -> None: + """Testing ViCare mode to VentilationMode.""" + + assert VentilationMode.from_vicare_mode(vicare_mode) == expected_result + + +@pytest.mark.parametrize( + ("ha_mode", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (VentilationMode.SENSOR_OVERRIDE, "sensorOverride"), + ], +) +async def test_ha_mode_to_ventilation_mode( + ha_mode: str | None, + expected_result: str | None, +) -> None: + """Testing VentilationMode to ViCare mode.""" + + assert VentilationMode.to_vicare_mode(ha_mode) == expected_result diff --git a/tests/components/vilfo/conftest.py b/tests/components/vilfo/conftest.py index 11b620b82e0..fbc48da28b3 100644 --- a/tests/components/vilfo/conftest.py +++ b/tests/components/vilfo/conftest.py @@ -1,9 +1,9 @@ """Vilfo tests conftest.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.vilfo import DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST diff --git a/tests/components/vizio/conftest.py b/tests/components/vizio/conftest.py index b06ce2e1eb7..923509dea2c 100644 --- a/tests/components/vizio/conftest.py +++ b/tests/components/vizio/conftest.py @@ -1,5 +1,6 @@ """Configure py.test.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest @@ -29,19 +30,19 @@ from .const import ( class MockInput: """Mock Vizio device input.""" - def __init__(self, name): + def __init__(self, name) -> None: """Initialize mock Vizio device input.""" self.meta_name = name self.name = name 
-def get_mock_inputs(input_list): +def get_mock_inputs(input_list) -> list[MockInput]: """Return list of MockInput.""" return [MockInput(device_input) for device_input in input_list] @pytest.fixture(name="vizio_get_unique_id", autouse=True) -def vizio_get_unique_id_fixture(): +def vizio_get_unique_id_fixture() -> Generator[None]: """Mock get vizio unique ID.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.get_unique_id", @@ -51,7 +52,7 @@ def vizio_get_unique_id_fixture(): @pytest.fixture(name="vizio_data_coordinator_update", autouse=True) -def vizio_data_coordinator_update_fixture(): +def vizio_data_coordinator_update_fixture() -> Generator[None]: """Mock get data coordinator update.""" with patch( "homeassistant.components.vizio.coordinator.gen_apps_list_from_url", @@ -61,7 +62,7 @@ def vizio_data_coordinator_update_fixture(): @pytest.fixture(name="vizio_data_coordinator_update_failure") -def vizio_data_coordinator_update_failure_fixture(): +def vizio_data_coordinator_update_failure_fixture() -> Generator[None]: """Mock get data coordinator update failure.""" with patch( "homeassistant.components.vizio.coordinator.gen_apps_list_from_url", @@ -71,7 +72,7 @@ def vizio_data_coordinator_update_failure_fixture(): @pytest.fixture(name="vizio_no_unique_id") -def vizio_no_unique_id_fixture(): +def vizio_no_unique_id_fixture() -> Generator[None]: """Mock no vizio unique ID returrned.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.get_unique_id", @@ -81,7 +82,7 @@ def vizio_no_unique_id_fixture(): @pytest.fixture(name="vizio_connect") -def vizio_connect_fixture(): +def vizio_connect_fixture() -> Generator[None]: """Mock valid vizio device and entry setup.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.validate_ha_config", @@ -91,7 +92,7 @@ def vizio_connect_fixture(): @pytest.fixture(name="vizio_complete_pairing") -def vizio_complete_pairing_fixture(): +def vizio_complete_pairing_fixture() -> 
Generator[None]: """Mock complete vizio pairing workflow.""" with ( patch( @@ -107,7 +108,7 @@ def vizio_complete_pairing_fixture(): @pytest.fixture(name="vizio_start_pairing_failure") -def vizio_start_pairing_failure_fixture(): +def vizio_start_pairing_failure_fixture() -> Generator[None]: """Mock vizio start pairing failure.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.start_pair", @@ -117,7 +118,7 @@ def vizio_start_pairing_failure_fixture(): @pytest.fixture(name="vizio_invalid_pin_failure") -def vizio_invalid_pin_failure_fixture(): +def vizio_invalid_pin_failure_fixture() -> Generator[None]: """Mock vizio failure due to invalid pin.""" with ( patch( @@ -133,14 +134,14 @@ def vizio_invalid_pin_failure_fixture(): @pytest.fixture(name="vizio_bypass_setup") -def vizio_bypass_setup_fixture(): +def vizio_bypass_setup_fixture() -> Generator[None]: """Mock component setup.""" with patch("homeassistant.components.vizio.async_setup_entry", return_value=True): yield @pytest.fixture(name="vizio_bypass_update") -def vizio_bypass_update_fixture(): +def vizio_bypass_update_fixture() -> Generator[None]: """Mock component update.""" with ( patch( @@ -153,7 +154,7 @@ def vizio_bypass_update_fixture(): @pytest.fixture(name="vizio_guess_device_type") -def vizio_guess_device_type_fixture(): +def vizio_guess_device_type_fixture() -> Generator[None]: """Mock vizio async_guess_device_type function.""" with patch( "homeassistant.components.vizio.config_flow.async_guess_device_type", @@ -163,7 +164,7 @@ def vizio_guess_device_type_fixture(): @pytest.fixture(name="vizio_cant_connect") -def vizio_cant_connect_fixture(): +def vizio_cant_connect_fixture() -> Generator[None]: """Mock vizio device can't connect with valid auth.""" with ( patch( @@ -179,7 +180,7 @@ def vizio_cant_connect_fixture(): @pytest.fixture(name="vizio_update") -def vizio_update_fixture(): +def vizio_update_fixture() -> Generator[None]: """Mock valid updates to vizio device.""" with ( patch( @@ 
-223,7 +224,7 @@ def vizio_update_fixture(): @pytest.fixture(name="vizio_update_with_apps") -def vizio_update_with_apps_fixture(vizio_update: pytest.fixture): +def vizio_update_with_apps_fixture(vizio_update: None) -> Generator[None]: """Mock valid updates to vizio device that supports apps.""" with ( patch( @@ -243,7 +244,7 @@ def vizio_update_with_apps_fixture(vizio_update: pytest.fixture): @pytest.fixture(name="vizio_update_with_apps_on_input") -def vizio_update_with_apps_on_input_fixture(vizio_update: pytest.fixture): +def vizio_update_with_apps_on_input_fixture(vizio_update: None) -> Generator[None]: """Mock valid updates to vizio device that supports apps but is on a TV input.""" with ( patch( @@ -263,7 +264,7 @@ def vizio_update_with_apps_on_input_fixture(vizio_update: pytest.fixture): @pytest.fixture(name="vizio_hostname_check") -def vizio_hostname_check(): +def vizio_hostname_check() -> Generator[None]: """Mock vizio hostname resolution.""" with patch( "homeassistant.components.vizio.config_flow.socket.gethostbyname", diff --git a/tests/components/vizio/test_config_flow.py b/tests/components/vizio/test_config_flow.py index 712dd2a31b5..42d4394ca80 100644 --- a/tests/components/vizio/test_config_flow.py +++ b/tests/components/vizio/test_config_flow.py @@ -57,11 +57,8 @@ from .const import ( from tests.common import MockConfigEntry -async def test_user_flow_minimum_fields( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_flow_minimum_fields(hass: HomeAssistant) -> None: """Test user config flow with minimum fields.""" # test form shows result = await hass.config_entries.flow.async_init( @@ -81,11 +78,8 @@ async def test_user_flow_minimum_fields( assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.SPEAKER -async def test_user_flow_all_fields( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - 
vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_flow_all_fields(hass: HomeAssistant) -> None: """Test user config flow with all fields.""" # test form shows result = await hass.config_entries.flow.async_init( @@ -108,11 +102,8 @@ async def test_user_flow_all_fields( assert CONF_APPS not in result["data"] -async def test_speaker_options_flow( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_speaker_options_flow(hass: HomeAssistant) -> None: """Test options config flow for speaker.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_SPEAKER_CONFIG @@ -136,11 +127,8 @@ async def test_speaker_options_flow( assert CONF_APPS not in result["data"] -async def test_tv_options_flow_no_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_tv_options_flow_no_apps(hass: HomeAssistant) -> None: """Test options config flow for TV without providing apps option.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -167,11 +155,8 @@ async def test_tv_options_flow_no_apps( assert CONF_APPS not in result["data"] -async def test_tv_options_flow_with_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_tv_options_flow_with_apps(hass: HomeAssistant) -> None: """Test options config flow for TV with providing apps option.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -199,11 
+184,8 @@ async def test_tv_options_flow_with_apps( assert result["data"][CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -async def test_tv_options_flow_start_with_volume( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_tv_options_flow_start_with_volume(hass: HomeAssistant) -> None: """Test options config flow for TV with providing apps option after providing volume step in initial config.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -241,11 +223,8 @@ async def test_tv_options_flow_start_with_volume( assert result["data"][CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -async def test_user_host_already_configured( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_host_already_configured(hass: HomeAssistant) -> None: """Test host is already configured during user setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -265,11 +244,8 @@ async def test_user_host_already_configured( assert result["errors"] == {CONF_HOST: "existing_config_entry_found"} -async def test_user_serial_number_already_exists( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_serial_number_already_exists(hass: HomeAssistant) -> None: """Test serial_number is already configured with different host and name during user setup.""" # Set up new entry MockConfigEntry( @@ -289,9 +265,8 @@ async def test_user_serial_number_already_exists( assert result["errors"] == {CONF_HOST: "existing_config_entry_found"} -async def test_user_error_on_could_not_connect( - hass: HomeAssistant, vizio_no_unique_id: pytest.fixture 
-) -> None: +@pytest.mark.usefixtures("vizio_no_unique_id") +async def test_user_error_on_could_not_connect(hass: HomeAssistant) -> None: """Test with could_not_connect during user setup due to no connectivity.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -301,8 +276,9 @@ async def test_user_error_on_could_not_connect( assert result["errors"] == {CONF_HOST: "cannot_connect"} +@pytest.mark.usefixtures("vizio_cant_connect") async def test_user_error_on_could_not_connect_invalid_token( - hass: HomeAssistant, vizio_cant_connect: pytest.fixture + hass: HomeAssistant, ) -> None: """Test with could_not_connect during user setup due to invalid token.""" result = await hass.config_entries.flow.async_init( @@ -313,12 +289,10 @@ async def test_user_error_on_could_not_connect_invalid_token( assert result["errors"] == {"base": "cannot_connect"} -async def test_user_tv_pairing_no_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_complete_pairing: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" +) +async def test_user_tv_pairing_no_apps(hass: HomeAssistant) -> None: """Test pairing config flow when access token not provided for tv during user entry and no apps configured.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -344,12 +318,10 @@ async def test_user_tv_pairing_no_apps( assert CONF_APPS not in result["data"] -async def test_user_start_pairing_failure( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_start_pairing_failure: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_start_pairing_failure" +) +async def test_user_start_pairing_failure(hass: HomeAssistant) -> None: """Test 
failure to start pairing from user config flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -360,12 +332,10 @@ async def test_user_start_pairing_failure( assert result["errors"] == {"base": "cannot_connect"} -async def test_user_invalid_pin( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_invalid_pin_failure: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_invalid_pin_failure" +) +async def test_user_invalid_pin(hass: HomeAssistant) -> None: """Test failure to complete pairing from user config flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -383,11 +353,8 @@ async def test_user_invalid_pin( assert result["errors"] == {CONF_PIN: "complete_pairing_failed"} -async def test_user_ignore( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_ignore(hass: HomeAssistant) -> None: """Test user config flow doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -403,11 +370,8 @@ async def test_user_ignore( assert result["type"] is FlowResultType.CREATE_ENTRY -async def test_import_flow_minimum_fields( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_import_flow_minimum_fields(hass: HomeAssistant) -> None: """Test import config flow with minimum fields.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -425,11 +389,8 @@ async def test_import_flow_minimum_fields( assert result["data"][CONF_VOLUME_STEP] == DEFAULT_VOLUME_STEP -async def test_import_flow_all_fields( 
- hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_import_flow_all_fields(hass: HomeAssistant) -> None: """Test import config flow with all fields.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -446,11 +407,8 @@ async def test_import_flow_all_fields( assert result["data"][CONF_VOLUME_STEP] == VOLUME_STEP -async def test_import_entity_already_configured( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_import_entity_already_configured(hass: HomeAssistant) -> None: """Test entity is already configured during import setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -468,11 +426,8 @@ async def test_import_entity_already_configured( assert result["reason"] == "already_configured_device" -async def test_import_flow_update_options( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_import_flow_update_options(hass: HomeAssistant) -> None: """Test import config flow with updated options.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -499,11 +454,8 @@ async def test_import_flow_update_options( assert config_entry.options[CONF_VOLUME_STEP] == VOLUME_STEP + 1 -async def test_import_flow_update_name_and_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_import_flow_update_name_and_apps(hass: HomeAssistant) -> None: """Test import config flow with updated name and apps.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -533,11 +485,8 @@ async def 
test_import_flow_update_name_and_apps( assert config_entry.options[CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -async def test_import_flow_update_remove_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_import_flow_update_remove_apps(hass: HomeAssistant) -> None: """Test import config flow with removed apps.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -566,12 +515,10 @@ async def test_import_flow_update_remove_apps( assert CONF_APPS not in config_entry.options -async def test_import_needs_pairing( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_complete_pairing: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" +) +async def test_import_needs_pairing(hass: HomeAssistant) -> None: """Test pairing config flow when access token not provided for tv during import.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -603,12 +550,10 @@ async def test_import_needs_pairing( assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV -async def test_import_with_apps_needs_pairing( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_complete_pairing: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" +) +async def test_import_with_apps_needs_pairing(hass: HomeAssistant) -> None: """Test pairing config flow when access token not provided for tv but apps are included during import.""" import_config = MOCK_TV_CONFIG_NO_TOKEN.copy() import_config[CONF_APPS] = {CONF_INCLUDE: [CURRENT_APP]} @@ -646,11 +591,8 @@ async def test_import_with_apps_needs_pairing( assert 
result["data"][CONF_APPS][CONF_INCLUDE] == [CURRENT_APP] -async def test_import_flow_additional_configs( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_import_flow_additional_configs(hass: HomeAssistant) -> None: """Test import config flow with additional configs defined in CONF_APPS.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -666,10 +608,9 @@ async def test_import_flow_additional_configs( assert CONF_APPS not in config_entry.options +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") async def test_import_error( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test that error is logged when import config has an error.""" @@ -700,11 +641,8 @@ async def test_import_error( assert len(vizio_log_list) == 1 -async def test_import_ignore( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_import_ignore(hass: HomeAssistant) -> None: """Test import config flow doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -723,12 +661,10 @@ async def test_import_ignore( assert result["type"] is FlowResultType.CREATE_ENTRY -async def test_zeroconf_flow( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_flow(hass: HomeAssistant) -> None: """Test zeroconf config flow.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) result = await hass.config_entries.flow.async_init( @@ -760,12 +696,10 @@ async 
def test_zeroconf_flow( assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.SPEAKER -async def test_zeroconf_flow_already_configured( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_flow_already_configured(hass: HomeAssistant) -> None: """Test entity is already configured during zeroconf setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -786,12 +720,10 @@ async def test_zeroconf_flow_already_configured( assert result["reason"] == "already_configured" -async def test_zeroconf_flow_with_port_in_host( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_flow_with_port_in_host(hass: HomeAssistant) -> None: """Test entity is already configured during zeroconf setup when port is in host.""" entry = MockConfigEntry( domain=DOMAIN, @@ -814,12 +746,10 @@ async def test_zeroconf_flow_with_port_in_host( assert result["reason"] == "already_configured" -async def test_zeroconf_dupe_fail( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_dupe_fail(hass: HomeAssistant) -> None: """Test zeroconf config flow when device gets discovered multiple times.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) result = await hass.config_entries.flow.async_init( @@ -840,12 +770,10 @@ async def test_zeroconf_dupe_fail( assert result["reason"] == "already_in_progress" -async def test_zeroconf_ignore( - hass: 
HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_ignore(hass: HomeAssistant) -> None: """Test zeroconf discovery doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -863,11 +791,8 @@ async def test_zeroconf_ignore( assert result["type"] is FlowResultType.FORM -async def test_zeroconf_no_unique_id( - hass: HomeAssistant, - vizio_guess_device_type: pytest.fixture, - vizio_no_unique_id: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_guess_device_type", "vizio_no_unique_id") +async def test_zeroconf_no_unique_id(hass: HomeAssistant) -> None: """Test zeroconf discovery aborts when unique_id is None.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) @@ -879,12 +804,10 @@ async def test_zeroconf_no_unique_id( assert result["reason"] == "cannot_connect" -async def test_zeroconf_abort_when_ignored( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_abort_when_ignored(hass: HomeAssistant) -> None: """Test zeroconf discovery aborts when the same host has been ignored.""" entry = MockConfigEntry( domain=DOMAIN, @@ -904,13 +827,13 @@ async def test_zeroconf_abort_when_ignored( assert result["reason"] == "already_configured" -async def test_zeroconf_flow_already_configured_hostname( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_hostname_check: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", + "vizio_bypass_setup", + "vizio_hostname_check", + 
"vizio_guess_device_type", +) +async def test_zeroconf_flow_already_configured_hostname(hass: HomeAssistant) -> None: """Test entity is already configured during zeroconf setup when existing entry uses hostname.""" config = MOCK_SPEAKER_CONFIG.copy() config[CONF_HOST] = "hostname" @@ -933,12 +856,8 @@ async def test_zeroconf_flow_already_configured_hostname( assert result["reason"] == "already_configured" -async def test_import_flow_already_configured_hostname( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_hostname_check: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup", "vizio_hostname_check") +async def test_import_flow_already_configured_hostname(hass: HomeAssistant) -> None: """Test entity is already configured during import setup when existing entry uses hostname.""" config = MOCK_SPEAKER_CONFIG.copy() config[CONF_HOST] = "hostname" diff --git a/tests/components/vizio/test_init.py b/tests/components/vizio/test_init.py index eba5af437b1..c2b19377809 100644 --- a/tests/components/vizio/test_init.py +++ b/tests/components/vizio/test_init.py @@ -15,11 +15,8 @@ from .const import MOCK_SPEAKER_CONFIG, MOCK_USER_VALID_TV_CONFIG, UNIQUE_ID from tests.common import MockConfigEntry, async_fire_time_changed -async def test_setup_component( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_setup_component(hass: HomeAssistant) -> None: """Test component setup.""" assert await async_setup_component( hass, DOMAIN, {DOMAIN: MOCK_USER_VALID_TV_CONFIG} @@ -28,11 +25,8 @@ async def test_setup_component( assert len(hass.states.async_entity_ids(Platform.MEDIA_PLAYER)) == 1 -async def test_tv_load_and_unload( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", 
"vizio_update") +async def test_tv_load_and_unload(hass: HomeAssistant) -> None: """Test loading and unloading TV entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID @@ -52,11 +46,8 @@ async def test_tv_load_and_unload( assert DOMAIN not in hass.data -async def test_speaker_load_and_unload( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_speaker_load_and_unload(hass: HomeAssistant) -> None: """Test loading and unloading speaker entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID @@ -76,11 +67,11 @@ async def test_speaker_load_and_unload( assert DOMAIN not in hass.data +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_update", "vizio_data_coordinator_update_failure" +) async def test_coordinator_update_failure( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, - vizio_data_coordinator_update_failure: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test coordinator update failure after 10 days.""" diff --git a/tests/components/vizio/test_media_player.py b/tests/components/vizio/test_media_player.py index 52a5732706d..12e19077c8e 100644 --- a/tests/components/vizio/test_media_player.py +++ b/tests/components/vizio/test_media_player.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from datetime import timedelta from typing import Any @@ -129,7 +130,7 @@ def _get_attr_and_assert_base_attr( @asynccontextmanager async def _cm_for_test_setup_without_apps( all_settings: dict[str, Any], vizio_power_state: bool | None -) -> None: +) -> AsyncIterator[None]: """Context manager to setup test for Vizio devices without including app specific patches.""" with ( patch( @@ -211,7 +212,7 @@ async 
def _test_setup_speaker( @asynccontextmanager async def _cm_for_test_setup_tv_with_apps( hass: HomeAssistant, device_config: dict[str, Any], app_config: dict[str, Any] -) -> None: +) -> AsyncIterator[None]: """Context manager to setup test for Vizio TV with support for apps.""" config_entry = MockConfigEntry( domain=DOMAIN, data=vol.Schema(VIZIO_SCHEMA)(device_config), unique_id=UNIQUE_ID @@ -280,63 +281,46 @@ async def _test_service( assert service_call.call_args == call(*args, **kwargs) -async def test_speaker_on( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_speaker_on(hass: HomeAssistant) -> None: """Test Vizio Speaker entity setup when on.""" await _test_setup_speaker(hass, True) -async def test_speaker_off( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_speaker_off(hass: HomeAssistant) -> None: """Test Vizio Speaker entity setup when off.""" await _test_setup_speaker(hass, False) +@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_speaker_unavailable( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, ) -> None: """Test Vizio Speaker entity setup when unavailable.""" await _test_setup_speaker(hass, None) -async def test_init_tv_on( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_init_tv_on(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when on.""" await _test_setup_tv(hass, True) -async def test_init_tv_off( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_init_tv_off(hass: HomeAssistant) 
-> None: """Test Vizio TV entity setup when off.""" await _test_setup_tv(hass, False) -async def test_init_tv_unavailable( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_init_tv_unavailable(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when unavailable.""" await _test_setup_tv(hass, None) -async def test_setup_unavailable_speaker( - hass: HomeAssistant, vizio_cant_connect: pytest.fixture -) -> None: +@pytest.mark.usefixtures("vizio_cant_connect") +async def test_setup_unavailable_speaker(hass: HomeAssistant) -> None: """Test speaker entity sets up as unavailable.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID @@ -346,9 +330,8 @@ async def test_setup_unavailable_speaker( assert hass.states.get("media_player.vizio").state == STATE_UNAVAILABLE -async def test_setup_unavailable_tv( - hass: HomeAssistant, vizio_cant_connect: pytest.fixture -) -> None: +@pytest.mark.usefixtures("vizio_cant_connect") +async def test_setup_unavailable_tv(hass: HomeAssistant) -> None: """Test TV entity sets up as unavailable.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID @@ -358,11 +341,8 @@ async def test_setup_unavailable_tv( assert hass.states.get("media_player.vizio").state == STATE_UNAVAILABLE -async def test_services( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_services(hass: HomeAssistant) -> None: """Test all Vizio media player entity services.""" await _test_setup_tv(hass, True) @@ -449,11 +429,8 @@ async def test_services( await _test_service(hass, MP_DOMAIN, "pause", SERVICE_MEDIA_PAUSE, None) -async def test_options_update( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: 
pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_options_update(hass: HomeAssistant) -> None: """Test when config entry update event fires.""" await _test_setup_speaker(hass, True) config_entry = hass.config_entries.async_entries(DOMAIN)[0] @@ -476,7 +453,7 @@ async def _test_update_availability_switch( hass: HomeAssistant, initial_power_state: bool | None, final_power_state: bool | None, - caplog: pytest.fixture, + caplog: pytest.LogCaptureFixture, ) -> None: now = dt_util.utcnow() future_interval = timedelta(minutes=1) @@ -516,30 +493,27 @@ async def _test_update_availability_switch( assert len(vizio_log_list) == 1 +@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_update_unavailable_to_available( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device becomes available after being unavailable.""" await _test_update_availability_switch(hass, None, True, caplog) +@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_update_available_to_unavailable( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device becomes unavailable after being available.""" await _test_update_availability_switch(hass, True, None, caplog) +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps.""" @@ -564,10 +538,9 @@ async def test_setup_with_apps( ) +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_include( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test 
device setup with apps and apps["include"] in config.""" @@ -582,10 +555,9 @@ async def test_setup_with_apps_include( assert "app_id" not in attr +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_exclude( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps and apps["exclude"] in config.""" @@ -600,10 +572,9 @@ async def test_setup_with_apps_exclude( assert "app_id" not in attr +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_additional_apps_config( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps and apps["additional_configs"] in config.""" @@ -679,10 +650,9 @@ def test_invalid_apps_config(hass: HomeAssistant) -> None: vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_SPEAKER_APPS_FAILURE) +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_unknown_app_config( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps where app config returned is unknown.""" @@ -696,10 +666,9 @@ async def test_setup_with_unknown_app_config( assert attr["app_id"] == UNKNOWN_APP_CONFIG +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_no_running_app( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps where no app is running.""" @@ -713,11 +682,8 @@ async def test_setup_with_no_running_app( assert "app_name" not in attr -async def test_setup_tv_without_mute( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - 
vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_setup_tv_without_mute(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when mute property isn't returned by Vizio API.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -737,10 +703,9 @@ async def test_setup_tv_without_mute( assert "is_volume_muted" not in attr +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_apps_update( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps where no app is running.""" @@ -772,9 +737,8 @@ async def test_apps_update( assert len(apps) == len(APP_LIST) -async def test_vizio_update_with_apps_on_input( - hass: HomeAssistant, vizio_connect, vizio_update_with_apps_on_input -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps_on_input") +async def test_vizio_update_with_apps_on_input(hass: HomeAssistant) -> None: """Test a vizio TV with apps that is on a TV input.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/voicerss/test_tts.py b/tests/components/voicerss/test_tts.py index 1a2ad002586..776c0ac153a 100644 --- a/tests/components/voicerss/test_tts.py +++ b/tests/components/voicerss/test_tts.py @@ -36,9 +36,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index 6c292241237..aab35bfd029 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -19,9 +19,8 @@ 
_MEDIA_ID = "12345" @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir def _empty_wav() -> bytes: @@ -43,9 +42,12 @@ async def test_pipeline( """Test that pipeline function is called from RTP protocol.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -98,8 +100,8 @@ async def test_pipeline( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -238,9 +240,12 @@ async def test_tts_timeout( """Test that TTS will time out based on its length.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -298,8 +303,8 @@ async def test_tts_timeout( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -361,9 +366,12 @@ async def test_tts_wrong_extension( """Test that TTS will only stream WAV audio.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -403,8 +411,8 @@ async def test_tts_wrong_extension( with ( patch( - 
"homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -456,9 +464,12 @@ async def test_tts_wrong_wav_format( """Test that TTS will only stream WAV audio with a specific format.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -505,8 +516,8 @@ async def test_tts_wrong_wav_format( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -558,9 +569,12 @@ async def test_empty_tts_output( """Test that TTS will not stream when output is empty.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] @@ -591,8 +605,8 @@ async def test_empty_tts_output( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", diff --git a/tests/components/wake_on_lan/conftest.py b/tests/components/wake_on_lan/conftest.py index cec3076d83e..8a1cb3f41eb 100644 --- a/tests/components/wake_on_lan/conftest.py +++ b/tests/components/wake_on_lan/conftest.py @@ -2,14 +2,24 @@ from __future__ import annotations +from collections.abc import Generator +from typing import Any from unittest.mock import AsyncMock, 
MagicMock, patch import pytest -from typing_extensions import Generator + +from homeassistant.components.wake_on_lan.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +DEFAULT_MAC = "00:01:02:03:04:05" @pytest.fixture -def mock_send_magic_packet() -> AsyncMock: +def mock_send_magic_packet() -> Generator[AsyncMock]: """Mock magic packet.""" with patch("wakeonlan.send_magic_packet") as mock_send: yield mock_send @@ -27,3 +37,48 @@ def mock_subprocess_call(subprocess_call_return_value: int) -> Generator[MagicMo with patch("homeassistant.components.wake_on_lan.switch.sp.call") as mock_sp: mock_sp.return_value = subprocess_call_return_value yield mock_sp + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.wake_on_lan.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. 
+ + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "255.255.255.255", + CONF_BROADCAST_PORT: 9, + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Wake on LAN integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + title=f"Wake on LAN {DEFAULT_MAC}", + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/wake_on_lan/test_button.py b/tests/components/wake_on_lan/test_button.py new file mode 100644 index 00000000000..abcae686a1b --- /dev/null +++ b/tests/components/wake_on_lan/test_button.py @@ -0,0 +1,54 @@ +"""The tests for the wake on lan button platform.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util import dt as dt_util + +from tests.common import MockConfigEntry + + +async def test_state( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + loaded_entry: MockConfigEntry, +) -> None: + """Test button default state.""" + + state = hass.states.get("button.wake_on_lan_00_01_02_03_04_05") + assert state is not None + assert state.state == STATE_UNKNOWN + + entry = entity_registry.async_get("button.wake_on_lan_00_01_02_03_04_05") + assert entry + assert entry.unique_id == "00:01:02:03:04:05" + + +async def test_service_calls( + hass: HomeAssistant, 
+ freezer: FrozenDateTimeFactory, + loaded_entry: MockConfigEntry, + mock_send_magic_packet: AsyncMock, +) -> None: + """Test service call.""" + + now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") + freezer.move_to(now) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.wake_on_lan_00_01_02_03_04_05"}, + blocking=True, + ) + + assert ( + hass.states.get("button.wake_on_lan_00_01_02_03_04_05").state == now.isoformat() + ) diff --git a/tests/components/wake_on_lan/test_config_flow.py b/tests/components/wake_on_lan/test_config_flow.py new file mode 100644 index 00000000000..b565fba505e --- /dev/null +++ b/tests/components/wake_on_lan/test_config_flow.py @@ -0,0 +1,109 @@ +"""Test the Wake on LAN config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant import config_entries +from homeassistant.components.wake_on_lan.const import DOMAIN +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import DEFAULT_MAC + +from tests.common import MockConfigEntry + + +async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "255.255.255.255", + CONF_BROADCAST_PORT: 9, + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "255.255.255.255", 
CONF_BROADCAST_PORT: 9, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_BROADCAST_ADDRESS: "192.168.255.255", + CONF_BROADCAST_PORT: 10, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "192.168.255.255", + CONF_BROADCAST_PORT: 10, + } + + await hass.async_block_till_done() + + assert loaded_entry.options == { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "192.168.255.255", + CONF_BROADCAST_PORT: 10, + } + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 1 + + state = hass.states.get("button.wake_on_lan_00_01_02_03_04_05") + assert state is not None + + +async def test_entry_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "255.255.255.255", + CONF_BROADCAST_PORT: 9, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/wake_on_lan/test_init.py b/tests/components/wake_on_lan/test_init.py index 8cfb0e6491e..1784f8ef12d 100644 --- 
a/tests/components/wake_on_lan/test_init.py +++ b/tests/components/wake_on_lan/test_init.py @@ -8,9 +8,21 @@ import pytest import voluptuous as vol from homeassistant.components.wake_on_lan import DOMAIN, SERVICE_SEND_MAGIC_PACKET +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED + async def test_send_magic_packet(hass: HomeAssistant) -> None: """Test of send magic packet service call.""" diff --git a/tests/components/wake_on_lan/test_switch.py b/tests/components/wake_on_lan/test_switch.py index 77e1ba55519..9a478b46175 100644 --- a/tests/components/wake_on_lan/test_switch.py +++ b/tests/components/wake_on_lan/test_switch.py @@ -13,6 +13,7 @@ from homeassistant.const import ( STATE_ON, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from tests.common import async_mock_service @@ -64,7 +65,7 @@ async def test_broadcast_config_ip_and_port( hass: HomeAssistant, mock_send_magic_packet: AsyncMock ) -> None: """Test with broadcast address and broadcast port config.""" - mac = "00-01-02-03-04-05" + mac = "00:01:02:03:04:05" broadcast_address = "255.255.255.255" port = 999 @@ -92,6 +93,7 @@ async def test_broadcast_config_ip_and_port( blocking=True, ) + mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with( mac, ip_address=broadcast_address, port=port ) @@ -102,7 +104,7 @@ async def test_broadcast_config_ip( ) -> None: """Test with only broadcast address.""" - mac 
= "00-01-02-03-04-05" + mac = "00:01:02:03:04:05" broadcast_address = "255.255.255.255" assert await async_setup_component( @@ -128,6 +130,7 @@ async def test_broadcast_config_ip( blocking=True, ) + mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with(mac, ip_address=broadcast_address) @@ -136,7 +139,7 @@ async def test_broadcast_config_port( ) -> None: """Test with only broadcast port config.""" - mac = "00-01-02-03-04-05" + mac = "00:01:02:03:04:05" port = 999 assert await async_setup_component( @@ -156,6 +159,7 @@ async def test_broadcast_config_port( blocking=True, ) + mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with(mac, port=port) diff --git a/tests/components/wake_word/test_init.py b/tests/components/wake_word/test_init.py index c19d3e7032f..cdaf7e0e3f0 100644 --- a/tests/components/wake_word/test_init.py +++ b/tests/components/wake_word/test_init.py @@ -1,14 +1,13 @@ """Test wake_word component setup.""" import asyncio -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Generator from functools import partial from pathlib import Path from unittest.mock import patch from freezegun import freeze_time import pytest -from typing_extensions import Generator from homeassistant.components import wake_word from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow diff --git a/tests/components/waqi/conftest.py b/tests/components/waqi/conftest.py index b2e1a7d77d4..75709d4f56e 100644 --- a/tests/components/waqi/conftest.py +++ b/tests/components/waqi/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the World Air Quality Index (WAQI) tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.waqi.const import CONF_STATION_NUMBER, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/water_heater/conftest.py 
b/tests/components/water_heater/conftest.py index 619d5e5c359..df16e5cc6da 100644 --- a/tests/components/water_heater/conftest.py +++ b/tests/components/water_heater/conftest.py @@ -1,7 +1,8 @@ """Fixtures for water heater platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index f883cf47b19..4e0f860366c 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -22,6 +22,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from tests.common import ( @@ -42,7 +43,7 @@ async def test_set_temp_schema_no_req( """Test the set temperature schema with missing required data.""" domain = "climate" service = "test_set_temperature" - schema = SET_TEMPERATURE_SCHEMA + schema = cv.make_entity_service_schema(SET_TEMPERATURE_SCHEMA) calls = async_mock_service(hass, domain, service, schema) data = {"hvac_mode": "off", "entity_id": ["climate.test_id"]} @@ -59,7 +60,7 @@ async def test_set_temp_schema( """Test the set temperature schema with ok required data.""" domain = "water_heater" service = "test_set_temperature" - schema = SET_TEMPERATURE_SCHEMA + schema = cv.make_entity_service_schema(SET_TEMPERATURE_SCHEMA) calls = async_mock_service(hass, domain, service, schema) data = { diff --git a/tests/components/watttime/conftest.py b/tests/components/watttime/conftest.py index 0b7403d45fc..650d07b36a1 100644 --- a/tests/components/watttime/conftest.py +++ b/tests/components/watttime/conftest.py 
@@ -1,6 +1,7 @@ """Define test fixtures for WattTime.""" -import json +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -20,13 +21,17 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, ) +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture(name="client") -def client_fixture(get_grid_region, data_realtime_emissions): +def client_fixture( + get_grid_region: AsyncMock, data_realtime_emissions: JsonObjectType +) -> Mock: """Define an aiowatttime client.""" client = Mock() client.emissions.async_get_grid_region = get_grid_region @@ -37,7 +42,7 @@ def client_fixture(get_grid_region, data_realtime_emissions): @pytest.fixture(name="config_auth") -def config_auth_fixture(hass): +def config_auth_fixture() -> dict[str, Any]: """Define an auth config entry data fixture.""" return { CONF_USERNAME: "user", @@ -46,7 +51,7 @@ def config_auth_fixture(hass): @pytest.fixture(name="config_coordinates") -def config_coordinates_fixture(hass): +def config_coordinates_fixture() -> dict[str, Any]: """Define a coordinates config entry data fixture.""" return { CONF_LATITUDE: 32.87336, @@ -55,7 +60,7 @@ def config_coordinates_fixture(hass): @pytest.fixture(name="config_location_type") -def config_location_type_fixture(hass): +def config_location_type_fixture() -> dict[str, Any]: """Define a location type config entry data fixture.""" return { CONF_LOCATION_TYPE: LOCATION_TYPE_COORDINATES, @@ -63,7 +68,9 @@ def config_location_type_fixture(hass): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config_auth, config_coordinates): +def config_entry_fixture( + hass: HomeAssistant, config_auth: dict[str, Any], config_coordinates: dict[str, Any] 
+) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -82,25 +89,30 @@ def config_entry_fixture(hass, config_auth, config_coordinates): @pytest.fixture(name="data_grid_region", scope="package") -def data_grid_region_fixture(): +def data_grid_region_fixture() -> JsonObjectType: """Define grid region data.""" - return json.loads(load_fixture("grid_region_data.json", "watttime")) + return load_json_object_fixture("grid_region_data.json", "watttime") @pytest.fixture(name="data_realtime_emissions", scope="package") -def data_realtime_emissions_fixture(): +def data_realtime_emissions_fixture() -> JsonObjectType: """Define realtime emissions data.""" - return json.loads(load_fixture("realtime_emissions_data.json", "watttime")) + return load_json_object_fixture("realtime_emissions_data.json", "watttime") @pytest.fixture(name="get_grid_region") -def get_grid_region_fixture(data_grid_region): +def get_grid_region_fixture(data_grid_region: JsonObjectType) -> AsyncMock: """Define an aiowatttime method to get grid region data.""" return AsyncMock(return_value=data_grid_region) @pytest.fixture(name="setup_watttime") -async def setup_watttime_fixture(hass, client, config_auth, config_coordinates): +async def setup_watttime_fixture( + hass: HomeAssistant, + client: Mock, + config_auth: dict[str, Any], + config_coordinates: dict[str, Any], +) -> AsyncGenerator[None]: """Define a fixture to set up WattTime.""" with ( patch( diff --git a/tests/components/watttime/test_diagnostics.py b/tests/components/watttime/test_diagnostics.py index 0526a64aedc..f4465a44d26 100644 --- a/tests/components/watttime/test_diagnostics.py +++ b/tests/components/watttime/test_diagnostics.py @@ -19,4 +19,4 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot(exclude=props("entry_id")) + ) == snapshot(exclude=props("entry_id", "created_at", 
"modified_at")) diff --git a/tests/components/weather/__init__.py b/tests/components/weather/__init__.py index c24baad5237..2dbffbbd617 100644 --- a/tests/components/weather/__init__.py +++ b/tests/components/weather/__init__.py @@ -61,7 +61,7 @@ class MockWeatherTest(WeatherPlatform.MockWeather): async def create_entity( hass: HomeAssistant, - mock_weather: WeatherPlatform.MockWeather, + mock_weather: type[WeatherPlatform.MockWeather], manifest_extra: dict[str, Any] | None, **kwargs, ) -> WeatherPlatform.MockWeather: diff --git a/tests/components/weather/conftest.py b/tests/components/weather/conftest.py index e3e790300a0..78389381ff3 100644 --- a/tests/components/weather/conftest.py +++ b/tests/components/weather/conftest.py @@ -1,7 +1,8 @@ """Fixtures for Weather platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/weatherflow/conftest.py b/tests/components/weatherflow/conftest.py index c0811597228..21c251d39b5 100644 --- a/tests/components/weatherflow/conftest.py +++ b/tests/components/weatherflow/conftest.py @@ -1,12 +1,12 @@ """Fixtures for Weatherflow integration tests.""" import asyncio +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from pyweatherflowudp.client import EVENT_DEVICE_DISCOVERED from pyweatherflowudp.device import WeatherFlowDevice -from typing_extensions import Generator from homeassistant.components.weatherflow.const import DOMAIN diff --git a/tests/components/weatherflow_cloud/__init__.py b/tests/components/weatherflow_cloud/__init__.py index c251e7868cc..31004a27f64 100644 --- a/tests/components/weatherflow_cloud/__init__.py +++ b/tests/components/weatherflow_cloud/__init__.py @@ -1 +1,13 @@ """Tests for the WeatherflowCloud integration.""" + +from homeassistant.core import HomeAssistant + +from 
tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/weatherflow_cloud/conftest.py b/tests/components/weatherflow_cloud/conftest.py index d47da3c7d1b..36b42bf24a8 100644 --- a/tests/components/weatherflow_cloud/conftest.py +++ b/tests/components/weatherflow_cloud/conftest.py @@ -1,10 +1,19 @@ """Common fixtures for the WeatherflowCloud tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from aiohttp import ClientResponseError import pytest -from typing_extensions import Generator +from weatherflow4py.models.rest.forecast import WeatherDataForecastREST +from weatherflow4py.models.rest.observation import ObservationStationREST +from weatherflow4py.models.rest.stations import StationsResponseREST +from weatherflow4py.models.rest.unified import WeatherFlowDataREST + +from homeassistant.components.weatherflow_cloud.const import DOMAIN +from homeassistant.const import CONF_API_TOKEN + +from tests.common import MockConfigEntry, load_fixture @pytest.fixture @@ -56,3 +65,51 @@ def mock_get_stations_401_error() -> Generator[AsyncMock]: side_effect=side_effects, ) as mock_get_stations: yield mock_get_stations + + +MOCK_API_TOKEN = "1234567890" + + +@pytest.fixture +async def mock_config_entry() -> MockConfigEntry: + """Fixture for MockConfigEntry.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_API_TOKEN: MOCK_API_TOKEN}, + version=1, + ) + + +@pytest.fixture +def mock_api(): + """Fixture for Mock WeatherFlowRestAPI.""" + get_stations_response_data = StationsResponseREST.from_json( + load_fixture("stations.json", DOMAIN) + ) + get_forecast_response_data = WeatherDataForecastREST.from_json( + load_fixture("forecast.json", DOMAIN) + ) + 
get_observation_response_data = ObservationStationREST.from_json( + load_fixture("station_observation.json", DOMAIN) + ) + + data = { + 24432: WeatherFlowDataREST( + weather=get_forecast_response_data, + observation=get_observation_response_data, + station=get_stations_response_data.stations[0], + device_observations=None, + ) + } + + with patch( + "homeassistant.components.weatherflow_cloud.coordinator.WeatherFlowRestAPI", + autospec=True, + ) as mock_api_class: + # Create an instance of AsyncMock for the API + mock_api = AsyncMock() + mock_api.get_all_data.return_value = data + # Patch the class to return our mock_api instance + mock_api_class.return_value = mock_api + + yield mock_api diff --git a/tests/components/weatherflow_cloud/fixtures/forecast.json b/tests/components/weatherflow_cloud/fixtures/forecast.json new file mode 100644 index 00000000000..62793983327 --- /dev/null +++ b/tests/components/weatherflow_cloud/fixtures/forecast.json @@ -0,0 +1,4783 @@ +{ + "current_conditions": { + "air_density": 1.0, + "air_temperature": 4.0, + "brightness": 59768, + "conditions": "Clear", + "delta_t": 6.0, + "dew_point": -13.0, + "feels_like": 3.0, + "icon": "clear-day", + "is_precip_local_day_rain_check": true, + "is_precip_local_yesterday_rain_check": true, + "lightning_strike_count_last_1hr": 0, + "lightning_strike_count_last_3hr": 0, + "lightning_strike_last_distance": 39, + "lightning_strike_last_distance_msg": "37 - 41 km", + "lightning_strike_last_epoch": 1698522523, + "precip_accum_local_day": 0, + "precip_accum_local_yesterday": 0, + "precip_minutes_local_day": 0, + "precip_minutes_local_yesterday": 0, + "pressure_trend": "rising", + "relative_humidity": 27, + "sea_level_pressure": 1022.1, + "solar_radiation": 498, + "station_pressure": 795.8, + "time": 1703785918, + "uv": 2, + "wet_bulb_globe_temperature": 2.0, + "wet_bulb_temperature": -1.0, + "wind_avg": 2.0, + "wind_direction": 40, + "wind_direction_cardinal": "NE", + "wind_gust": 4.0 + }, + "forecast": { 
+ "daily": [ + { + "air_temp_high": 5.0, + "air_temp_low": -6.0, + "conditions": "Clear", + "day_num": 28, + "day_start_local": 1703746800, + "icon": "clear-day", + "month_num": 12, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "sunrise": 1703773057, + "sunset": 1703807070 + }, + { + "air_temp_high": 7.0, + "air_temp_low": -1.0, + "conditions": "Clear", + "day_num": 29, + "day_start_local": 1703833200, + "icon": "clear-day", + "month_num": 12, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "sunrise": 1703859473, + "sunset": 1703893513 + }, + { + "air_temp_high": 10.0, + "air_temp_low": -1.0, + "conditions": "Partly Cloudy", + "day_num": 30, + "day_start_local": 1703919600, + "icon": "partly-cloudy-day", + "month_num": 12, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "sunrise": 1703945887, + "sunset": 1703979957 + }, + { + "air_temp_high": 2.0, + "air_temp_low": -3.0, + "conditions": "Partly Cloudy", + "day_num": 31, + "day_start_local": 1704006000, + "icon": "partly-cloudy-day", + "month_num": 12, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704032299, + "sunset": 1704066403 + }, + { + "air_temp_high": 5.0, + "air_temp_low": -4.0, + "conditions": "Partly Cloudy", + "day_num": 1, + "day_start_local": 1704092400, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704118709, + "sunset": 1704152851 + }, + { + "air_temp_high": 4.0, + "air_temp_low": -4.0, + "conditions": "Partly Cloudy", + "day_num": 2, + "day_start_local": 1704178800, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704205116, + "sunset": 1704239300 + }, + { + "air_temp_high": 3.0, + "air_temp_low": -5.0, + "conditions": "Partly 
Cloudy", + "day_num": 3, + "day_start_local": 1704265200, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704291522, + "sunset": 1704325751 + }, + { + "air_temp_high": 4.0, + "air_temp_low": -4.0, + "conditions": "Wintry Mix Possible", + "day_num": 4, + "day_start_local": 1704351600, + "icon": "possibly-sleet-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 20, + "precip_type": "sleet", + "sunrise": 1704377925, + "sunset": 1704412203 + }, + { + "air_temp_high": 1.0, + "air_temp_low": -5.0, + "conditions": "Partly Cloudy", + "day_num": 5, + "day_start_local": 1704438000, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704464327, + "sunset": 1704498656 + }, + { + "air_temp_high": 4.0, + "air_temp_low": -5.0, + "conditions": "Partly Cloudy", + "day_num": 6, + "day_start_local": 1704524400, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704550726, + "sunset": 1704585111 + } + ], + "hourly": [ + { + "air_temperature": 4.0, + "conditions": "Clear", + "feels_like": -1.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 50, + "sea_level_pressure": 1021.3, + "time": 1703786400, + "uv": 4.0, + "wind_avg": 8.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 12.0 + }, + { + "air_temperature": 4.0, + "conditions": "Clear", + "feels_like": 0.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 50, + "sea_level_pressure": 1020.5, + "time": 1703790000, + 
"uv": 5.0, + "wind_avg": 7.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 11.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 0.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 49, + "sea_level_pressure": 1019.3, + "time": 1703793600, + "uv": 5.0, + "wind_avg": 7.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 11.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 1.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 47, + "sea_level_pressure": 1018.9, + "time": 1703797200, + "uv": 4.0, + "wind_avg": 8.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 11.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 1.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 46, + "sea_level_pressure": 1019.9, + "time": 1703800800, + "uv": 3.0, + "wind_avg": 8.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 11.0 + }, + { + "air_temperature": 4.0, + "conditions": "Clear", + "feels_like": -1.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 52, + "sea_level_pressure": 1021.9, + "time": 1703804400, + "uv": 1.0, + "wind_avg": 6.0, + "wind_direction": 340, + "wind_direction_cardinal": "NNW", + "wind_gust": 9.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 17, + 
"precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 64, + "sea_level_pressure": 1025.4, + "time": 1703808000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 69, + "sea_level_pressure": 1026.1, + "time": 1703811600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 65, + "sea_level_pressure": 1026.6, + "time": 1703815200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 66, + "sea_level_pressure": 1026.6, + "time": 1703818800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 63, + "sea_level_pressure": 1026.7, + "time": 1703822400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + 
"wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 61, + "sea_level_pressure": 1026.6, + "time": 1703826000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 61, + "sea_level_pressure": 1026.7, + "time": 1703829600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 64, + "sea_level_pressure": 1026.2, + "time": 1703833200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 63, + "sea_level_pressure": 1025.9, + "time": 1703836800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-snow", + 
"precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 62, + "sea_level_pressure": 1026.1, + "time": 1703840400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 61, + "sea_level_pressure": 1026.0, + "time": 1703844000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 66, + "sea_level_pressure": 1025.9, + "time": 1703847600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 67, + "sea_level_pressure": 1026.3, + "time": 1703851200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 64, + "sea_level_pressure": 1026.8, + "time": 1703854800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + 
"air_temperature": 2.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 60, + "sea_level_pressure": 1027.3, + "time": 1703858400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 5.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 2.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 48, + "sea_level_pressure": 1026.2, + "time": 1703862000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 6.0, + "conditions": "Clear", + "feels_like": 3.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 45, + "sea_level_pressure": 1023.4, + "time": 1703865600, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 2.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 47, + "sea_level_pressure": 1021.9, + "time": 1703869200, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 6.0 + }, + { + "air_temperature": 6.0, + "conditions": "Clear", + "feels_like": 3.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 44, + 
"sea_level_pressure": 1020.8, + "time": 1703872800, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 7.0, + "conditions": "Clear", + "feels_like": 4.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 42, + "sea_level_pressure": 1019.3, + "time": 1703876400, + "uv": 5.0, + "wind_avg": 4.0, + "wind_direction": 360, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 7.0, + "conditions": "Clear", + "feels_like": 5.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 40, + "sea_level_pressure": 1018.1, + "time": 1703880000, + "uv": 5.0, + "wind_avg": 4.0, + "wind_direction": 0, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 7.0, + "conditions": "Clear", + "feels_like": 5.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 40, + "sea_level_pressure": 1017.8, + "time": 1703883600, + "uv": 4.0, + "wind_avg": 3.0, + "wind_direction": 10, + "wind_direction_cardinal": "N", + "wind_gust": 5.0 + }, + { + "air_temperature": 7.0, + "conditions": "Clear", + "feels_like": 5.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 41, + "sea_level_pressure": 1018.0, + "time": 1703887200, + "uv": 3.0, + "wind_avg": 3.0, + "wind_direction": 180, + "wind_direction_cardinal": "S", + "wind_gust": 4.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 3.0, + "icon": "clear-day", + 
"local_day": 29, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 46, + "sea_level_pressure": 1018.8, + "time": 1703890800, + "uv": 1.0, + "wind_avg": 3.0, + "wind_direction": 180, + "wind_direction_cardinal": "S", + "wind_gust": 4.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-night", + "local_day": 29, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 55, + "sea_level_pressure": 1020.6, + "time": 1703894400, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 4.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -2.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 62, + "sea_level_pressure": 1020.7, + "time": 1703898000, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 66, + "sea_level_pressure": 1020.7, + "time": 1703901600, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 69, + "sea_level_pressure": 1020.8, + "time": 1703905200, + "uv": 0.0, + 
"wind_avg": 2.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 72, + "sea_level_pressure": 1020.3, + "time": 1703908800, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 73, + "sea_level_pressure": 1019.9, + "time": 1703912400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 74, + "sea_level_pressure": 1019.4, + "time": 1703916000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 75, + "sea_level_pressure": 1019.0, + "time": 1703919600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 1, + "precip": 
0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 75, + "sea_level_pressure": 1018.5, + "time": 1703923200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 74, + "sea_level_pressure": 1018.1, + "time": 1703926800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 74, + "sea_level_pressure": 1017.7, + "time": 1703930400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 74, + "sea_level_pressure": 1017.4, + "time": 1703934000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 73, + "sea_level_pressure": 1017.0, + "time": 1703937600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + 
"wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 72, + "sea_level_pressure": 1016.8, + "time": 1703941200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 69, + "sea_level_pressure": 1016.5, + "time": 1703944800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 63, + "sea_level_pressure": 1016.3, + "time": 1703948400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 55, + "sea_level_pressure": 1015.0, + "time": 1703952000, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 6.0, + "conditions": "Partly Cloudy", + "feels_like": 3.0, + "icon": "partly-cloudy-day", + "local_day": 30, + 
"local_hour": 10, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 45, + "sea_level_pressure": 1013.7, + "time": 1703955600, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 8.0, + "conditions": "Partly Cloudy", + "feels_like": 5.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 39, + "sea_level_pressure": 1012.4, + "time": 1703959200, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 9.0, + "conditions": "Partly Cloudy", + "feels_like": 7.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 36, + "sea_level_pressure": 1011.5, + "time": 1703962800, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 5.0 + }, + { + "air_temperature": 10.0, + "conditions": "Partly Cloudy", + "feels_like": 8.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 35, + "sea_level_pressure": 1010.7, + "time": 1703966400, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 5.0 + }, + { + "air_temperature": 9.0, + "conditions": "Partly Cloudy", + "feels_like": 7.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 35, + "sea_level_pressure": 1009.8, + "time": 1703970000, 
+ "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 5.0 + }, + { + "air_temperature": 8.0, + "conditions": "Partly Cloudy", + "feels_like": 6.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 38, + "sea_level_pressure": 1010.7, + "time": 1703973600, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": 6.0, + "conditions": "Partly Cloudy", + "feels_like": 4.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 43, + "sea_level_pressure": 1011.6, + "time": 1703977200, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": 5.0, + "conditions": "Partly Cloudy", + "feels_like": 3.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 48, + "sea_level_pressure": 1012.5, + "time": 1703980800, + "uv": 2.0, + "wind_avg": 2.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 3.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 1.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 53, + "sea_level_pressure": 1013.1, + "time": 1703984400, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": 
0.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 56, + "sea_level_pressure": 1013.7, + "time": 1703988000, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 60, + "sea_level_pressure": 1014.4, + "time": 1703991600, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 63, + "sea_level_pressure": 1014.7, + "time": 1703995200, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 66, + "sea_level_pressure": 1015.1, + "time": 1703998800, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + 
"relative_humidity": 67, + "sea_level_pressure": 1015.5, + "time": 1704002400, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1015.3, + "time": 1704006000, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1015.0, + "time": 1704009600, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 69, + "sea_level_pressure": 1014.7, + "time": 1704013200, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1015.1, + "time": 1704016800, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 
3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1015.5, + "time": 1704020400, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 74, + "sea_level_pressure": 1015.9, + "time": 1704024000, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 74, + "sea_level_pressure": 1016.7, + "time": 1704027600, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1017.4, + "time": 1704031200, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 8, + "precip": 0, 
+ "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1018.2, + "time": 1704034800, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 3.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1018.2, + "time": 1704038400, + "uv": 1.0, + "wind_avg": 2.0, + "wind_direction": 120, + "wind_direction_cardinal": "ESE", + "wind_gust": 4.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 64, + "sea_level_pressure": 1018.2, + "time": 1704042000, + "uv": 1.0, + "wind_avg": 3.0, + "wind_direction": 120, + "wind_direction_cardinal": "ESE", + "wind_gust": 4.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1018.1, + "time": 1704045600, + "uv": 1.0, + "wind_avg": 3.0, + "wind_direction": 120, + "wind_direction_cardinal": "ESE", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 59, + "sea_level_pressure": 1017.6, + "time": 1704049200, + "uv": 3.0, + 
"wind_avg": 3.0, + "wind_direction": 100, + "wind_direction_cardinal": "E", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 58, + "sea_level_pressure": 1017.0, + "time": 1704052800, + "uv": 3.0, + "wind_avg": 3.0, + "wind_direction": 100, + "wind_direction_cardinal": "E", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 58, + "sea_level_pressure": 1016.4, + "time": 1704056400, + "uv": 3.0, + "wind_avg": 3.0, + "wind_direction": 100, + "wind_direction_cardinal": "E", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 60, + "sea_level_pressure": 1017.9, + "time": 1704060000, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 130, + "wind_direction_cardinal": "SE", + "wind_gust": 5.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 63, + "sea_level_pressure": 1019.4, + "time": 1704063600, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 130, + "wind_direction_cardinal": "SE", + "wind_gust": 4.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, 
+ "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 67, + "sea_level_pressure": 1021.0, + "time": 1704067200, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 130, + "wind_direction_cardinal": "SE", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 72, + "sea_level_pressure": 1021.8, + "time": 1704070800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1022.7, + "time": 1704074400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 82, + "sea_level_pressure": 1023.6, + "time": 1704078000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", 
+ "relative_humidity": 85, + "sea_level_pressure": 1023.6, + "time": 1704081600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1023.6, + "time": 1704085200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 87, + "sea_level_pressure": 1023.6, + "time": 1704088800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 87, + "sea_level_pressure": 1024.0, + "time": 1704092400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1024.5, + "time": 1704096000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + 
"wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1024.9, + "time": 1704099600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 90, + "sea_level_pressure": 1024.8, + "time": 1704103200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 91, + "sea_level_pressure": 1024.6, + "time": 1704106800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 90, + "sea_level_pressure": 1024.5, + "time": 1704110400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 6, + 
"precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1024.4, + "time": 1704114000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1024.4, + "time": 1704117600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 79, + "sea_level_pressure": 1024.4, + "time": 1704121200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 69, + "sea_level_pressure": 1022.7, + "time": 1704124800, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 59, + "sea_level_pressure": 1021.1, + "time": 1704128400, + "uv": 1.0, 
+ "wind_avg": 4.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 6.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1019.5, + "time": 1704132000, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 6.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 1.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 49, + "sea_level_pressure": 1018.5, + "time": 1704135600, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 170, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 5.0, + "conditions": "Partly Cloudy", + "feels_like": 2.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 47, + "sea_level_pressure": 1017.4, + "time": 1704139200, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 170, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 5.0, + "conditions": "Partly Cloudy", + "feels_like": 2.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 48, + "sea_level_pressure": 1016.4, + "time": 1704142800, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 170, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 1.0, + "icon": 
"partly-cloudy-day", + "local_day": 1, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 50, + "sea_level_pressure": 1017.7, + "time": 1704146400, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1018.9, + "time": 1704150000, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 60, + "sea_level_pressure": 1020.2, + "time": 1704153600, + "uv": 3.0, + "wind_avg": 3.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 5.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 66, + "sea_level_pressure": 1020.8, + "time": 1704157200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 71, 
+ "sea_level_pressure": 1021.4, + "time": 1704160800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 75, + "sea_level_pressure": 1022.0, + "time": 1704164400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1021.9, + "time": 1704168000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1021.7, + "time": 1704171600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1021.6, + "time": 1704175200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + 
"air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1020.8, + "time": 1704178800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 77, + "sea_level_pressure": 1020.1, + "time": 1704182400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 77, + "sea_level_pressure": 1019.3, + "time": 1704186000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 77, + "sea_level_pressure": 1019.0, + "time": 1704189600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 4, + "precip": 0, + 
"precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1018.7, + "time": 1704193200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1018.4, + "time": 1704196800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 76, + "sea_level_pressure": 1018.5, + "time": 1704200400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 74, + "sea_level_pressure": 1018.7, + "time": 1704204000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1018.9, + "time": 1704207600, + "uv": 0.0, + 
"wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 67, + "sea_level_pressure": 1018.2, + "time": 1704211200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 62, + "sea_level_pressure": 1017.5, + "time": 1704214800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 58, + "sea_level_pressure": 1016.8, + "time": 1704218400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1015.7, + "time": 1704222000, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 10, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": 
"partly-cloudy-day", + "local_day": 2, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1014.7, + "time": 1704225600, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 10, + "wind_direction_cardinal": "N", + "wind_gust": 7.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1013.6, + "time": 1704229200, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 10, + "wind_direction_cardinal": "N", + "wind_gust": 7.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 56, + "sea_level_pressure": 1014.8, + "time": 1704232800, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 360, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1016.1, + "time": 1704236400, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 360, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 67, + 
"sea_level_pressure": 1017.4, + "time": 1704240000, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 360, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1017.7, + "time": 1704243600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1018.1, + "time": 1704247200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 5.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 82, + "sea_level_pressure": 1018.5, + "time": 1704250800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 5.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1018.4, + "time": 1704254400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + 
"air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1018.4, + "time": 1704258000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 90, + "sea_level_pressure": 1018.4, + "time": 1704261600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 91, + "sea_level_pressure": 1018.4, + "time": 1704265200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 91, + "sea_level_pressure": 1018.3, + "time": 1704268800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 2, + "precip": 0, + "precip_icon": 
"chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 93, + "sea_level_pressure": 1018.3, + "time": 1704272400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 95, + "sea_level_pressure": 1018.1, + "time": 1704276000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 97, + "sea_level_pressure": 1017.8, + "time": 1704279600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 98, + "sea_level_pressure": 1017.6, + "time": 1704283200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 96, + "sea_level_pressure": 1017.7, + "time": 1704286800, + "uv": 0.0, + "wind_avg": 4.0, + 
"wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 93, + "sea_level_pressure": 1017.8, + "time": 1704290400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1017.9, + "time": 1704294000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1016.1, + "time": 1704297600, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1014.3, + "time": 1704301200, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": 
"partly-cloudy-day", + "local_day": 3, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 62, + "sea_level_pressure": 1012.5, + "time": 1704304800, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 6.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 57, + "sea_level_pressure": 1011.4, + "time": 1704308400, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1010.3, + "time": 1704312000, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1009.1, + "time": 1704315600, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 57, + 
"sea_level_pressure": 1010.4, + "time": 1704319200, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 63, + "sea_level_pressure": 1011.7, + "time": 1704322800, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1012.9, + "time": 1704326400, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 74, + "sea_level_pressure": 1013.2, + "time": 1704330000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 79, + "sea_level_pressure": 1013.5, + "time": 1704333600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 6.0 + }, + { + 
"air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 82, + "sea_level_pressure": 1013.8, + "time": 1704337200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1014.0, + "time": 1704340800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1014.1, + "time": 1704344400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1014.3, + "time": 1704348000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 0, + "precip": 0, + "precip_icon": 
"chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 87, + "sea_level_pressure": 1014.6, + "time": 1704351600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 87, + "sea_level_pressure": 1015.0, + "time": 1704355200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1015.3, + "time": 1704358800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1015.7, + "time": 1704362400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1016.0, + "time": 1704366000, + "uv": 0.0, + "wind_avg": 4.0, + 
"wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1016.4, + "time": 1704369600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1016.9, + "time": 1704373200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 84, + "sea_level_pressure": 1017.4, + "time": 1704376800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1018.0, + "time": 1704380400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": 
"partly-cloudy-day", + "local_day": 4, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 72, + "sea_level_pressure": 1016.3, + "time": 1704384000, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 63, + "sea_level_pressure": 1014.6, + "time": 1704387600, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 58, + "sea_level_pressure": 1013.0, + "time": 1704391200, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 54, + "sea_level_pressure": 1011.6, + "time": 1704394800, + "uv": 4.0, + "wind_avg": 5.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 53, + 
"sea_level_pressure": 1010.2, + "time": 1704398400, + "uv": 4.0, + "wind_avg": 5.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1008.8, + "time": 1704402000, + "uv": 4.0, + "wind_avg": 5.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 8.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1009.6, + "time": 1704405600, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 60, + "sea_level_pressure": 1010.3, + "time": 1704409200, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 65, + "sea_level_pressure": 1011.0, + "time": 1704412800, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 7.0 + }, + { + 
"air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 70, + "sea_level_pressure": 1011.2, + "time": 1704416400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 75, + "sea_level_pressure": 1011.4, + "time": 1704420000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 80, + "sea_level_pressure": 1011.6, + "time": 1704423600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 84, + "sea_level_pressure": 1011.7, + "time": 1704427200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 22, + "precip": 0, + 
"precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1011.8, + "time": 1704430800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1011.9, + "time": 1704434400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 90, + "sea_level_pressure": 1012.6, + "time": 1704438000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 92, + "sea_level_pressure": 1013.3, + "time": 1704441600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 93, + "sea_level_pressure": 1014.0, + "time": 1704445200, + "uv": 
0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 95, + "sea_level_pressure": 1014.7, + "time": 1704448800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 96, + "sea_level_pressure": 1015.4, + "time": 1704452400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 95, + "sea_level_pressure": 1016.1, + "time": 1704456000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 91, + "sea_level_pressure": 1015.9, + "time": 1704459600, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + 
"feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1015.7, + "time": 1704463200, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1015.4, + "time": 1704466800, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1015.2, + "time": 1704470400, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 66, + "sea_level_pressure": 1015.0, + "time": 1704474000, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 8.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": 
"sleet", + "relative_humidity": 63, + "sea_level_pressure": 1014.7, + "time": 1704477600, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 8.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1015.0, + "time": 1704481200, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 8.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1015.2, + "time": 1704484800, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 62, + "sea_level_pressure": 1015.5, + "time": 1704488400, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 65, + "sea_level_pressure": 1015.7, + "time": 1704492000, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + 
"wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 67, + "sea_level_pressure": 1015.9, + "time": 1704495600, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 70, + "sea_level_pressure": 1016.2, + "time": 1704499200, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1016.1, + "time": 1704502800, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 77, + "sea_level_pressure": 1016.0, + "time": 1704506400, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 20, + 
"precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 80, + "sea_level_pressure": 1015.8, + "time": 1704510000, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1015.7, + "time": 1704513600, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1015.6, + "time": 1704517200, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 92, + "sea_level_pressure": 1015.5, + "time": 1704520800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 95, + "sea_level_pressure": 1015.6, + "time": 1704524400, + 
"uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -11.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 98, + "sea_level_pressure": 1015.7, + "time": 1704528000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -11.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 99, + "sea_level_pressure": 1015.7, + "time": 1704531600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -11.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 100, + "sea_level_pressure": 1015.8, + "time": 1704535200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -11.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 98, + "sea_level_pressure": 1015.9, + "time": 1704538800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + 
"feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 93, + "sea_level_pressure": 1016.0, + "time": 1704542400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1015.8, + "time": 1704546000, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1015.7, + "time": 1704549600, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 69, + "sea_level_pressure": 1015.6, + "time": 1704553200, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + 
"relative_humidity": 61, + "sea_level_pressure": 1015.5, + "time": 1704556800, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 56, + "sea_level_pressure": 1015.4, + "time": 1704560400, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1015.2, + "time": 1704564000, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 51, + "sea_level_pressure": 1015.1, + "time": 1704567600, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 51, + "sea_level_pressure": 1015.0, + "time": 1704571200, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 8.0 + }, + 
{ + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1014.8, + "time": 1704574800, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 7.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1014.7, + "time": 1704578400, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 57, + "sea_level_pressure": 1014.5, + "time": 1704582000, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 59, + "sea_level_pressure": 1014.4, + "time": 1704585600, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 18, + "precip": 0, + "precip_icon": 
"chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 62, + "sea_level_pressure": 1013.9, + "time": 1704589200, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 65, + "sea_level_pressure": 1013.4, + "time": 1704592800, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1012.9, + "time": 1704596400, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1012.4, + "time": 1704600000, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1011.9, + "time": 1704603600, + "uv": 0.0, + "wind_avg": 
5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 75, + "sea_level_pressure": 1011.4, + "time": 1704607200, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + } + ] + }, + "latitude": 43.94962, + "location_name": "My Home Station", + "longitude": -102.86831, + "source_id_conditions": 5, + "status": { + "status_code": 0, + "status_message": "SUCCESS" + }, + "timezone": "America/Denver", + "timezone_offset_minutes": -420, + "units": { + "units_air_density": "kg/m3", + "units_brightness": "lux", + "units_distance": "km", + "units_other": "metric", + "units_precip": "mm", + "units_pressure": "mb", + "units_solar_radiation": "w/m2", + "units_temp": "c", + "units_wind": "mps" + } +} diff --git a/tests/components/weatherflow_cloud/fixtures/station_observation.json b/tests/components/weatherflow_cloud/fixtures/station_observation.json new file mode 100644 index 00000000000..148b180df73 --- /dev/null +++ b/tests/components/weatherflow_cloud/fixtures/station_observation.json @@ -0,0 +1,100 @@ +{ + "elevation": 2063.150146484375, + "is_public": true, + "latitude": 43.94962, + "longitude": -102.86831, + "obs": [ + { + "air_density": 0.96139, + "air_temperature": 10.5, + "barometric_pressure": 782.8, + "brightness": 757, + "delta_t": 8.4, + "dew_point": -10.4, + "feels_like": 10.5, + "heat_index": 10.5, + "lightning_strike_count": 0, + "lightning_strike_count_last_1hr": 0, + "lightning_strike_count_last_3hr": 0, + "lightning_strike_last_distance": 26, + "lightning_strike_last_epoch": 1707346875, + "precip": 0.0, + "precip_accum_last_1hr": 0.0, + "precip_accum_local_day": 0.0, + 
"precip_accum_local_day_final": 0.0, + "precip_accum_local_yesterday": 0.0, + "precip_accum_local_yesterday_final": 0.0, + "precip_analysis_type_yesterday": 0, + "precip_minutes_local_day": 0, + "precip_minutes_local_yesterday": 0, + "precip_minutes_local_yesterday_final": 0, + "pressure_trend": "steady", + "relative_humidity": 22, + "sea_level_pressure": 1006.2, + "solar_radiation": 6, + "station_pressure": 782.8, + "timestamp": 1708994629, + "uv": 0.03, + "wet_bulb_globe_temperature": 4.6, + "wet_bulb_temperature": 2.1, + "wind_avg": 1.4, + "wind_chill": 10.5, + "wind_direction": 203, + "wind_gust": 3.2, + "wind_lull": 0.3 + } + ], + "outdoor_keys": [ + "timestamp", + "air_temperature", + "barometric_pressure", + "station_pressure", + "pressure_trend", + "sea_level_pressure", + "relative_humidity", + "precip", + "precip_accum_last_1hr", + "precip_accum_local_day", + "precip_accum_local_day_final", + "precip_accum_local_yesterday_final", + "precip_minutes_local_day", + "precip_minutes_local_yesterday_final", + "wind_avg", + "wind_direction", + "wind_gust", + "wind_lull", + "solar_radiation", + "uv", + "brightness", + "lightning_strike_last_epoch", + "lightning_strike_last_distance", + "lightning_strike_count", + "lightning_strike_count_last_1hr", + "lightning_strike_count_last_3hr", + "feels_like", + "heat_index", + "wind_chill", + "dew_point", + "wet_bulb_temperature", + "wet_bulb_globe_temperature", + "delta_t", + "air_density" + ], + "public_name": "My Home Station", + "station_id": 24432, + "station_name": "My Home Station", + "station_units": { + "units_direction": "degrees", + "units_distance": "mi", + "units_other": "metric", + "units_precip": "in", + "units_pressure": "hpa", + "units_temp": "f", + "units_wind": "bft" + }, + "status": { + "status_code": 0, + "status_message": "SUCCESS" + }, + "timezone": "America/Denver" +} diff --git a/tests/components/weatherflow_cloud/fixtures/stations.json b/tests/components/weatherflow_cloud/fixtures/stations.json new 
file mode 100644 index 00000000000..e0ca96bd240 --- /dev/null +++ b/tests/components/weatherflow_cloud/fixtures/stations.json @@ -0,0 +1,132 @@ +{ + "stations": [ + { + "created_epoch": 1658343273, + "devices": [ + { + "device_id": 7654321, + "device_meta": { + "agl": 1.8288, + "environment": "indoor", + "name": "HB-00068123", + "wifi_network_name": "" + }, + "device_type": "HB", + "firmware_revision": "177", + "hardware_revision": "1", + "location_id": 24432, + "serial_number": "HB-00068123" + }, + { + "device_id": 123456, + "device_meta": { + "agl": 1.8288, + "environment": "outdoor", + "name": "ST-11084623", + "wifi_network_name": "" + }, + "device_settings": { + "show_precip_final": true + }, + "device_type": "ST", + "firmware_revision": "172", + "hardware_revision": "1", + "location_id": 24432, + "serial_number": "ST-11084623" + } + ], + "is_local_mode": false, + "last_modified_epoch": 1658344464, + "latitude": 43.94962, + "location_id": 24432, + "longitude": -102.86831, + "name": "My Home Station", + "public_name": "My Home Station", + "station_id": 24432, + "station_items": [ + { + "device_id": 123456, + "item": "air_temperature_humidity", + "location_id": 24432, + "location_item_id": 657904, + "sort": 0, + "station_id": 24432, + "station_item_id": 657904 + }, + { + "device_id": 123456, + "item": "barometric_pressure", + "location_id": 24432, + "location_item_id": 657906, + "sort": 3, + "station_id": 24432, + "station_item_id": 657906 + }, + { + "device_id": 7654321, + "item": "diagnostics", + "location_id": 24432, + "location_item_id": 657912, + "station_id": 24432, + "station_item_id": 657912 + }, + { + "device_id": 123456, + "item": "diagnostics", + "location_id": 24432, + "location_item_id": 657913, + "sort": 6, + "station_id": 24432, + "station_item_id": 657913 + }, + { + "device_id": 123456, + "item": "light", + "location_id": 24432, + "location_item_id": 657908, + "sort": 2, + "station_id": 24432, + "station_item_id": 657908 + }, + { + "device_id": 
123456, + "item": "lightning", + "location_id": 24432, + "location_item_id": 657905, + "sort": 4, + "station_id": 24432, + "station_item_id": 657905 + }, + { + "device_id": 123456, + "item": "rain", + "location_id": 24432, + "location_item_id": 657907, + "sort": 5, + "station_id": 24432, + "station_item_id": 657907 + }, + { + "device_id": 123456, + "item": "wind", + "location_id": 24432, + "location_item_id": 657909, + "sort": 1, + "station_id": 24432, + "station_item_id": 657909 + } + ], + "station_meta": { + "elevation": 2063.150146484375, + "share_with_wf": true, + "share_with_wu": true + }, + "timezone": "America/Denver", + "timezone_offset_minutes": -420 + } + ], + "status": { + "status_code": 0, + "status_message": "SUCCESS" + } +} diff --git a/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..95be86664a2 --- /dev/null +++ b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr @@ -0,0 +1,806 @@ +# serializer version: 1 +# name: test_all_entities[sensor.my_home_station_air_density-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_air_density', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 5, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Air density', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_density', + 'unique_id': '24432_air_density', + 'unit_of_measurement': 'kg/m³', + }) +# --- +# name: 
test_all_entities[sensor.my_home_station_air_density-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station Air density', + 'state_class': , + 'unit_of_measurement': 'kg/m³', + }), + 'context': , + 'entity_id': 'sensor.my_home_station_air_density', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.96139', + }) +# --- +# name: test_all_entities[sensor.my_home_station_dew_point-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_dew_point', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Dew point', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dew_point', + 'unique_id': '24432_dew_point', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_dew_point-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Dew point', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_dew_point', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-10.4', + }) +# --- +# name: test_all_entities[sensor.my_home_station_feels_like-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), 
+ 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_feels_like', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Feels like', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'feels_like', + 'unique_id': '24432_feels_like', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_feels_like-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Feels like', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_feels_like', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_heat_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_heat_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heat index', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heat_index', + 'unique_id': '24432_heat_index', + 
'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_heat_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Heat index', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_heat_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_count', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lightning count', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count', + 'unique_id': '24432_lightning_strike_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station Lightning count', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_count', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count_last_1_hr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ 
+ 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_count_last_1_hr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lightning count last 1 hr', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count_last_1hr', + 'unique_id': '24432_lightning_strike_count_last_1hr', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count_last_1_hr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station Lightning count last 1 hr', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_count_last_1_hr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count_last_3_hr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_count_last_3_hr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lightning count last 3 hr', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count_last_3hr', + 
'unique_id': '24432_lightning_strike_count_last_3hr', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count_last_3_hr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station Lightning count last 3 hr', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_count_last_3_hr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_last_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_last_distance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lightning last distance', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_last_distance', + 'unique_id': '24432_lightning_strike_last_distance', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_last_distance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'distance', + 'friendly_name': 'My Home Station Lightning last distance', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_last_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26', + }) +# --- +# name: 
test_all_entities[sensor.my_home_station_lightning_last_strike-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_last_strike', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lightning last strike', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_last_epoch', + 'unique_id': '24432_lightning_strike_last_epoch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_last_strike-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'timestamp', + 'friendly_name': 'My Home Station Lightning last strike', + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_last_strike', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-02-07T23:01:15+00:00', + }) +# --- +# name: test_all_entities[sensor.my_home_station_pressure_barometric-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_pressure_barometric', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + 
}), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure barometric', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'barometric_pressure', + 'unique_id': '24432_barometric_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_pressure_barometric-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'atmospheric_pressure', + 'friendly_name': 'My Home Station Pressure barometric', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_pressure_barometric', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '782.8', + }) +# --- +# name: test_all_entities[sensor.my_home_station_pressure_sea_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_pressure_sea_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure sea level', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sea_level_pressure', + 'unique_id': '24432_sea_level_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_pressure_sea_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by 
WeatherFlow/Tempest REST Api', + 'device_class': 'atmospheric_pressure', + 'friendly_name': 'My Home Station Pressure sea level', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_pressure_sea_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1006.2', + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_temperature', + 'unique_id': '24432_air_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_wet_bulb_globe_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_wet_bulb_globe_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wet bulb globe temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wet_bulb_globe_temperature', + 'unique_id': '24432_wet_bulb_globe_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_wet_bulb_globe_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Wet bulb globe temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_wet_bulb_globe_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.6', + }) +# --- +# name: test_all_entities[sensor.my_home_station_wet_bulb_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_wet_bulb_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wet bulb temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'wet_bulb_temperature', + 'unique_id': '24432_wet_bulb_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_wet_bulb_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Wet bulb temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_wet_bulb_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.1', + }) +# --- +# name: test_all_entities[sensor.my_home_station_wind_chill-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_wind_chill', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind chill', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wind_chill', + 'unique_id': '24432_wind_chill', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_wind_chill-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Wind chill', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_wind_chill', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- 
diff --git a/tests/components/weatherflow_cloud/snapshots/test_weather.ambr b/tests/components/weatherflow_cloud/snapshots/test_weather.ambr new file mode 100644 index 00000000000..569b744529c --- /dev/null +++ b/tests/components/weatherflow_cloud/snapshots/test_weather.ambr @@ -0,0 +1,62 @@ +# serializer version: 1 +# name: test_weather[weather.my_home_station-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'weather', + 'entity_category': None, + 'entity_id': 'weather.my_home_station', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'weatherflow_forecast_24432', + 'unit_of_measurement': None, + }) +# --- +# name: test_weather[weather.my_home_station-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'dew_point': -13.0, + 'friendly_name': 'My Home Station', + 'humidity': 27, + 'precipitation_unit': , + 'pressure': 795.8, + 'pressure_unit': , + 'supported_features': , + 'temperature': 4.0, + 'temperature_unit': , + 'uv_index': 2, + 'visibility_unit': , + 'wind_bearing': 40.0, + 'wind_gust_speed': 14.4, + 'wind_speed': 7.2, + 'wind_speed_unit': , + }), + 'context': , + 'entity_id': 'weather.my_home_station', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'sunny', + }) +# --- diff --git a/tests/components/weatherflow_cloud/test_sensor.py b/tests/components/weatherflow_cloud/test_sensor.py new file mode 100644 index 00000000000..35ce098f5a7 --- /dev/null +++ 
b/tests/components/weatherflow_cloud/test_sensor.py @@ -0,0 +1,29 @@ +"""Tests for the WeatherFlow Cloud sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_api: AsyncMock, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/weatherflow_cloud/test_weather.py b/tests/components/weatherflow_cloud/test_weather.py new file mode 100644 index 00000000000..04da96df423 --- /dev/null +++ b/tests/components/weatherflow_cloud/test_weather.py @@ -0,0 +1,29 @@ +"""Tests for the WeatherFlow Cloud weather platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_weather( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_api: AsyncMock, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.WEATHER] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/weatherkit/conftest.py b/tests/components/weatherkit/conftest.py index d4b849115f6..14d96d28347 100644 --- a/tests/components/weatherkit/conftest.py +++ b/tests/components/weatherkit/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Apple WeatherKit tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/weatherkit/snapshots/test_weather.ambr b/tests/components/weatherkit/snapshots/test_weather.ambr index 1fbe5389e98..f6fa2f1514b 100644 --- a/tests/components/weatherkit/snapshots/test_weather.ambr +++ b/tests/components/weatherkit/snapshots/test_weather.ambr @@ -1,294 +1,4 @@ # serializer version: 1 -# name: test_daily_forecast - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 
'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }) -# --- -# name: test_daily_forecast[forecast] - dict({ - 'weather.home': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 
'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }), - }) -# --- -# name: test_daily_forecast[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 
'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }) -# --- # name: test_daily_forecast[get_forecasts] dict({ 'weather.home': dict({ @@ -387,11978 +97,6 @@ }), }) # --- -# name: test_hourly_forecast - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T14:00:00Z', - 'dew_point': 21.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 
'pressure': 1009.24, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 264, - 'wind_gust_speed': 13.44, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 80.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 261, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.64, - }), - dict({ - 'apparent_temperature': 23.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.12, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 252, - 'wind_gust_speed': 11.15, - 'wind_speed': 6.14, - }), - dict({ - 'apparent_temperature': 23.5, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.7, - 'uv_index': 0, - 'wind_bearing': 248, - 'wind_gust_speed': 11.57, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T18:00:00Z', - 'dew_point': 20.8, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.05, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 12.42, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 23.0, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.3, - 'uv_index': 0, - 'wind_bearing': 224, - 'wind_gust_speed': 11.3, - 'wind_speed': 5.34, - }), - 
dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T20:00:00Z', - 'dew_point': 20.4, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.31, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 221, - 'wind_gust_speed': 10.57, - 'wind_speed': 5.13, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T21:00:00Z', - 'dew_point': 20.5, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.55, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 10.63, - 'wind_speed': 5.7, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.79, - 'temperature': 22.8, - 'uv_index': 1, - 'wind_bearing': 258, - 'wind_gust_speed': 10.47, - 'wind_speed': 5.22, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T23:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.95, - 'temperature': 24.0, - 'uv_index': 2, - 'wind_bearing': 282, - 'wind_gust_speed': 12.74, - 'wind_speed': 5.71, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T00:00:00Z', - 'dew_point': 21.5, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.35, - 'temperature': 25.1, - 'uv_index': 3, - 'wind_bearing': 294, - 'wind_gust_speed': 13.87, - 'wind_speed': 6.53, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-09T01:00:00Z', - 'dew_point': 21.8, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 26.5, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 16.04, - 'wind_speed': 6.54, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T02:00:00Z', - 'dew_point': 22.0, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.23, - 'temperature': 27.6, - 'uv_index': 6, - 'wind_bearing': 314, - 'wind_gust_speed': 18.1, - 'wind_speed': 7.32, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T03:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.86, - 'temperature': 28.3, - 'uv_index': 6, - 'wind_bearing': 317, - 'wind_gust_speed': 20.77, - 'wind_speed': 9.1, - }), - dict({ - 'apparent_temperature': 31.5, - 'cloud_coverage': 69.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T04:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.65, - 'temperature': 28.6, - 'uv_index': 6, - 'wind_bearing': 311, - 'wind_gust_speed': 21.27, - 'wind_speed': 10.21, - }), - dict({ - 'apparent_temperature': 31.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T05:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.48, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 317, - 'wind_gust_speed': 19.62, - 'wind_speed': 10.53, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 
1009.54, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 335, - 'wind_gust_speed': 18.98, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.76, - 'temperature': 27.1, - 'uv_index': 2, - 'wind_bearing': 338, - 'wind_gust_speed': 17.04, - 'wind_speed': 7.75, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.05, - 'temperature': 26.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 14.75, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 
'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': 
'2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 
1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 
'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 
'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 7.84, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 
'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 
'dew_point': 22.3, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 
30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), 
- dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 
'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T21:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.71, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 16.66, - 'wind_speed': 10.68, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T22:00:00Z', - 'dew_point': 21.9, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 
24.4, - 'uv_index': 1, - 'wind_bearing': 137, - 'wind_gust_speed': 17.21, - 'wind_speed': 10.61, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.05, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 19.23, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 29.5, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.07, - 'temperature': 26.6, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 20.61, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 82.0, - 'condition': 'rainy', - 'datetime': '2023-09-12T01:00:00Z', - 'dew_point': 23.1, - 'humidity': 75, - 'precipitation': 0.2, - 'precipitation_probability': 16.0, - 'pressure': 1011.89, - 'temperature': 27.9, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 23.35, - 'wind_speed': 11.98, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 29.0, - 'uv_index': 5, - 'wind_bearing': 143, - 'wind_gust_speed': 26.45, - 'wind_speed': 13.01, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.15, - 'temperature': 29.8, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 28.95, - 'wind_speed': 13.9, - }), - dict({ - 'apparent_temperature': 
34.0, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.79, - 'temperature': 30.2, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 27.9, - 'wind_speed': 13.95, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T05:00:00Z', - 'dew_point': 23.1, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.43, - 'temperature': 30.4, - 'uv_index': 4, - 'wind_bearing': 140, - 'wind_gust_speed': 26.53, - 'wind_speed': 13.78, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T06:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.21, - 'temperature': 30.1, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 24.56, - 'wind_speed': 13.74, - }), - dict({ - 'apparent_temperature': 32.0, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.26, - 'temperature': 29.1, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 22.78, - 'wind_speed': 13.21, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.51, - 'temperature': 28.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 19.92, - 'wind_speed': 12.0, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T09:00:00Z', - 
'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.8, - 'temperature': 27.2, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 17.65, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T10:00:00Z', - 'dew_point': 21.4, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 15.87, - 'wind_speed': 10.23, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T11:00:00Z', - 'dew_point': 21.3, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1011.79, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 13.9, - 'wind_speed': 9.39, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T12:00:00Z', - 'dew_point': 21.2, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 47.0, - 'pressure': 1012.12, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.32, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1012.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.18, - 'wind_speed': 8.59, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T14:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 
1012.09, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.84, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T15:00:00Z', - 'dew_point': 21.3, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.99, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.93, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T16:00:00Z', - 'dew_point': 21.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 16.74, - 'wind_speed': 9.49, - }), - dict({ - 'apparent_temperature': 24.7, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T17:00:00Z', - 'dew_point': 20.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.75, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 17.45, - 'wind_speed': 9.12, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.04, - 'wind_speed': 8.68, - }), - dict({ - 'apparent_temperature': 24.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 16.8, - 'wind_speed': 8.61, - }), - dict({ - 
'apparent_temperature': 23.9, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T20:00:00Z', - 'dew_point': 20.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.23, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.35, - 'wind_speed': 8.36, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T21:00:00Z', - 'dew_point': 20.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.49, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 14.09, - 'wind_speed': 7.77, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T22:00:00Z', - 'dew_point': 21.0, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.72, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 152, - 'wind_gust_speed': 14.04, - 'wind_speed': 7.25, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T23:00:00Z', - 'dew_point': 21.4, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 25.5, - 'uv_index': 2, - 'wind_bearing': 149, - 'wind_gust_speed': 15.31, - 'wind_speed': 7.14, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-13T00:00:00Z', - 'dew_point': 21.8, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 27.1, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 16.42, - 'wind_speed': 6.89, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T01:00:00Z', - 'dew_point': 
22.0, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.65, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 137, - 'wind_gust_speed': 18.64, - 'wind_speed': 6.65, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T02:00:00Z', - 'dew_point': 21.9, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.26, - 'temperature': 29.4, - 'uv_index': 5, - 'wind_bearing': 128, - 'wind_gust_speed': 21.69, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 33.0, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T03:00:00Z', - 'dew_point': 21.9, - 'humidity': 62, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.88, - 'temperature': 30.1, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 23.41, - 'wind_speed': 7.33, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T04:00:00Z', - 'dew_point': 22.0, - 'humidity': 61, - 'precipitation': 0.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.55, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 56, - 'wind_gust_speed': 23.1, - 'wind_speed': 8.09, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 61, - 'precipitation': 1.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.29, - 'temperature': 30.2, - 'uv_index': 4, - 'wind_bearing': 20, - 'wind_gust_speed': 21.81, - 'wind_speed': 9.46, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T06:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 2.3, - 'precipitation_probability': 11.0, - 'pressure': 1011.17, - 'temperature': 29.7, - 'uv_index': 3, 
- 'wind_bearing': 20, - 'wind_gust_speed': 19.72, - 'wind_speed': 9.8, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 69.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T07:00:00Z', - 'dew_point': 22.4, - 'humidity': 68, - 'precipitation': 1.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.32, - 'temperature': 28.8, - 'uv_index': 1, - 'wind_bearing': 18, - 'wind_gust_speed': 17.55, - 'wind_speed': 9.23, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T08:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.6, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 27, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.05, - }), - dict({ - 'apparent_temperature': 29.4, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T09:00:00Z', - 'dew_point': 23.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 32, - 'wind_gust_speed': 12.17, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T10:00:00Z', - 'dew_point': 22.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.3, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 69, - 'wind_gust_speed': 11.64, - 'wind_speed': 6.69, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.71, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.23, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 
'condition': 'cloudy', - 'datetime': '2023-09-13T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.96, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.47, - 'wind_speed': 5.73, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T13:00:00Z', - 'dew_point': 22.3, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.03, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 13.57, - 'wind_speed': 5.66, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.99, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 15.07, - 'wind_speed': 5.83, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T15:00:00Z', - 'dew_point': 22.2, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.95, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 16.06, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T16:00:00Z', - 'dew_point': 22.0, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.9, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 16.05, - 'wind_speed': 5.75, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T17:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 
'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.52, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T18:00:00Z', - 'dew_point': 21.8, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.87, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.01, - 'wind_speed': 5.32, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 22.8, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.39, - 'wind_speed': 5.33, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.22, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.79, - 'wind_speed': 5.43, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.12, - 'wind_speed': 5.52, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T22:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.59, - 'temperature': 24.3, - 'uv_index': 1, - 'wind_bearing': 147, - 'wind_gust_speed': 
16.14, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T23:00:00Z', - 'dew_point': 22.4, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.74, - 'temperature': 25.7, - 'uv_index': 2, - 'wind_bearing': 146, - 'wind_gust_speed': 19.09, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.78, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 143, - 'wind_gust_speed': 21.6, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T01:00:00Z', - 'dew_point': 23.2, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.61, - 'temperature': 28.7, - 'uv_index': 5, - 'wind_bearing': 138, - 'wind_gust_speed': 23.36, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T02:00:00Z', - 'dew_point': 23.2, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.32, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 24.72, - 'wind_speed': 4.99, - }), - dict({ - 'apparent_temperature': 34.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T03:00:00Z', - 'dew_point': 23.3, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.04, - 'temperature': 30.7, - 'uv_index': 6, - 'wind_bearing': 354, - 'wind_gust_speed': 25.23, - 'wind_speed': 4.74, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 
57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.77, - 'temperature': 31.0, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 24.6, - 'wind_speed': 4.79, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 60.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T05:00:00Z', - 'dew_point': 23.2, - 'humidity': 64, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1012.53, - 'temperature': 30.7, - 'uv_index': 5, - 'wind_bearing': 336, - 'wind_gust_speed': 23.28, - 'wind_speed': 5.07, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 59.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T06:00:00Z', - 'dew_point': 23.1, - 'humidity': 66, - 'precipitation': 0.2, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1012.49, - 'temperature': 30.2, - 'uv_index': 3, - 'wind_bearing': 336, - 'wind_gust_speed': 22.05, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 32.9, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T07:00:00Z', - 'dew_point': 23.0, - 'humidity': 68, - 'precipitation': 0.2, - 'precipitation_probability': 40.0, - 'pressure': 1012.73, - 'temperature': 29.5, - 'uv_index': 2, - 'wind_bearing': 339, - 'wind_gust_speed': 21.18, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T08:00:00Z', - 'dew_point': 22.8, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 45.0, - 'pressure': 1013.16, - 'temperature': 28.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 20.35, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T09:00:00Z', - 'dew_point': 22.5, - 'humidity': 
76, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1013.62, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 347, - 'wind_gust_speed': 19.42, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T10:00:00Z', - 'dew_point': 22.4, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.09, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 348, - 'wind_gust_speed': 18.19, - 'wind_speed': 5.31, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T11:00:00Z', - 'dew_point': 22.4, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.56, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 177, - 'wind_gust_speed': 16.79, - 'wind_speed': 4.28, - }), - dict({ - 'apparent_temperature': 27.5, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.87, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 15.61, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T13:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.91, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 14.7, - 'wind_speed': 4.11, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T14:00:00Z', - 'dew_point': 21.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.8, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 
171, - 'wind_gust_speed': 13.81, - 'wind_speed': 4.97, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T15:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.66, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 170, - 'wind_gust_speed': 12.88, - 'wind_speed': 5.57, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T16:00:00Z', - 'dew_point': 21.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.54, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 12.0, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T17:00:00Z', - 'dew_point': 21.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.45, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 11.43, - 'wind_speed': 5.48, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 44.0, - 'pressure': 1014.45, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 11.42, - 'wind_speed': 5.38, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T19:00:00Z', - 'dew_point': 21.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'pressure': 1014.63, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.15, - 'wind_speed': 5.39, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 
38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T20:00:00Z', - 'dew_point': 21.8, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 51.0, - 'pressure': 1014.91, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 13.54, - 'wind_speed': 5.45, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T21:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 42.0, - 'pressure': 1015.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 15.48, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T22:00:00Z', - 'dew_point': 22.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 28.999999999999996, - 'pressure': 1015.4, - 'temperature': 25.7, - 'uv_index': 1, - 'wind_bearing': 158, - 'wind_gust_speed': 17.86, - 'wind_speed': 5.84, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 77, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.54, - 'temperature': 27.2, - 'uv_index': 2, - 'wind_bearing': 155, - 'wind_gust_speed': 20.19, - 'wind_speed': 6.09, - }), - dict({ - 'apparent_temperature': 32.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T00:00:00Z', - 'dew_point': 23.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.55, - 'temperature': 28.6, - 'uv_index': 4, - 'wind_bearing': 152, - 'wind_gust_speed': 21.83, - 'wind_speed': 6.42, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T01:00:00Z', - 'dew_point': 23.5, - 'humidity': 70, - 'precipitation': 
0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.35, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 144, - 'wind_gust_speed': 22.56, - 'wind_speed': 6.91, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.0, - 'temperature': 30.4, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.83, - 'wind_speed': 7.47, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.62, - 'temperature': 30.9, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.98, - 'wind_speed': 7.95, - }), - dict({ - 'apparent_temperature': 35.4, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T04:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 31.3, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 23.21, - 'wind_speed': 8.44, - }), - dict({ - 'apparent_temperature': 35.6, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T05:00:00Z', - 'dew_point': 23.7, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.95, - 'temperature': 31.5, - 'uv_index': 5, - 'wind_bearing': 344, - 'wind_gust_speed': 23.46, - 'wind_speed': 8.95, - }), - dict({ - 'apparent_temperature': 35.1, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T06:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.83, - 'temperature': 31.1, - 'uv_index': 3, - 
'wind_bearing': 347, - 'wind_gust_speed': 23.64, - 'wind_speed': 9.13, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.96, - 'temperature': 30.3, - 'uv_index': 2, - 'wind_bearing': 350, - 'wind_gust_speed': 23.66, - 'wind_speed': 8.78, - }), - dict({ - 'apparent_temperature': 32.4, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T08:00:00Z', - 'dew_point': 23.1, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 29.0, - 'uv_index': 0, - 'wind_bearing': 356, - 'wind_gust_speed': 23.51, - 'wind_speed': 8.13, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T09:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.61, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 3, - 'wind_gust_speed': 23.21, - 'wind_speed': 7.48, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T10:00:00Z', - 'dew_point': 22.8, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.02, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 20, - 'wind_gust_speed': 22.68, - 'wind_speed': 6.83, - }), - dict({ - 'apparent_temperature': 29.2, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.43, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 129, - 'wind_gust_speed': 22.04, - 'wind_speed': 6.1, - }), - dict({ - 'apparent_temperature': 28.4, 
- 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T12:00:00Z', - 'dew_point': 22.7, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.71, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.64, - 'wind_speed': 5.6, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T13:00:00Z', - 'dew_point': 23.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.52, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 16.35, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T14:00:00Z', - 'dew_point': 22.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.37, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 17.11, - 'wind_speed': 5.79, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.21, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 17.32, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 16.6, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T17:00:00Z', - 'dew_point': 22.5, - 'humidity': 94, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.95, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 219, - 'wind_gust_speed': 15.52, - 'wind_speed': 4.62, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T18:00:00Z', - 'dew_point': 22.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.88, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 216, - 'wind_gust_speed': 14.64, - 'wind_speed': 4.32, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T19:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.91, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 198, - 'wind_gust_speed': 14.06, - 'wind_speed': 4.73, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T20:00:00Z', - 'dew_point': 22.4, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.99, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 189, - 'wind_gust_speed': 13.7, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T21:00:00Z', - 'dew_point': 22.5, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 13.77, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.12, - 'temperature': 25.5, - 'uv_index': 1, - 'wind_bearing': 
179, - 'wind_gust_speed': 14.38, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 52.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.13, - 'temperature': 26.9, - 'uv_index': 2, - 'wind_bearing': 170, - 'wind_gust_speed': 15.2, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.04, - 'temperature': 28.0, - 'uv_index': 4, - 'wind_bearing': 155, - 'wind_gust_speed': 15.85, - 'wind_speed': 4.76, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 24.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T01:00:00Z', - 'dew_point': 22.6, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.52, - 'temperature': 29.2, - 'uv_index': 6, - 'wind_bearing': 110, - 'wind_gust_speed': 16.27, - 'wind_speed': 6.81, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 16.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.01, - 'temperature': 30.2, - 'uv_index': 8, - 'wind_bearing': 30, - 'wind_gust_speed': 16.55, - 'wind_speed': 6.86, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T03:00:00Z', - 'dew_point': 22.0, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.45, - 'temperature': 31.1, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.52, - 'wind_speed': 6.8, - }), - dict({ - 'apparent_temperature': 34.7, - 'cloud_coverage': 10.0, - 
'condition': 'sunny', - 'datetime': '2023-09-16T04:00:00Z', - 'dew_point': 21.9, - 'humidity': 57, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 31.5, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.08, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.39, - 'temperature': 31.8, - 'uv_index': 6, - 'wind_bearing': 20, - 'wind_gust_speed': 15.48, - 'wind_speed': 6.45, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T06:00:00Z', - 'dew_point': 21.7, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.11, - 'temperature': 31.4, - 'uv_index': 4, - 'wind_bearing': 26, - 'wind_gust_speed': 15.08, - 'wind_speed': 6.43, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 7.000000000000001, - 'condition': 'sunny', - 'datetime': '2023-09-16T07:00:00Z', - 'dew_point': 21.7, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.15, - 'temperature': 30.7, - 'uv_index': 2, - 'wind_bearing': 39, - 'wind_gust_speed': 14.88, - 'wind_speed': 6.61, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.41, - 'temperature': 29.6, - 'uv_index': 0, - 'wind_bearing': 72, - 'wind_gust_speed': 14.82, - 'wind_speed': 6.95, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T09:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 
'precipitation_probability': 0.0, - 'pressure': 1012.75, - 'temperature': 28.5, - 'uv_index': 0, - 'wind_bearing': 116, - 'wind_gust_speed': 15.13, - 'wind_speed': 7.45, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 13.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T10:00:00Z', - 'dew_point': 22.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.13, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 16.09, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.47, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.37, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 29.3, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T12:00:00Z', - 'dew_point': 22.9, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.6, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 18.29, - 'wind_speed': 9.21, - }), - dict({ - 'apparent_temperature': 28.7, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T13:00:00Z', - 'dew_point': 23.0, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 25.7, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 18.49, - 'wind_speed': 8.96, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T14:00:00Z', - 'dew_point': 22.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 
162, - 'wind_gust_speed': 18.47, - 'wind_speed': 8.45, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.79, - 'wind_speed': 8.1, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.1, - 'temperature': 24.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 19.81, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T17:00:00Z', - 'dew_point': 22.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.68, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 20.96, - 'wind_speed': 8.3, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T18:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.41, - 'wind_speed': 8.24, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T19:00:00Z', - 'dew_point': 22.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 20.42, - 'wind_speed': 7.62, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 65.0, - 
'condition': 'cloudy', - 'datetime': '2023-09-16T20:00:00Z', - 'dew_point': 22.6, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 18.61, - 'wind_speed': 6.66, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T21:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 17.14, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 26.0, - 'uv_index': 1, - 'wind_bearing': 161, - 'wind_gust_speed': 16.78, - 'wind_speed': 5.5, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.51, - 'temperature': 27.5, - 'uv_index': 2, - 'wind_bearing': 165, - 'wind_gust_speed': 17.21, - 'wind_speed': 5.56, - }), - dict({ - 'apparent_temperature': 31.7, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T00:00:00Z', - 'dew_point': 22.8, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 28.5, - 'uv_index': 4, - 'wind_bearing': 174, - 'wind_gust_speed': 17.96, - 'wind_speed': 6.04, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T01:00:00Z', - 'dew_point': 22.7, - 'humidity': 68, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.4, - 'uv_index': 6, - 'wind_bearing': 192, - 'wind_gust_speed': 19.15, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 28.999999999999996, - 'condition': 'sunny', - 'datetime': '2023-09-17T02:00:00Z', - 'dew_point': 22.8, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 30.1, - 'uv_index': 7, - 'wind_bearing': 225, - 'wind_gust_speed': 20.89, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T03:00:00Z', - 'dew_point': 22.8, - 'humidity': 63, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1009.75, - 'temperature': 30.7, - 'uv_index': 8, - 'wind_bearing': 264, - 'wind_gust_speed': 22.67, - 'wind_speed': 10.27, - }), - dict({ - 'apparent_temperature': 33.9, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T04:00:00Z', - 'dew_point': 22.5, - 'humidity': 62, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1009.18, - 'temperature': 30.5, - 'uv_index': 7, - 'wind_bearing': 293, - 'wind_gust_speed': 23.93, - 'wind_speed': 10.82, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T05:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.6, - 'precipitation_probability': 12.0, - 'pressure': 1008.71, - 'temperature': 30.1, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 24.39, - 'wind_speed': 10.72, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 64, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.46, - 'temperature': 
29.6, - 'uv_index': 3, - 'wind_bearing': 312, - 'wind_gust_speed': 23.9, - 'wind_speed': 10.28, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 47.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.53, - 'temperature': 28.9, - 'uv_index': 1, - 'wind_bearing': 312, - 'wind_gust_speed': 22.3, - 'wind_speed': 9.59, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 70, - 'precipitation': 0.6, - 'precipitation_probability': 15.0, - 'pressure': 1008.82, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 19.73, - 'wind_speed': 8.58, - }), - dict({ - 'apparent_temperature': 29.6, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 74, - 'precipitation': 0.5, - 'precipitation_probability': 15.0, - 'pressure': 1009.21, - 'temperature': 27.0, - 'uv_index': 0, - 'wind_bearing': 291, - 'wind_gust_speed': 16.49, - 'wind_speed': 7.34, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 78, - 'precipitation': 0.4, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1009.65, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 12.71, - 'wind_speed': 5.91, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T11:00:00Z', - 'dew_point': 21.9, - 'humidity': 82, - 'precipitation': 0.3, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.04, - 'temperature': 25.3, - 'uv_index': 0, - 'wind_bearing': 212, - 'wind_gust_speed': 9.16, - 'wind_speed': 
4.54, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T12:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.3, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1010.24, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 192, - 'wind_gust_speed': 7.09, - 'wind_speed': 3.62, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T13:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1010.15, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 185, - 'wind_gust_speed': 7.2, - 'wind_speed': 3.27, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 44.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T14:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1009.87, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.22, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 49.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T15:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.2, - 'precipitation_probability': 31.0, - 'pressure': 1009.56, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 180, - 'wind_gust_speed': 9.21, - 'wind_speed': 3.3, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 94, - 'precipitation': 0.2, - 'precipitation_probability': 33.0, - 'pressure': 1009.29, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 9.0, - 'wind_speed': 3.46, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 
'datetime': '2023-09-17T17:00:00Z', - 'dew_point': 21.7, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 35.0, - 'pressure': 1009.09, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 186, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T18:00:00Z', - 'dew_point': 21.6, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 37.0, - 'pressure': 1009.01, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 7.99, - 'wind_speed': 4.07, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.07, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 258, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.55, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T20:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.23, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 8.77, - 'wind_speed': 5.17, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 38.0, - 'pressure': 1009.47, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 318, - 'wind_gust_speed': 9.69, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 
30.0, - 'pressure': 1009.77, - 'temperature': 24.2, - 'uv_index': 1, - 'wind_bearing': 324, - 'wind_gust_speed': 10.88, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 83, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.09, - 'temperature': 25.1, - 'uv_index': 2, - 'wind_bearing': 329, - 'wind_gust_speed': 12.21, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T00:00:00Z', - 'dew_point': 21.9, - 'humidity': 80, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.33, - 'temperature': 25.7, - 'uv_index': 3, - 'wind_bearing': 332, - 'wind_gust_speed': 13.52, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T01:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1007.43, - 'temperature': 27.2, - 'uv_index': 5, - 'wind_bearing': 330, - 'wind_gust_speed': 11.36, - 'wind_speed': 11.36, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T02:00:00Z', - 'dew_point': 21.6, - 'humidity': 70, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1007.05, - 'temperature': 27.5, - 'uv_index': 6, - 'wind_bearing': 332, - 'wind_gust_speed': 12.06, - 'wind_speed': 12.06, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T03:00:00Z', - 'dew_point': 21.6, - 'humidity': 69, - 'precipitation': 0.5, - 'precipitation_probability': 10.0, - 'pressure': 1006.67, - 'temperature': 27.8, - 'uv_index': 6, - 'wind_bearing': 333, - 'wind_gust_speed': 12.81, - 'wind_speed': 
12.81, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T04:00:00Z', - 'dew_point': 21.5, - 'humidity': 68, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1006.28, - 'temperature': 28.0, - 'uv_index': 5, - 'wind_bearing': 335, - 'wind_gust_speed': 13.68, - 'wind_speed': 13.68, - }), - dict({ - 'apparent_temperature': 30.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T05:00:00Z', - 'dew_point': 21.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1005.89, - 'temperature': 28.1, - 'uv_index': 4, - 'wind_bearing': 336, - 'wind_gust_speed': 14.61, - 'wind_speed': 14.61, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T06:00:00Z', - 'dew_point': 21.2, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 27.0, - 'pressure': 1005.67, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 338, - 'wind_gust_speed': 15.25, - 'wind_speed': 15.25, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T07:00:00Z', - 'dew_point': 21.3, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1005.74, - 'temperature': 27.4, - 'uv_index': 1, - 'wind_bearing': 339, - 'wind_gust_speed': 15.45, - 'wind_speed': 15.45, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T08:00:00Z', - 'dew_point': 21.4, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1005.98, - 'temperature': 26.7, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.38, - 'wind_speed': 15.38, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 68.0, - 
'condition': 'cloudy', - 'datetime': '2023-09-18T09:00:00Z', - 'dew_point': 21.6, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.22, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.27, - 'wind_speed': 15.27, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T10:00:00Z', - 'dew_point': 21.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.44, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 15.09, - 'wind_speed': 15.09, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T11:00:00Z', - 'dew_point': 21.7, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.66, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 336, - 'wind_gust_speed': 14.88, - 'wind_speed': 14.88, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.79, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 333, - 'wind_gust_speed': 14.91, - 'wind_speed': 14.91, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.36, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 83, - 'wind_gust_speed': 4.58, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T14:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 
0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.96, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 4.74, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 24.5, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T15:00:00Z', - 'dew_point': 20.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.6, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 152, - 'wind_gust_speed': 5.63, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T16:00:00Z', - 'dew_point': 20.7, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 22.3, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 6.02, - 'wind_speed': 6.02, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T17:00:00Z', - 'dew_point': 20.4, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.2, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 6.15, - 'wind_speed': 6.15, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T18:00:00Z', - 'dew_point': 20.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.08, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 167, - 'wind_gust_speed': 6.48, - 'wind_speed': 6.48, - }), - dict({ - 'apparent_temperature': 23.2, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T19:00:00Z', - 'dew_point': 19.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.04, - 'temperature': 21.8, - 'uv_index': 0, - 'wind_bearing': 165, - 
'wind_gust_speed': 7.51, - 'wind_speed': 7.51, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 99.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T20:00:00Z', - 'dew_point': 19.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.05, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 8.73, - 'wind_speed': 8.73, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 98.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T21:00:00Z', - 'dew_point': 19.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.06, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 9.21, - 'wind_speed': 9.11, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 96.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T22:00:00Z', - 'dew_point': 19.7, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 171, - 'wind_gust_speed': 9.03, - 'wind_speed': 7.91, - }), - ]), - }) -# --- -# name: test_hourly_forecast[forecast] - dict({ - 'weather.home': dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T14:00:00Z', - 'dew_point': 21.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 264, - 'wind_gust_speed': 13.44, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 80.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 261, - 'wind_gust_speed': 11.91, - 
'wind_speed': 6.64, - }), - dict({ - 'apparent_temperature': 23.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.12, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 252, - 'wind_gust_speed': 11.15, - 'wind_speed': 6.14, - }), - dict({ - 'apparent_temperature': 23.5, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.7, - 'uv_index': 0, - 'wind_bearing': 248, - 'wind_gust_speed': 11.57, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T18:00:00Z', - 'dew_point': 20.8, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.05, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 12.42, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 23.0, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.3, - 'uv_index': 0, - 'wind_bearing': 224, - 'wind_gust_speed': 11.3, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T20:00:00Z', - 'dew_point': 20.4, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.31, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 221, - 'wind_gust_speed': 10.57, - 'wind_speed': 5.13, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 
'datetime': '2023-09-08T21:00:00Z', - 'dew_point': 20.5, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.55, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 10.63, - 'wind_speed': 5.7, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.79, - 'temperature': 22.8, - 'uv_index': 1, - 'wind_bearing': 258, - 'wind_gust_speed': 10.47, - 'wind_speed': 5.22, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T23:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.95, - 'temperature': 24.0, - 'uv_index': 2, - 'wind_bearing': 282, - 'wind_gust_speed': 12.74, - 'wind_speed': 5.71, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T00:00:00Z', - 'dew_point': 21.5, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.35, - 'temperature': 25.1, - 'uv_index': 3, - 'wind_bearing': 294, - 'wind_gust_speed': 13.87, - 'wind_speed': 6.53, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T01:00:00Z', - 'dew_point': 21.8, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 26.5, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 16.04, - 'wind_speed': 6.54, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T02:00:00Z', - 'dew_point': 22.0, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 
'pressure': 1010.23, - 'temperature': 27.6, - 'uv_index': 6, - 'wind_bearing': 314, - 'wind_gust_speed': 18.1, - 'wind_speed': 7.32, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T03:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.86, - 'temperature': 28.3, - 'uv_index': 6, - 'wind_bearing': 317, - 'wind_gust_speed': 20.77, - 'wind_speed': 9.1, - }), - dict({ - 'apparent_temperature': 31.5, - 'cloud_coverage': 69.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T04:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.65, - 'temperature': 28.6, - 'uv_index': 6, - 'wind_bearing': 311, - 'wind_gust_speed': 21.27, - 'wind_speed': 10.21, - }), - dict({ - 'apparent_temperature': 31.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T05:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.48, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 317, - 'wind_gust_speed': 19.62, - 'wind_speed': 10.53, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.54, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 335, - 'wind_gust_speed': 18.98, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.76, - 'temperature': 27.1, - 'uv_index': 2, - 'wind_bearing': 338, - 'wind_gust_speed': 17.04, - 'wind_speed': 7.75, - }), - 
dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.05, - 'temperature': 26.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 14.75, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': 
'2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 
'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 
'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 
'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, 
- 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 7.84, - }), - 
dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 
'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 'dew_point': 22.3, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 
'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 
'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 
1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T21:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.71, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 16.66, - 'wind_speed': 10.68, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T22:00:00Z', - 'dew_point': 21.9, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 24.4, - 'uv_index': 1, - 'wind_bearing': 137, - 'wind_gust_speed': 17.21, - 'wind_speed': 10.61, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.05, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 19.23, - 'wind_speed': 11.13, - }), - dict({ 
- 'apparent_temperature': 29.5, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.07, - 'temperature': 26.6, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 20.61, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 82.0, - 'condition': 'rainy', - 'datetime': '2023-09-12T01:00:00Z', - 'dew_point': 23.1, - 'humidity': 75, - 'precipitation': 0.2, - 'precipitation_probability': 16.0, - 'pressure': 1011.89, - 'temperature': 27.9, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 23.35, - 'wind_speed': 11.98, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 29.0, - 'uv_index': 5, - 'wind_bearing': 143, - 'wind_gust_speed': 26.45, - 'wind_speed': 13.01, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.15, - 'temperature': 29.8, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 28.95, - 'wind_speed': 13.9, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.79, - 'temperature': 30.2, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 27.9, - 'wind_speed': 13.95, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T05:00:00Z', - 'dew_point': 23.1, 
- 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.43, - 'temperature': 30.4, - 'uv_index': 4, - 'wind_bearing': 140, - 'wind_gust_speed': 26.53, - 'wind_speed': 13.78, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T06:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.21, - 'temperature': 30.1, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 24.56, - 'wind_speed': 13.74, - }), - dict({ - 'apparent_temperature': 32.0, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.26, - 'temperature': 29.1, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 22.78, - 'wind_speed': 13.21, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.51, - 'temperature': 28.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 19.92, - 'wind_speed': 12.0, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T09:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.8, - 'temperature': 27.2, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 17.65, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T10:00:00Z', - 'dew_point': 21.4, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 
1011.23, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 15.87, - 'wind_speed': 10.23, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T11:00:00Z', - 'dew_point': 21.3, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1011.79, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 13.9, - 'wind_speed': 9.39, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T12:00:00Z', - 'dew_point': 21.2, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 47.0, - 'pressure': 1012.12, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.32, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1012.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.18, - 'wind_speed': 8.59, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T14:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.09, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.84, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T15:00:00Z', - 'dew_point': 21.3, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.99, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.08, - 'wind_speed': 
8.93, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T16:00:00Z', - 'dew_point': 21.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 16.74, - 'wind_speed': 9.49, - }), - dict({ - 'apparent_temperature': 24.7, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T17:00:00Z', - 'dew_point': 20.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.75, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 17.45, - 'wind_speed': 9.12, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.04, - 'wind_speed': 8.68, - }), - dict({ - 'apparent_temperature': 24.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 16.8, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T20:00:00Z', - 'dew_point': 20.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.23, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.35, - 'wind_speed': 8.36, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T21:00:00Z', - 
'dew_point': 20.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.49, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 14.09, - 'wind_speed': 7.77, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T22:00:00Z', - 'dew_point': 21.0, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.72, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 152, - 'wind_gust_speed': 14.04, - 'wind_speed': 7.25, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T23:00:00Z', - 'dew_point': 21.4, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 25.5, - 'uv_index': 2, - 'wind_bearing': 149, - 'wind_gust_speed': 15.31, - 'wind_speed': 7.14, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-13T00:00:00Z', - 'dew_point': 21.8, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 27.1, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 16.42, - 'wind_speed': 6.89, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T01:00:00Z', - 'dew_point': 22.0, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.65, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 137, - 'wind_gust_speed': 18.64, - 'wind_speed': 6.65, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T02:00:00Z', - 'dew_point': 21.9, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.26, - 'temperature': 
29.4, - 'uv_index': 5, - 'wind_bearing': 128, - 'wind_gust_speed': 21.69, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 33.0, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T03:00:00Z', - 'dew_point': 21.9, - 'humidity': 62, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.88, - 'temperature': 30.1, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 23.41, - 'wind_speed': 7.33, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T04:00:00Z', - 'dew_point': 22.0, - 'humidity': 61, - 'precipitation': 0.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.55, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 56, - 'wind_gust_speed': 23.1, - 'wind_speed': 8.09, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 61, - 'precipitation': 1.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.29, - 'temperature': 30.2, - 'uv_index': 4, - 'wind_bearing': 20, - 'wind_gust_speed': 21.81, - 'wind_speed': 9.46, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T06:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 2.3, - 'precipitation_probability': 11.0, - 'pressure': 1011.17, - 'temperature': 29.7, - 'uv_index': 3, - 'wind_bearing': 20, - 'wind_gust_speed': 19.72, - 'wind_speed': 9.8, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 69.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T07:00:00Z', - 'dew_point': 22.4, - 'humidity': 68, - 'precipitation': 1.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.32, - 'temperature': 28.8, - 'uv_index': 1, - 'wind_bearing': 18, - 'wind_gust_speed': 17.55, - 'wind_speed': 9.23, - }), - dict({ - 'apparent_temperature': 30.8, - 
'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T08:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.6, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 27, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.05, - }), - dict({ - 'apparent_temperature': 29.4, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T09:00:00Z', - 'dew_point': 23.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 32, - 'wind_gust_speed': 12.17, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T10:00:00Z', - 'dew_point': 22.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.3, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 69, - 'wind_gust_speed': 11.64, - 'wind_speed': 6.69, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.71, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.23, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.96, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.47, - 'wind_speed': 5.73, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T13:00:00Z', - 'dew_point': 22.3, - 'humidity': 89, - 'precipitation': 
0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.03, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 13.57, - 'wind_speed': 5.66, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.99, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 15.07, - 'wind_speed': 5.83, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T15:00:00Z', - 'dew_point': 22.2, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.95, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 16.06, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T16:00:00Z', - 'dew_point': 22.0, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.9, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 16.05, - 'wind_speed': 5.75, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T17:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.52, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T18:00:00Z', - 'dew_point': 21.8, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.87, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 149, - 
'wind_gust_speed': 15.01, - 'wind_speed': 5.32, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 22.8, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.39, - 'wind_speed': 5.33, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.22, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.79, - 'wind_speed': 5.43, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.12, - 'wind_speed': 5.52, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T22:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.59, - 'temperature': 24.3, - 'uv_index': 1, - 'wind_bearing': 147, - 'wind_gust_speed': 16.14, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T23:00:00Z', - 'dew_point': 22.4, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.74, - 'temperature': 25.7, - 'uv_index': 2, - 'wind_bearing': 146, - 'wind_gust_speed': 19.09, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 57.99999999999999, - 
'condition': 'partlycloudy', - 'datetime': '2023-09-14T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.78, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 143, - 'wind_gust_speed': 21.6, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T01:00:00Z', - 'dew_point': 23.2, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.61, - 'temperature': 28.7, - 'uv_index': 5, - 'wind_bearing': 138, - 'wind_gust_speed': 23.36, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T02:00:00Z', - 'dew_point': 23.2, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.32, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 24.72, - 'wind_speed': 4.99, - }), - dict({ - 'apparent_temperature': 34.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T03:00:00Z', - 'dew_point': 23.3, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.04, - 'temperature': 30.7, - 'uv_index': 6, - 'wind_bearing': 354, - 'wind_gust_speed': 25.23, - 'wind_speed': 4.74, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.77, - 'temperature': 31.0, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 24.6, - 'wind_speed': 4.79, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 60.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T05:00:00Z', - 'dew_point': 23.2, - 
'humidity': 64, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1012.53, - 'temperature': 30.7, - 'uv_index': 5, - 'wind_bearing': 336, - 'wind_gust_speed': 23.28, - 'wind_speed': 5.07, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 59.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T06:00:00Z', - 'dew_point': 23.1, - 'humidity': 66, - 'precipitation': 0.2, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1012.49, - 'temperature': 30.2, - 'uv_index': 3, - 'wind_bearing': 336, - 'wind_gust_speed': 22.05, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 32.9, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T07:00:00Z', - 'dew_point': 23.0, - 'humidity': 68, - 'precipitation': 0.2, - 'precipitation_probability': 40.0, - 'pressure': 1012.73, - 'temperature': 29.5, - 'uv_index': 2, - 'wind_bearing': 339, - 'wind_gust_speed': 21.18, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T08:00:00Z', - 'dew_point': 22.8, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 45.0, - 'pressure': 1013.16, - 'temperature': 28.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 20.35, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T09:00:00Z', - 'dew_point': 22.5, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1013.62, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 347, - 'wind_gust_speed': 19.42, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T10:00:00Z', - 'dew_point': 22.4, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.09, - 'temperature': 26.3, 
- 'uv_index': 0, - 'wind_bearing': 348, - 'wind_gust_speed': 18.19, - 'wind_speed': 5.31, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T11:00:00Z', - 'dew_point': 22.4, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.56, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 177, - 'wind_gust_speed': 16.79, - 'wind_speed': 4.28, - }), - dict({ - 'apparent_temperature': 27.5, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.87, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 15.61, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T13:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.91, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 14.7, - 'wind_speed': 4.11, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T14:00:00Z', - 'dew_point': 21.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.8, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 13.81, - 'wind_speed': 4.97, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T15:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.66, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 170, - 'wind_gust_speed': 12.88, - 'wind_speed': 5.57, - }), - dict({ - 'apparent_temperature': 24.8, - 
'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T16:00:00Z', - 'dew_point': 21.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.54, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 12.0, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T17:00:00Z', - 'dew_point': 21.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.45, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 11.43, - 'wind_speed': 5.48, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 44.0, - 'pressure': 1014.45, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 11.42, - 'wind_speed': 5.38, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T19:00:00Z', - 'dew_point': 21.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'pressure': 1014.63, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.15, - 'wind_speed': 5.39, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T20:00:00Z', - 'dew_point': 21.8, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 51.0, - 'pressure': 1014.91, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 13.54, - 'wind_speed': 5.45, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T21:00:00Z', - 'dew_point': 22.0, - 
'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 42.0, - 'pressure': 1015.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 15.48, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T22:00:00Z', - 'dew_point': 22.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 28.999999999999996, - 'pressure': 1015.4, - 'temperature': 25.7, - 'uv_index': 1, - 'wind_bearing': 158, - 'wind_gust_speed': 17.86, - 'wind_speed': 5.84, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 77, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.54, - 'temperature': 27.2, - 'uv_index': 2, - 'wind_bearing': 155, - 'wind_gust_speed': 20.19, - 'wind_speed': 6.09, - }), - dict({ - 'apparent_temperature': 32.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T00:00:00Z', - 'dew_point': 23.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.55, - 'temperature': 28.6, - 'uv_index': 4, - 'wind_bearing': 152, - 'wind_gust_speed': 21.83, - 'wind_speed': 6.42, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T01:00:00Z', - 'dew_point': 23.5, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.35, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 144, - 'wind_gust_speed': 22.56, - 'wind_speed': 6.91, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.0, - 'temperature': 30.4, - 
'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.83, - 'wind_speed': 7.47, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.62, - 'temperature': 30.9, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.98, - 'wind_speed': 7.95, - }), - dict({ - 'apparent_temperature': 35.4, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T04:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 31.3, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 23.21, - 'wind_speed': 8.44, - }), - dict({ - 'apparent_temperature': 35.6, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T05:00:00Z', - 'dew_point': 23.7, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.95, - 'temperature': 31.5, - 'uv_index': 5, - 'wind_bearing': 344, - 'wind_gust_speed': 23.46, - 'wind_speed': 8.95, - }), - dict({ - 'apparent_temperature': 35.1, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T06:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.83, - 'temperature': 31.1, - 'uv_index': 3, - 'wind_bearing': 347, - 'wind_gust_speed': 23.64, - 'wind_speed': 9.13, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.96, - 'temperature': 30.3, - 'uv_index': 2, - 'wind_bearing': 350, - 'wind_gust_speed': 23.66, - 'wind_speed': 8.78, - }), - dict({ - 
'apparent_temperature': 32.4, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T08:00:00Z', - 'dew_point': 23.1, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 29.0, - 'uv_index': 0, - 'wind_bearing': 356, - 'wind_gust_speed': 23.51, - 'wind_speed': 8.13, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T09:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.61, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 3, - 'wind_gust_speed': 23.21, - 'wind_speed': 7.48, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T10:00:00Z', - 'dew_point': 22.8, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.02, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 20, - 'wind_gust_speed': 22.68, - 'wind_speed': 6.83, - }), - dict({ - 'apparent_temperature': 29.2, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.43, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 129, - 'wind_gust_speed': 22.04, - 'wind_speed': 6.1, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T12:00:00Z', - 'dew_point': 22.7, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.71, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.64, - 'wind_speed': 5.6, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T13:00:00Z', 
- 'dew_point': 23.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.52, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 16.35, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T14:00:00Z', - 'dew_point': 22.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.37, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 17.11, - 'wind_speed': 5.79, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.21, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 17.32, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 16.6, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T17:00:00Z', - 'dew_point': 22.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.95, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 219, - 'wind_gust_speed': 15.52, - 'wind_speed': 4.62, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T18:00:00Z', - 'dew_point': 22.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.88, - 'temperature': 23.3, 
- 'uv_index': 0, - 'wind_bearing': 216, - 'wind_gust_speed': 14.64, - 'wind_speed': 4.32, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T19:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.91, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 198, - 'wind_gust_speed': 14.06, - 'wind_speed': 4.73, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T20:00:00Z', - 'dew_point': 22.4, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.99, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 189, - 'wind_gust_speed': 13.7, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T21:00:00Z', - 'dew_point': 22.5, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 13.77, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.12, - 'temperature': 25.5, - 'uv_index': 1, - 'wind_bearing': 179, - 'wind_gust_speed': 14.38, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 52.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.13, - 'temperature': 26.9, - 'uv_index': 2, - 'wind_bearing': 170, - 'wind_gust_speed': 15.2, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 
31.2, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.04, - 'temperature': 28.0, - 'uv_index': 4, - 'wind_bearing': 155, - 'wind_gust_speed': 15.85, - 'wind_speed': 4.76, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 24.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T01:00:00Z', - 'dew_point': 22.6, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.52, - 'temperature': 29.2, - 'uv_index': 6, - 'wind_bearing': 110, - 'wind_gust_speed': 16.27, - 'wind_speed': 6.81, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 16.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.01, - 'temperature': 30.2, - 'uv_index': 8, - 'wind_bearing': 30, - 'wind_gust_speed': 16.55, - 'wind_speed': 6.86, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T03:00:00Z', - 'dew_point': 22.0, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.45, - 'temperature': 31.1, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.52, - 'wind_speed': 6.8, - }), - dict({ - 'apparent_temperature': 34.7, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T04:00:00Z', - 'dew_point': 21.9, - 'humidity': 57, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 31.5, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.08, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 56, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.39, - 'temperature': 31.8, - 'uv_index': 6, - 'wind_bearing': 20, - 'wind_gust_speed': 15.48, - 'wind_speed': 6.45, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T06:00:00Z', - 'dew_point': 21.7, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.11, - 'temperature': 31.4, - 'uv_index': 4, - 'wind_bearing': 26, - 'wind_gust_speed': 15.08, - 'wind_speed': 6.43, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 7.000000000000001, - 'condition': 'sunny', - 'datetime': '2023-09-16T07:00:00Z', - 'dew_point': 21.7, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.15, - 'temperature': 30.7, - 'uv_index': 2, - 'wind_bearing': 39, - 'wind_gust_speed': 14.88, - 'wind_speed': 6.61, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.41, - 'temperature': 29.6, - 'uv_index': 0, - 'wind_bearing': 72, - 'wind_gust_speed': 14.82, - 'wind_speed': 6.95, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T09:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.75, - 'temperature': 28.5, - 'uv_index': 0, - 'wind_bearing': 116, - 'wind_gust_speed': 15.13, - 'wind_speed': 7.45, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 13.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T10:00:00Z', - 'dew_point': 22.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.13, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 
140, - 'wind_gust_speed': 16.09, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.47, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.37, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 29.3, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T12:00:00Z', - 'dew_point': 22.9, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.6, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 18.29, - 'wind_speed': 9.21, - }), - dict({ - 'apparent_temperature': 28.7, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T13:00:00Z', - 'dew_point': 23.0, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 25.7, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 18.49, - 'wind_speed': 8.96, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T14:00:00Z', - 'dew_point': 22.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.47, - 'wind_speed': 8.45, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.79, - 'wind_speed': 8.1, - }), - dict({ - 'apparent_temperature': 26.7, - 
'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.1, - 'temperature': 24.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 19.81, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T17:00:00Z', - 'dew_point': 22.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.68, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 20.96, - 'wind_speed': 8.3, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T18:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.41, - 'wind_speed': 8.24, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T19:00:00Z', - 'dew_point': 22.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 20.42, - 'wind_speed': 7.62, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T20:00:00Z', - 'dew_point': 22.6, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 18.61, - 'wind_speed': 6.66, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T21:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 17.14, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 26.0, - 'uv_index': 1, - 'wind_bearing': 161, - 'wind_gust_speed': 16.78, - 'wind_speed': 5.5, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.51, - 'temperature': 27.5, - 'uv_index': 2, - 'wind_bearing': 165, - 'wind_gust_speed': 17.21, - 'wind_speed': 5.56, - }), - dict({ - 'apparent_temperature': 31.7, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T00:00:00Z', - 'dew_point': 22.8, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 28.5, - 'uv_index': 4, - 'wind_bearing': 174, - 'wind_gust_speed': 17.96, - 'wind_speed': 6.04, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T01:00:00Z', - 'dew_point': 22.7, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.4, - 'uv_index': 6, - 'wind_bearing': 192, - 'wind_gust_speed': 19.15, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 28.999999999999996, - 'condition': 'sunny', - 'datetime': '2023-09-17T02:00:00Z', - 'dew_point': 22.8, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 30.1, - 'uv_index': 7, - 
'wind_bearing': 225, - 'wind_gust_speed': 20.89, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T03:00:00Z', - 'dew_point': 22.8, - 'humidity': 63, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1009.75, - 'temperature': 30.7, - 'uv_index': 8, - 'wind_bearing': 264, - 'wind_gust_speed': 22.67, - 'wind_speed': 10.27, - }), - dict({ - 'apparent_temperature': 33.9, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T04:00:00Z', - 'dew_point': 22.5, - 'humidity': 62, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1009.18, - 'temperature': 30.5, - 'uv_index': 7, - 'wind_bearing': 293, - 'wind_gust_speed': 23.93, - 'wind_speed': 10.82, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T05:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.6, - 'precipitation_probability': 12.0, - 'pressure': 1008.71, - 'temperature': 30.1, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 24.39, - 'wind_speed': 10.72, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 64, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.46, - 'temperature': 29.6, - 'uv_index': 3, - 'wind_bearing': 312, - 'wind_gust_speed': 23.9, - 'wind_speed': 10.28, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 47.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.53, - 'temperature': 28.9, - 'uv_index': 1, - 'wind_bearing': 312, - 'wind_gust_speed': 22.3, - 'wind_speed': 9.59, - }), - dict({ - 
'apparent_temperature': 30.6, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 70, - 'precipitation': 0.6, - 'precipitation_probability': 15.0, - 'pressure': 1008.82, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 19.73, - 'wind_speed': 8.58, - }), - dict({ - 'apparent_temperature': 29.6, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 74, - 'precipitation': 0.5, - 'precipitation_probability': 15.0, - 'pressure': 1009.21, - 'temperature': 27.0, - 'uv_index': 0, - 'wind_bearing': 291, - 'wind_gust_speed': 16.49, - 'wind_speed': 7.34, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 78, - 'precipitation': 0.4, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1009.65, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 12.71, - 'wind_speed': 5.91, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T11:00:00Z', - 'dew_point': 21.9, - 'humidity': 82, - 'precipitation': 0.3, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.04, - 'temperature': 25.3, - 'uv_index': 0, - 'wind_bearing': 212, - 'wind_gust_speed': 9.16, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T12:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.3, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1010.24, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 192, - 'wind_gust_speed': 7.09, - 'wind_speed': 3.62, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 
'datetime': '2023-09-17T13:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1010.15, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 185, - 'wind_gust_speed': 7.2, - 'wind_speed': 3.27, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 44.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T14:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1009.87, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.22, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 49.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T15:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.2, - 'precipitation_probability': 31.0, - 'pressure': 1009.56, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 180, - 'wind_gust_speed': 9.21, - 'wind_speed': 3.3, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 94, - 'precipitation': 0.2, - 'precipitation_probability': 33.0, - 'pressure': 1009.29, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 9.0, - 'wind_speed': 3.46, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T17:00:00Z', - 'dew_point': 21.7, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 35.0, - 'pressure': 1009.09, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 186, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T18:00:00Z', - 'dew_point': 21.6, - 'humidity': 95, - 'precipitation': 0.0, - 
'precipitation_probability': 37.0, - 'pressure': 1009.01, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 7.99, - 'wind_speed': 4.07, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.07, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 258, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.55, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T20:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.23, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 8.77, - 'wind_speed': 5.17, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 38.0, - 'pressure': 1009.47, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 318, - 'wind_gust_speed': 9.69, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 30.0, - 'pressure': 1009.77, - 'temperature': 24.2, - 'uv_index': 1, - 'wind_bearing': 324, - 'wind_gust_speed': 10.88, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 83, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.09, - 'temperature': 25.1, - 'uv_index': 2, - 'wind_bearing': 329, - 
'wind_gust_speed': 12.21, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T00:00:00Z', - 'dew_point': 21.9, - 'humidity': 80, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.33, - 'temperature': 25.7, - 'uv_index': 3, - 'wind_bearing': 332, - 'wind_gust_speed': 13.52, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T01:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1007.43, - 'temperature': 27.2, - 'uv_index': 5, - 'wind_bearing': 330, - 'wind_gust_speed': 11.36, - 'wind_speed': 11.36, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T02:00:00Z', - 'dew_point': 21.6, - 'humidity': 70, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1007.05, - 'temperature': 27.5, - 'uv_index': 6, - 'wind_bearing': 332, - 'wind_gust_speed': 12.06, - 'wind_speed': 12.06, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T03:00:00Z', - 'dew_point': 21.6, - 'humidity': 69, - 'precipitation': 0.5, - 'precipitation_probability': 10.0, - 'pressure': 1006.67, - 'temperature': 27.8, - 'uv_index': 6, - 'wind_bearing': 333, - 'wind_gust_speed': 12.81, - 'wind_speed': 12.81, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T04:00:00Z', - 'dew_point': 21.5, - 'humidity': 68, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1006.28, - 'temperature': 28.0, - 'uv_index': 5, - 'wind_bearing': 335, - 'wind_gust_speed': 13.68, - 'wind_speed': 13.68, - }), - dict({ - 'apparent_temperature': 30.7, - 'cloud_coverage': 60.0, - 'condition': 
'partlycloudy', - 'datetime': '2023-09-18T05:00:00Z', - 'dew_point': 21.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1005.89, - 'temperature': 28.1, - 'uv_index': 4, - 'wind_bearing': 336, - 'wind_gust_speed': 14.61, - 'wind_speed': 14.61, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T06:00:00Z', - 'dew_point': 21.2, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 27.0, - 'pressure': 1005.67, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 338, - 'wind_gust_speed': 15.25, - 'wind_speed': 15.25, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T07:00:00Z', - 'dew_point': 21.3, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1005.74, - 'temperature': 27.4, - 'uv_index': 1, - 'wind_bearing': 339, - 'wind_gust_speed': 15.45, - 'wind_speed': 15.45, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T08:00:00Z', - 'dew_point': 21.4, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1005.98, - 'temperature': 26.7, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.38, - 'wind_speed': 15.38, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T09:00:00Z', - 'dew_point': 21.6, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.22, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.27, - 'wind_speed': 15.27, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T10:00:00Z', - 'dew_point': 21.6, - 'humidity': 79, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.44, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 15.09, - 'wind_speed': 15.09, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T11:00:00Z', - 'dew_point': 21.7, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.66, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 336, - 'wind_gust_speed': 14.88, - 'wind_speed': 14.88, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.79, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 333, - 'wind_gust_speed': 14.91, - 'wind_speed': 14.91, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.36, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 83, - 'wind_gust_speed': 4.58, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T14:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.96, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 4.74, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 24.5, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T15:00:00Z', - 'dew_point': 20.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.6, - 'temperature': 22.6, - 'uv_index': 0, - 
'wind_bearing': 152, - 'wind_gust_speed': 5.63, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T16:00:00Z', - 'dew_point': 20.7, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 22.3, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 6.02, - 'wind_speed': 6.02, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T17:00:00Z', - 'dew_point': 20.4, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.2, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 6.15, - 'wind_speed': 6.15, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T18:00:00Z', - 'dew_point': 20.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.08, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 167, - 'wind_gust_speed': 6.48, - 'wind_speed': 6.48, - }), - dict({ - 'apparent_temperature': 23.2, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T19:00:00Z', - 'dew_point': 19.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.04, - 'temperature': 21.8, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 7.51, - 'wind_speed': 7.51, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 99.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T20:00:00Z', - 'dew_point': 19.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.05, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 8.73, - 'wind_speed': 8.73, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 98.0, - 
'condition': 'cloudy', - 'datetime': '2023-09-18T21:00:00Z', - 'dew_point': 19.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.06, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 9.21, - 'wind_speed': 9.11, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 96.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T22:00:00Z', - 'dew_point': 19.7, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 171, - 'wind_gust_speed': 9.03, - 'wind_speed': 7.91, - }), - ]), - }), - }) -# --- -# name: test_hourly_forecast[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T14:00:00Z', - 'dew_point': 21.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 264, - 'wind_gust_speed': 13.44, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 80.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 261, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.64, - }), - dict({ - 'apparent_temperature': 23.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.12, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 252, - 'wind_gust_speed': 11.15, - 'wind_speed': 6.14, - }), - dict({ - 'apparent_temperature': 23.5, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-08T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.7, - 'uv_index': 0, - 'wind_bearing': 248, - 'wind_gust_speed': 11.57, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T18:00:00Z', - 'dew_point': 20.8, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.05, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 12.42, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 23.0, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.3, - 'uv_index': 0, - 'wind_bearing': 224, - 'wind_gust_speed': 11.3, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T20:00:00Z', - 'dew_point': 20.4, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.31, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 221, - 'wind_gust_speed': 10.57, - 'wind_speed': 5.13, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T21:00:00Z', - 'dew_point': 20.5, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.55, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 10.63, - 'wind_speed': 5.7, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 
0.0, - 'pressure': 1009.79, - 'temperature': 22.8, - 'uv_index': 1, - 'wind_bearing': 258, - 'wind_gust_speed': 10.47, - 'wind_speed': 5.22, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T23:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.95, - 'temperature': 24.0, - 'uv_index': 2, - 'wind_bearing': 282, - 'wind_gust_speed': 12.74, - 'wind_speed': 5.71, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T00:00:00Z', - 'dew_point': 21.5, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.35, - 'temperature': 25.1, - 'uv_index': 3, - 'wind_bearing': 294, - 'wind_gust_speed': 13.87, - 'wind_speed': 6.53, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T01:00:00Z', - 'dew_point': 21.8, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 26.5, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 16.04, - 'wind_speed': 6.54, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T02:00:00Z', - 'dew_point': 22.0, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.23, - 'temperature': 27.6, - 'uv_index': 6, - 'wind_bearing': 314, - 'wind_gust_speed': 18.1, - 'wind_speed': 7.32, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T03:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.86, - 'temperature': 28.3, - 'uv_index': 6, - 'wind_bearing': 317, - 'wind_gust_speed': 20.77, - 'wind_speed': 9.1, - }), 
- dict({ - 'apparent_temperature': 31.5, - 'cloud_coverage': 69.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T04:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.65, - 'temperature': 28.6, - 'uv_index': 6, - 'wind_bearing': 311, - 'wind_gust_speed': 21.27, - 'wind_speed': 10.21, - }), - dict({ - 'apparent_temperature': 31.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T05:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.48, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 317, - 'wind_gust_speed': 19.62, - 'wind_speed': 10.53, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.54, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 335, - 'wind_gust_speed': 18.98, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.76, - 'temperature': 27.1, - 'uv_index': 2, - 'wind_bearing': 338, - 'wind_gust_speed': 17.04, - 'wind_speed': 7.75, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.05, - 'temperature': 26.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 14.75, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T09:00:00Z', - 
'dew_point': 22.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, 
- 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - 
dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 
'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 
'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 
'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 7.84, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, 
- 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 
'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 'dew_point': 22.3, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 
'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 
21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 
'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 
'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T21:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.71, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 16.66, - 'wind_speed': 10.68, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T22:00:00Z', - 'dew_point': 21.9, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 24.4, - 'uv_index': 1, - 'wind_bearing': 137, - 'wind_gust_speed': 17.21, - 'wind_speed': 10.61, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.05, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 19.23, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 29.5, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.07, - 'temperature': 26.6, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 20.61, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 82.0, - 'condition': 'rainy', - 'datetime': '2023-09-12T01:00:00Z', - 'dew_point': 23.1, - 'humidity': 75, - 
'precipitation': 0.2, - 'precipitation_probability': 16.0, - 'pressure': 1011.89, - 'temperature': 27.9, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 23.35, - 'wind_speed': 11.98, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 29.0, - 'uv_index': 5, - 'wind_bearing': 143, - 'wind_gust_speed': 26.45, - 'wind_speed': 13.01, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.15, - 'temperature': 29.8, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 28.95, - 'wind_speed': 13.9, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.79, - 'temperature': 30.2, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 27.9, - 'wind_speed': 13.95, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T05:00:00Z', - 'dew_point': 23.1, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.43, - 'temperature': 30.4, - 'uv_index': 4, - 'wind_bearing': 140, - 'wind_gust_speed': 26.53, - 'wind_speed': 13.78, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T06:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.21, - 'temperature': 30.1, - 'uv_index': 3, 
- 'wind_bearing': 138, - 'wind_gust_speed': 24.56, - 'wind_speed': 13.74, - }), - dict({ - 'apparent_temperature': 32.0, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.26, - 'temperature': 29.1, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 22.78, - 'wind_speed': 13.21, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.51, - 'temperature': 28.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 19.92, - 'wind_speed': 12.0, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T09:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.8, - 'temperature': 27.2, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 17.65, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T10:00:00Z', - 'dew_point': 21.4, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 15.87, - 'wind_speed': 10.23, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T11:00:00Z', - 'dew_point': 21.3, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1011.79, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 13.9, - 'wind_speed': 9.39, - }), - dict({ - 
'apparent_temperature': 26.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T12:00:00Z', - 'dew_point': 21.2, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 47.0, - 'pressure': 1012.12, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.32, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1012.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.18, - 'wind_speed': 8.59, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T14:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.09, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.84, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T15:00:00Z', - 'dew_point': 21.3, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.99, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.93, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T16:00:00Z', - 'dew_point': 21.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 16.74, - 'wind_speed': 9.49, - }), - dict({ - 'apparent_temperature': 24.7, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T17:00:00Z', - 'dew_point': 
20.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.75, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 17.45, - 'wind_speed': 9.12, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.04, - 'wind_speed': 8.68, - }), - dict({ - 'apparent_temperature': 24.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 16.8, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T20:00:00Z', - 'dew_point': 20.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.23, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.35, - 'wind_speed': 8.36, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T21:00:00Z', - 'dew_point': 20.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.49, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 14.09, - 'wind_speed': 7.77, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T22:00:00Z', - 'dew_point': 21.0, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.72, - 'temperature': 23.8, - 'uv_index': 
1, - 'wind_bearing': 152, - 'wind_gust_speed': 14.04, - 'wind_speed': 7.25, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T23:00:00Z', - 'dew_point': 21.4, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 25.5, - 'uv_index': 2, - 'wind_bearing': 149, - 'wind_gust_speed': 15.31, - 'wind_speed': 7.14, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-13T00:00:00Z', - 'dew_point': 21.8, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 27.1, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 16.42, - 'wind_speed': 6.89, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T01:00:00Z', - 'dew_point': 22.0, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.65, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 137, - 'wind_gust_speed': 18.64, - 'wind_speed': 6.65, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T02:00:00Z', - 'dew_point': 21.9, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.26, - 'temperature': 29.4, - 'uv_index': 5, - 'wind_bearing': 128, - 'wind_gust_speed': 21.69, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 33.0, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T03:00:00Z', - 'dew_point': 21.9, - 'humidity': 62, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.88, - 'temperature': 30.1, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 23.41, - 'wind_speed': 7.33, - }), - dict({ - 'apparent_temperature': 33.4, - 
'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T04:00:00Z', - 'dew_point': 22.0, - 'humidity': 61, - 'precipitation': 0.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.55, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 56, - 'wind_gust_speed': 23.1, - 'wind_speed': 8.09, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 61, - 'precipitation': 1.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.29, - 'temperature': 30.2, - 'uv_index': 4, - 'wind_bearing': 20, - 'wind_gust_speed': 21.81, - 'wind_speed': 9.46, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T06:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 2.3, - 'precipitation_probability': 11.0, - 'pressure': 1011.17, - 'temperature': 29.7, - 'uv_index': 3, - 'wind_bearing': 20, - 'wind_gust_speed': 19.72, - 'wind_speed': 9.8, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 69.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T07:00:00Z', - 'dew_point': 22.4, - 'humidity': 68, - 'precipitation': 1.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.32, - 'temperature': 28.8, - 'uv_index': 1, - 'wind_bearing': 18, - 'wind_gust_speed': 17.55, - 'wind_speed': 9.23, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T08:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.6, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 27, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.05, - }), - dict({ - 'apparent_temperature': 29.4, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T09:00:00Z', - 'dew_point': 23.0, - 'humidity': 82, - 'precipitation': 0.0, - 
'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 32, - 'wind_gust_speed': 12.17, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T10:00:00Z', - 'dew_point': 22.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.3, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 69, - 'wind_gust_speed': 11.64, - 'wind_speed': 6.69, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.71, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.23, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.96, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.47, - 'wind_speed': 5.73, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T13:00:00Z', - 'dew_point': 22.3, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.03, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 13.57, - 'wind_speed': 5.66, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.99, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 15.07, 
- 'wind_speed': 5.83, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T15:00:00Z', - 'dew_point': 22.2, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.95, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 16.06, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T16:00:00Z', - 'dew_point': 22.0, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.9, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 16.05, - 'wind_speed': 5.75, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T17:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.52, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T18:00:00Z', - 'dew_point': 21.8, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.87, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.01, - 'wind_speed': 5.32, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 22.8, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.39, - 'wind_speed': 5.33, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-13T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.22, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.79, - 'wind_speed': 5.43, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.12, - 'wind_speed': 5.52, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T22:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.59, - 'temperature': 24.3, - 'uv_index': 1, - 'wind_bearing': 147, - 'wind_gust_speed': 16.14, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T23:00:00Z', - 'dew_point': 22.4, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.74, - 'temperature': 25.7, - 'uv_index': 2, - 'wind_bearing': 146, - 'wind_gust_speed': 19.09, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.78, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 143, - 'wind_gust_speed': 21.6, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T01:00:00Z', - 'dew_point': 23.2, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 
0.0, - 'pressure': 1013.61, - 'temperature': 28.7, - 'uv_index': 5, - 'wind_bearing': 138, - 'wind_gust_speed': 23.36, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T02:00:00Z', - 'dew_point': 23.2, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.32, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 24.72, - 'wind_speed': 4.99, - }), - dict({ - 'apparent_temperature': 34.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T03:00:00Z', - 'dew_point': 23.3, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.04, - 'temperature': 30.7, - 'uv_index': 6, - 'wind_bearing': 354, - 'wind_gust_speed': 25.23, - 'wind_speed': 4.74, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.77, - 'temperature': 31.0, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 24.6, - 'wind_speed': 4.79, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 60.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T05:00:00Z', - 'dew_point': 23.2, - 'humidity': 64, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1012.53, - 'temperature': 30.7, - 'uv_index': 5, - 'wind_bearing': 336, - 'wind_gust_speed': 23.28, - 'wind_speed': 5.07, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 59.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T06:00:00Z', - 'dew_point': 23.1, - 'humidity': 66, - 'precipitation': 0.2, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1012.49, - 'temperature': 30.2, - 'uv_index': 3, - 'wind_bearing': 
336, - 'wind_gust_speed': 22.05, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 32.9, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T07:00:00Z', - 'dew_point': 23.0, - 'humidity': 68, - 'precipitation': 0.2, - 'precipitation_probability': 40.0, - 'pressure': 1012.73, - 'temperature': 29.5, - 'uv_index': 2, - 'wind_bearing': 339, - 'wind_gust_speed': 21.18, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T08:00:00Z', - 'dew_point': 22.8, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 45.0, - 'pressure': 1013.16, - 'temperature': 28.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 20.35, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T09:00:00Z', - 'dew_point': 22.5, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1013.62, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 347, - 'wind_gust_speed': 19.42, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T10:00:00Z', - 'dew_point': 22.4, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.09, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 348, - 'wind_gust_speed': 18.19, - 'wind_speed': 5.31, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T11:00:00Z', - 'dew_point': 22.4, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.56, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 177, - 'wind_gust_speed': 16.79, - 'wind_speed': 4.28, - }), - dict({ - 'apparent_temperature': 27.5, - 'cloud_coverage': 31.0, - 
'condition': 'sunny', - 'datetime': '2023-09-14T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.87, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 15.61, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T13:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.91, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 14.7, - 'wind_speed': 4.11, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T14:00:00Z', - 'dew_point': 21.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.8, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 13.81, - 'wind_speed': 4.97, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T15:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.66, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 170, - 'wind_gust_speed': 12.88, - 'wind_speed': 5.57, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T16:00:00Z', - 'dew_point': 21.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.54, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 12.0, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T17:00:00Z', - 'dew_point': 21.3, - 'humidity': 94, - 'precipitation': 0.0, - 
'precipitation_probability': 40.0, - 'pressure': 1014.45, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 11.43, - 'wind_speed': 5.48, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 44.0, - 'pressure': 1014.45, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 11.42, - 'wind_speed': 5.38, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T19:00:00Z', - 'dew_point': 21.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'pressure': 1014.63, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.15, - 'wind_speed': 5.39, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T20:00:00Z', - 'dew_point': 21.8, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 51.0, - 'pressure': 1014.91, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 13.54, - 'wind_speed': 5.45, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T21:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 42.0, - 'pressure': 1015.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 15.48, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T22:00:00Z', - 'dew_point': 22.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 28.999999999999996, - 'pressure': 1015.4, - 'temperature': 25.7, - 'uv_index': 1, - 
'wind_bearing': 158, - 'wind_gust_speed': 17.86, - 'wind_speed': 5.84, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 77, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.54, - 'temperature': 27.2, - 'uv_index': 2, - 'wind_bearing': 155, - 'wind_gust_speed': 20.19, - 'wind_speed': 6.09, - }), - dict({ - 'apparent_temperature': 32.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T00:00:00Z', - 'dew_point': 23.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.55, - 'temperature': 28.6, - 'uv_index': 4, - 'wind_bearing': 152, - 'wind_gust_speed': 21.83, - 'wind_speed': 6.42, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T01:00:00Z', - 'dew_point': 23.5, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.35, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 144, - 'wind_gust_speed': 22.56, - 'wind_speed': 6.91, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.0, - 'temperature': 30.4, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.83, - 'wind_speed': 7.47, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.62, - 'temperature': 30.9, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.98, - 'wind_speed': 7.95, - }), - dict({ - 'apparent_temperature': 35.4, - 
'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T04:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 31.3, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 23.21, - 'wind_speed': 8.44, - }), - dict({ - 'apparent_temperature': 35.6, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T05:00:00Z', - 'dew_point': 23.7, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.95, - 'temperature': 31.5, - 'uv_index': 5, - 'wind_bearing': 344, - 'wind_gust_speed': 23.46, - 'wind_speed': 8.95, - }), - dict({ - 'apparent_temperature': 35.1, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T06:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.83, - 'temperature': 31.1, - 'uv_index': 3, - 'wind_bearing': 347, - 'wind_gust_speed': 23.64, - 'wind_speed': 9.13, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.96, - 'temperature': 30.3, - 'uv_index': 2, - 'wind_bearing': 350, - 'wind_gust_speed': 23.66, - 'wind_speed': 8.78, - }), - dict({ - 'apparent_temperature': 32.4, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T08:00:00Z', - 'dew_point': 23.1, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 29.0, - 'uv_index': 0, - 'wind_bearing': 356, - 'wind_gust_speed': 23.51, - 'wind_speed': 8.13, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T09:00:00Z', - 'dew_point': 22.9, 
- 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.61, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 3, - 'wind_gust_speed': 23.21, - 'wind_speed': 7.48, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T10:00:00Z', - 'dew_point': 22.8, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.02, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 20, - 'wind_gust_speed': 22.68, - 'wind_speed': 6.83, - }), - dict({ - 'apparent_temperature': 29.2, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.43, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 129, - 'wind_gust_speed': 22.04, - 'wind_speed': 6.1, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T12:00:00Z', - 'dew_point': 22.7, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.71, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.64, - 'wind_speed': 5.6, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T13:00:00Z', - 'dew_point': 23.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.52, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 16.35, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T14:00:00Z', - 'dew_point': 22.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.37, - 'temperature': 24.6, - 
'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 17.11, - 'wind_speed': 5.79, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.21, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 17.32, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 16.6, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T17:00:00Z', - 'dew_point': 22.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.95, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 219, - 'wind_gust_speed': 15.52, - 'wind_speed': 4.62, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T18:00:00Z', - 'dew_point': 22.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.88, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 216, - 'wind_gust_speed': 14.64, - 'wind_speed': 4.32, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T19:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.91, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 198, - 'wind_gust_speed': 14.06, - 'wind_speed': 4.73, - }), - dict({ - 'apparent_temperature': 26.3, - 
'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T20:00:00Z', - 'dew_point': 22.4, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.99, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 189, - 'wind_gust_speed': 13.7, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T21:00:00Z', - 'dew_point': 22.5, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 13.77, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.12, - 'temperature': 25.5, - 'uv_index': 1, - 'wind_bearing': 179, - 'wind_gust_speed': 14.38, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 52.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.13, - 'temperature': 26.9, - 'uv_index': 2, - 'wind_bearing': 170, - 'wind_gust_speed': 15.2, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.04, - 'temperature': 28.0, - 'uv_index': 4, - 'wind_bearing': 155, - 'wind_gust_speed': 15.85, - 'wind_speed': 4.76, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 24.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T01:00:00Z', - 'dew_point': 22.6, - 'humidity': 68, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.52, - 'temperature': 29.2, - 'uv_index': 6, - 'wind_bearing': 110, - 'wind_gust_speed': 16.27, - 'wind_speed': 6.81, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 16.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.01, - 'temperature': 30.2, - 'uv_index': 8, - 'wind_bearing': 30, - 'wind_gust_speed': 16.55, - 'wind_speed': 6.86, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T03:00:00Z', - 'dew_point': 22.0, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.45, - 'temperature': 31.1, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.52, - 'wind_speed': 6.8, - }), - dict({ - 'apparent_temperature': 34.7, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T04:00:00Z', - 'dew_point': 21.9, - 'humidity': 57, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 31.5, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.08, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.39, - 'temperature': 31.8, - 'uv_index': 6, - 'wind_bearing': 20, - 'wind_gust_speed': 15.48, - 'wind_speed': 6.45, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T06:00:00Z', - 'dew_point': 21.7, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.11, - 'temperature': 31.4, - 'uv_index': 4, - 'wind_bearing': 26, - 
'wind_gust_speed': 15.08, - 'wind_speed': 6.43, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 7.000000000000001, - 'condition': 'sunny', - 'datetime': '2023-09-16T07:00:00Z', - 'dew_point': 21.7, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.15, - 'temperature': 30.7, - 'uv_index': 2, - 'wind_bearing': 39, - 'wind_gust_speed': 14.88, - 'wind_speed': 6.61, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.41, - 'temperature': 29.6, - 'uv_index': 0, - 'wind_bearing': 72, - 'wind_gust_speed': 14.82, - 'wind_speed': 6.95, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T09:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.75, - 'temperature': 28.5, - 'uv_index': 0, - 'wind_bearing': 116, - 'wind_gust_speed': 15.13, - 'wind_speed': 7.45, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 13.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T10:00:00Z', - 'dew_point': 22.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.13, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 16.09, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.47, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.37, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 29.3, - 'cloud_coverage': 45.0, - 'condition': 
'partlycloudy', - 'datetime': '2023-09-16T12:00:00Z', - 'dew_point': 22.9, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.6, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 18.29, - 'wind_speed': 9.21, - }), - dict({ - 'apparent_temperature': 28.7, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T13:00:00Z', - 'dew_point': 23.0, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 25.7, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 18.49, - 'wind_speed': 8.96, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T14:00:00Z', - 'dew_point': 22.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.47, - 'wind_speed': 8.45, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.79, - 'wind_speed': 8.1, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.1, - 'temperature': 24.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 19.81, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T17:00:00Z', - 'dew_point': 22.6, - 'humidity': 94, - 'precipitation': 0.0, 
- 'precipitation_probability': 0.0, - 'pressure': 1011.68, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 20.96, - 'wind_speed': 8.3, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T18:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.41, - 'wind_speed': 8.24, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T19:00:00Z', - 'dew_point': 22.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 20.42, - 'wind_speed': 7.62, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T20:00:00Z', - 'dew_point': 22.6, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 18.61, - 'wind_speed': 6.66, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T21:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 17.14, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 26.0, - 'uv_index': 1, - 'wind_bearing': 
161, - 'wind_gust_speed': 16.78, - 'wind_speed': 5.5, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.51, - 'temperature': 27.5, - 'uv_index': 2, - 'wind_bearing': 165, - 'wind_gust_speed': 17.21, - 'wind_speed': 5.56, - }), - dict({ - 'apparent_temperature': 31.7, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T00:00:00Z', - 'dew_point': 22.8, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 28.5, - 'uv_index': 4, - 'wind_bearing': 174, - 'wind_gust_speed': 17.96, - 'wind_speed': 6.04, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T01:00:00Z', - 'dew_point': 22.7, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.4, - 'uv_index': 6, - 'wind_bearing': 192, - 'wind_gust_speed': 19.15, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 28.999999999999996, - 'condition': 'sunny', - 'datetime': '2023-09-17T02:00:00Z', - 'dew_point': 22.8, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 30.1, - 'uv_index': 7, - 'wind_bearing': 225, - 'wind_gust_speed': 20.89, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T03:00:00Z', - 'dew_point': 22.8, - 'humidity': 63, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1009.75, - 'temperature': 30.7, - 'uv_index': 8, - 'wind_bearing': 264, - 'wind_gust_speed': 22.67, - 'wind_speed': 10.27, - }), - dict({ - 'apparent_temperature': 33.9, - 'cloud_coverage': 37.0, - 
'condition': 'sunny', - 'datetime': '2023-09-17T04:00:00Z', - 'dew_point': 22.5, - 'humidity': 62, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1009.18, - 'temperature': 30.5, - 'uv_index': 7, - 'wind_bearing': 293, - 'wind_gust_speed': 23.93, - 'wind_speed': 10.82, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T05:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.6, - 'precipitation_probability': 12.0, - 'pressure': 1008.71, - 'temperature': 30.1, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 24.39, - 'wind_speed': 10.72, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 64, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.46, - 'temperature': 29.6, - 'uv_index': 3, - 'wind_bearing': 312, - 'wind_gust_speed': 23.9, - 'wind_speed': 10.28, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 47.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.53, - 'temperature': 28.9, - 'uv_index': 1, - 'wind_bearing': 312, - 'wind_gust_speed': 22.3, - 'wind_speed': 9.59, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 70, - 'precipitation': 0.6, - 'precipitation_probability': 15.0, - 'pressure': 1008.82, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 19.73, - 'wind_speed': 8.58, - }), - dict({ - 'apparent_temperature': 29.6, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T09:00:00Z', - 'dew_point': 22.0, - 
'humidity': 74, - 'precipitation': 0.5, - 'precipitation_probability': 15.0, - 'pressure': 1009.21, - 'temperature': 27.0, - 'uv_index': 0, - 'wind_bearing': 291, - 'wind_gust_speed': 16.49, - 'wind_speed': 7.34, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 78, - 'precipitation': 0.4, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1009.65, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 12.71, - 'wind_speed': 5.91, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T11:00:00Z', - 'dew_point': 21.9, - 'humidity': 82, - 'precipitation': 0.3, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.04, - 'temperature': 25.3, - 'uv_index': 0, - 'wind_bearing': 212, - 'wind_gust_speed': 9.16, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T12:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.3, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1010.24, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 192, - 'wind_gust_speed': 7.09, - 'wind_speed': 3.62, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T13:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1010.15, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 185, - 'wind_gust_speed': 7.2, - 'wind_speed': 3.27, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 44.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T14:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1009.87, 
- 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.22, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 49.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T15:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.2, - 'precipitation_probability': 31.0, - 'pressure': 1009.56, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 180, - 'wind_gust_speed': 9.21, - 'wind_speed': 3.3, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 94, - 'precipitation': 0.2, - 'precipitation_probability': 33.0, - 'pressure': 1009.29, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 9.0, - 'wind_speed': 3.46, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T17:00:00Z', - 'dew_point': 21.7, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 35.0, - 'pressure': 1009.09, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 186, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T18:00:00Z', - 'dew_point': 21.6, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 37.0, - 'pressure': 1009.01, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 7.99, - 'wind_speed': 4.07, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.07, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 258, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.55, - }), 
- dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T20:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.23, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 8.77, - 'wind_speed': 5.17, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 38.0, - 'pressure': 1009.47, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 318, - 'wind_gust_speed': 9.69, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 30.0, - 'pressure': 1009.77, - 'temperature': 24.2, - 'uv_index': 1, - 'wind_bearing': 324, - 'wind_gust_speed': 10.88, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 83, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.09, - 'temperature': 25.1, - 'uv_index': 2, - 'wind_bearing': 329, - 'wind_gust_speed': 12.21, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T00:00:00Z', - 'dew_point': 21.9, - 'humidity': 80, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.33, - 'temperature': 25.7, - 'uv_index': 3, - 'wind_bearing': 332, - 'wind_gust_speed': 13.52, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T01:00:00Z', - 
'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1007.43, - 'temperature': 27.2, - 'uv_index': 5, - 'wind_bearing': 330, - 'wind_gust_speed': 11.36, - 'wind_speed': 11.36, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T02:00:00Z', - 'dew_point': 21.6, - 'humidity': 70, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1007.05, - 'temperature': 27.5, - 'uv_index': 6, - 'wind_bearing': 332, - 'wind_gust_speed': 12.06, - 'wind_speed': 12.06, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T03:00:00Z', - 'dew_point': 21.6, - 'humidity': 69, - 'precipitation': 0.5, - 'precipitation_probability': 10.0, - 'pressure': 1006.67, - 'temperature': 27.8, - 'uv_index': 6, - 'wind_bearing': 333, - 'wind_gust_speed': 12.81, - 'wind_speed': 12.81, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T04:00:00Z', - 'dew_point': 21.5, - 'humidity': 68, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1006.28, - 'temperature': 28.0, - 'uv_index': 5, - 'wind_bearing': 335, - 'wind_gust_speed': 13.68, - 'wind_speed': 13.68, - }), - dict({ - 'apparent_temperature': 30.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T05:00:00Z', - 'dew_point': 21.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1005.89, - 'temperature': 28.1, - 'uv_index': 4, - 'wind_bearing': 336, - 'wind_gust_speed': 14.61, - 'wind_speed': 14.61, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T06:00:00Z', - 'dew_point': 21.2, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 27.0, - 'pressure': 
1005.67, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 338, - 'wind_gust_speed': 15.25, - 'wind_speed': 15.25, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T07:00:00Z', - 'dew_point': 21.3, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1005.74, - 'temperature': 27.4, - 'uv_index': 1, - 'wind_bearing': 339, - 'wind_gust_speed': 15.45, - 'wind_speed': 15.45, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T08:00:00Z', - 'dew_point': 21.4, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1005.98, - 'temperature': 26.7, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.38, - 'wind_speed': 15.38, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T09:00:00Z', - 'dew_point': 21.6, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.22, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.27, - 'wind_speed': 15.27, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T10:00:00Z', - 'dew_point': 21.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.44, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 15.09, - 'wind_speed': 15.09, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T11:00:00Z', - 'dew_point': 21.7, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.66, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 336, - 'wind_gust_speed': 14.88, - 
'wind_speed': 14.88, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.79, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 333, - 'wind_gust_speed': 14.91, - 'wind_speed': 14.91, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.36, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 83, - 'wind_gust_speed': 4.58, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T14:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.96, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 4.74, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 24.5, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T15:00:00Z', - 'dew_point': 20.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.6, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 152, - 'wind_gust_speed': 5.63, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T16:00:00Z', - 'dew_point': 20.7, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 22.3, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 6.02, - 'wind_speed': 6.02, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-18T17:00:00Z', - 'dew_point': 20.4, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.2, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 6.15, - 'wind_speed': 6.15, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T18:00:00Z', - 'dew_point': 20.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.08, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 167, - 'wind_gust_speed': 6.48, - 'wind_speed': 6.48, - }), - dict({ - 'apparent_temperature': 23.2, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T19:00:00Z', - 'dew_point': 19.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.04, - 'temperature': 21.8, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 7.51, - 'wind_speed': 7.51, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 99.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T20:00:00Z', - 'dew_point': 19.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.05, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 8.73, - 'wind_speed': 8.73, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 98.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T21:00:00Z', - 'dew_point': 19.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.06, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 9.21, - 'wind_speed': 9.11, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 96.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T22:00:00Z', - 'dew_point': 19.7, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 
'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 171, - 'wind_gust_speed': 9.03, - 'wind_speed': 7.91, - }), - ]), - }) -# --- # name: test_hourly_forecast[get_forecasts] dict({ 'weather.home': dict({ diff --git a/tests/components/webhook/test_trigger.py b/tests/components/webhook/test_trigger.py index 37aae47dd14..2963db70ad4 100644 --- a/tests/components/webhook/test_trigger.py +++ b/tests/components/webhook/test_trigger.py @@ -17,7 +17,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture(autouse=True) -async def setup_http(hass): +async def setup_http(hass: HomeAssistant) -> None: """Set up http.""" assert await async_setup_component(hass, "http", {}) assert await async_setup_component(hass, "webhook", {}) diff --git a/tests/components/webmin/conftest.py b/tests/components/webmin/conftest.py index c3ad43510d5..ae0d7b26b5a 100644 --- a/tests/components/webmin/conftest.py +++ b/tests/components/webmin/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Webmin integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.webmin.const import DEFAULT_PORT, DOMAIN from homeassistant.const import ( @@ -37,14 +37,21 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup -async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry: +async def async_init_integration( + hass: HomeAssistant, with_mac_address: bool = True +) -> MockConfigEntry: """Set up the Webmin integration in Home Assistant.""" entry = MockConfigEntry(domain=DOMAIN, options=TEST_USER_INPUT, title="name") entry.add_to_hass(hass) with patch( "homeassistant.components.webmin.helpers.WebminInstance.update", - return_value=load_json_object_fixture("webmin_update.json", DOMAIN), + return_value=load_json_object_fixture( + "webmin_update.json" + if with_mac_address + else "webmin_update_without_mac.json", + DOMAIN, + ), 
): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/webmin/fixtures/webmin_update_without_mac.json b/tests/components/webmin/fixtures/webmin_update_without_mac.json new file mode 100644 index 00000000000..e79c54d0ff2 --- /dev/null +++ b/tests/components/webmin/fixtures/webmin_update_without_mac.json @@ -0,0 +1,108 @@ +{ + "disk_total": 18104905818112, + "io": [0, 4], + "load": [ + 1.29, + 1.36, + 1.37, + 3589, + "Intel(R) Core(TM) i7-5820K CPU @ 3.30GHz", + "GenuineIntel", + 15728640, + 12 + ], + "disk_free": 7749321486336, + "kernel": { "os": "Linux", "arch": "x86_64", "version": "6.6.18-1-lts" }, + "disk_fs": [ + { + "device": "UUID=00000000-80b6-0000-8a06-000000000000", + "dir": "/", + "ifree": 14927206, + "total": 248431161344, + "used_percent": 80, + "type": "ext4", + "itotal": 15482880, + "iused": 555674, + "free": 49060442112, + "used": 186676502528, + "iused_percent": 4 + }, + { + "total": 11903838912512, + "used_percent": 38, + "iused": 3542318, + "type": "ext4", + "itotal": 366198784, + "device": "/dev/md127", + "ifree": 362656466, + "dir": "/media/disk2", + "iused_percent": 1, + "free": 7028764823552, + "used": 4275077644288 + }, + { + "dir": "/media/disk1", + "ifree": 183130757, + "device": "UUID=00000000-2bb2-0000-896c-000000000000", + "type": "ext4", + "itotal": 183140352, + "iused": 9595, + "used_percent": 89, + "total": 5952635744256, + "used": 4981066997760, + "free": 671496220672, + "iused_percent": 1 + } + ], + "drivetemps": [ + { "temp": 49, "device": "/dev/sda", "failed": "", "errors": "" }, + { "failed": "", "errors": "", "device": "/dev/sdb", "temp": 49 }, + { "device": "/dev/sdc", "temp": 51, "failed": "", "errors": "" }, + { "failed": "", "errors": "", "device": "/dev/sdd", "temp": 51 }, + { "errors": "", "failed": "", "temp": 43, "device": "/dev/sde" }, + { "device": "/dev/sdf", "temp": 40, "errors": "", "failed": "" } + ], + "mem": [32766344, 28530480, 1953088, 1944384, 
27845756, ""], + "disk_used": 9442821144576, + "cputemps": [ + { "temp": 51, "core": 0 }, + { "temp": 49, "core": 1 }, + { "core": 2, "temp": 59 }, + { "temp": 51, "core": 3 }, + { "temp": 50, "core": 4 }, + { "temp": 49, "core": 5 } + ], + "procs": 310, + "cpu": [0, 8, 92, 0, 0], + "cpufans": [ + { "rpm": 0, "fan": 1 }, + { "fan": 2, "rpm": 1371 }, + { "rpm": 0, "fan": 3 }, + { "rpm": 927, "fan": 4 }, + { "rpm": 801, "fan": 5 } + ], + "load_1m": 1.29, + "load_5m": 1.36, + "load_15m": 1.37, + "mem_total": 32766344, + "mem_free": 28530480, + "swap_total": 1953088, + "swap_free": 1944384, + "uptime": { "days": 11, "minutes": 1, "seconds": 28 }, + "active_interfaces": [ + { + "scope6": ["host"], + "address": "127.0.0.1", + "address6": ["::1"], + "name": "lo", + "broadcast": 0, + "up": 1, + "index": 0, + "fullname": "lo", + "netmask6": [128], + "netmask": "255.0.0.0", + "mtu": 65536, + "edit": 1 + } + ] +} diff --git a/tests/components/webmin/test_config_flow.py b/tests/components/webmin/test_config_flow.py index a9f5eafc5c7..477ad230622 100644 --- a/tests/components/webmin/test_config_flow.py +++ b/tests/components/webmin/test_config_flow.py @@ -33,15 +33,16 @@ async def user_flow(hass: HomeAssistant) -> str: return result["flow_id"] +@pytest.mark.parametrize( + "fixture", ["webmin_update_without_mac.json", "webmin_update.json"] +) async def test_form_user( - hass: HomeAssistant, - user_flow: str, - mock_setup_entry: AsyncMock, + hass: HomeAssistant, user_flow: str, mock_setup_entry: AsyncMock, fixture: str ) -> None: """Test a successful user initiated flow.""" with patch( "homeassistant.components.webmin.helpers.WebminInstance.update", - return_value=load_json_object_fixture("webmin_update.json", DOMAIN), + return_value=load_json_object_fixture(fixture, DOMAIN), ): result = await hass.config_entries.flow.async_configure( user_flow, TEST_USER_INPUT diff --git a/tests/components/webmin/test_diagnostics.py b/tests/components/webmin/test_diagnostics.py index 
5f1df44f4a8..98d6544bc76 100644 --- a/tests/components/webmin/test_diagnostics.py +++ b/tests/components/webmin/test_diagnostics.py @@ -1,6 +1,7 @@ """Tests for the diagnostics data provided by the Webmin integration.""" from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -16,9 +17,6 @@ async def test_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" - assert ( - await get_diagnostics_for_config_entry( - hass, hass_client, await async_init_integration(hass) - ) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, await async_init_integration(hass) + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/webmin/test_init.py b/tests/components/webmin/test_init.py index 7b6282edfae..36894f00d5f 100644 --- a/tests/components/webmin/test_init.py +++ b/tests/components/webmin/test_init.py @@ -19,3 +19,11 @@ async def test_unload_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.NOT_LOADED assert not hass.data.get(DOMAIN) + + +async def test_entry_without_mac_address(hass: HomeAssistant) -> None: + """Test an entry without MAC address.""" + + entry = await async_init_integration(hass, False) + + assert entry.runtime_data.unique_id == entry.entry_id diff --git a/tests/components/webostv/conftest.py b/tests/components/webostv/conftest.py index 2b5d701f899..a30ae933cca 100644 --- a/tests/components/webostv/conftest.py +++ b/tests/components/webostv/conftest.py @@ -1,17 +1,14 @@ """Common fixtures and objects for the LG webOS integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.webostv.const import LIVE_TV_APP_ID -from homeassistant.core import HomeAssistant, ServiceCall from .const import CHANNEL_1, CHANNEL_2, CLIENT_KEY, FAKE_UUID, 
MOCK_APPS, MOCK_INPUTS -from tests.common import async_mock_service - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -22,12 +19,6 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(name="client") def client_fixture(): """Patch of client library for tests.""" diff --git a/tests/components/webostv/test_config_flow.py b/tests/components/webostv/test_config_flow.py index afda36d913f..406bb9c8804 100644 --- a/tests/components/webostv/test_config_flow.py +++ b/tests/components/webostv/test_config_flow.py @@ -295,7 +295,9 @@ async def test_form_abort_uuid_configured(hass: HomeAssistant, client) -> None: assert entry.data[CONF_HOST] == "new_host" -async def test_reauth_successful(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_reauth_successful( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test that the reauthorization is successful.""" entry = await setup_webostv(hass) assert client @@ -331,7 +333,7 @@ async def test_reauth_successful(hass: HomeAssistant, client, monkeypatch) -> No ], ) async def test_reauth_errors( - hass: HomeAssistant, client, monkeypatch, side_effect, reason + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch, side_effect, reason ) -> None: """Test reauthorization errors.""" entry = await setup_webostv(hass) diff --git a/tests/components/webostv/test_device_trigger.py b/tests/components/webostv/test_device_trigger.py index 29c75d4440b..41045969335 100644 --- a/tests/components/webostv/test_device_trigger.py +++ b/tests/components/webostv/test_device_trigger.py @@ -44,7 +44,7 @@ async def test_get_triggers( async def test_if_fires_on_turn_on_request( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: 
dr.DeviceRegistry, client, ) -> None: @@ -97,11 +97,11 @@ async def test_if_fires_on_turn_on_request( blocking=True, ) - assert len(calls) == 2 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 - assert calls[1].data["some"] == ENTITY_ID - assert calls[1].data["id"] == 0 + assert len(service_calls) == 3 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 + assert service_calls[2].data["some"] == ENTITY_ID + assert service_calls[2].data["id"] == 0 async def test_failure_scenarios( diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py index 934b59a7b83..e2fbc43e187 100644 --- a/tests/components/webostv/test_diagnostics.py +++ b/tests/components/webostv/test_diagnostics.py @@ -58,5 +58,7 @@ async def test_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": entry.created_at.isoformat(), + "modified_at": entry.modified_at.isoformat(), }, } diff --git a/tests/components/webostv/test_init.py b/tests/components/webostv/test_init.py index a2961a81a4e..e2638c86f5e 100644 --- a/tests/components/webostv/test_init.py +++ b/tests/components/webostv/test_init.py @@ -3,6 +3,7 @@ from unittest.mock import Mock from aiowebostv import WebOsTvPairError +import pytest from homeassistant.components.webostv.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState @@ -12,7 +13,9 @@ from homeassistant.core import HomeAssistant from . 
import setup_webostv -async def test_reauth_setup_entry(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_reauth_setup_entry( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test reauth flow triggered by setup entry.""" monkeypatch.setattr(client, "is_connected", Mock(return_value=False)) monkeypatch.setattr(client, "connect", Mock(side_effect=WebOsTvPairError)) @@ -32,7 +35,9 @@ async def test_reauth_setup_entry(hass: HomeAssistant, client, monkeypatch) -> N assert flow["context"].get("entry_id") == entry.entry_id -async def test_key_update_setup_entry(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_key_update_setup_entry( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test key update from setup entry.""" monkeypatch.setattr(client, "client_key", "new_key") entry = await setup_webostv(hass) diff --git a/tests/components/webostv/test_media_player.py b/tests/components/webostv/test_media_player.py index 6c4aeb5e984..e4c02e680bd 100644 --- a/tests/components/webostv/test_media_player.py +++ b/tests/components/webostv/test_media_player.py @@ -144,7 +144,7 @@ async def test_media_play_pause(hass: HomeAssistant, client) -> None: ], ) async def test_media_next_previous_track( - hass: HomeAssistant, client, service, client_call, monkeypatch + hass: HomeAssistant, client, service, client_call, monkeypatch: pytest.MonkeyPatch ) -> None: """Test media next/previous track services.""" await setup_webostv(hass) @@ -270,7 +270,10 @@ async def test_select_sound_output(hass: HomeAssistant, client) -> None: async def test_device_info_startup_off( - hass: HomeAssistant, client, monkeypatch, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + client, + monkeypatch: pytest.MonkeyPatch, + device_registry: dr.DeviceRegistry, ) -> None: """Test device info when device is off at startup.""" monkeypatch.setattr(client, "system_info", None) @@ -291,7 +294,10 @@ async def 
test_device_info_startup_off( async def test_entity_attributes( - hass: HomeAssistant, client, monkeypatch, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + client, + monkeypatch: pytest.MonkeyPatch, + device_registry: dr.DeviceRegistry, ) -> None: """Test entity attributes.""" entry = await setup_webostv(hass) @@ -383,7 +389,7 @@ async def test_play_media(hass: HomeAssistant, client, media_id, ch_id) -> None: async def test_update_sources_live_tv_find( - hass: HomeAssistant, client, monkeypatch + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch ) -> None: """Test finding live TV app id in update sources.""" await setup_webostv(hass) @@ -466,7 +472,9 @@ async def test_update_sources_live_tv_find( assert len(sources) == 1 -async def test_client_disconnected(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_client_disconnected( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test error not raised when client is disconnected.""" await setup_webostv(hass) monkeypatch.setattr(client, "is_connected", Mock(return_value=False)) @@ -477,7 +485,10 @@ async def test_client_disconnected(hass: HomeAssistant, client, monkeypatch) -> async def test_control_error_handling( - hass: HomeAssistant, client, caplog: pytest.LogCaptureFixture, monkeypatch + hass: HomeAssistant, + client, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test control errors handling.""" await setup_webostv(hass) @@ -507,7 +518,9 @@ async def test_control_error_handling( ) -async def test_supported_features(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_supported_features( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test test supported features.""" monkeypatch.setattr(client, "sound_output", "lineout") await setup_webostv(hass) @@ -565,7 +578,7 @@ async def test_supported_features(hass: HomeAssistant, client, monkeypatch) -> N async def 
test_cached_supported_features( - hass: HomeAssistant, client, monkeypatch + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch ) -> None: """Test test supported features.""" monkeypatch.setattr(client, "is_on", False) @@ -672,7 +685,7 @@ async def test_cached_supported_features( async def test_supported_features_no_cache( - hass: HomeAssistant, client, monkeypatch + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch ) -> None: """Test supported features if device is off and no cache.""" monkeypatch.setattr(client, "is_on", False) @@ -716,7 +729,7 @@ async def test_get_image_http( client, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test get image via http.""" url = "http://something/valid_icon" @@ -742,7 +755,7 @@ async def test_get_image_http_error( hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test get image via http error.""" url = "http://something/icon_error" @@ -769,7 +782,7 @@ async def test_get_image_https( client, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test get image via http.""" url = "https://something/valid_icon_https" @@ -789,7 +802,9 @@ async def test_get_image_https( assert content == b"https_image" -async def test_reauth_reconnect(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_reauth_reconnect( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test reauth flow triggered by reconnect.""" entry = await setup_webostv(hass) monkeypatch.setattr(client, "is_connected", Mock(return_value=False)) @@ -814,7 +829,9 @@ async def test_reauth_reconnect(hass: HomeAssistant, client, monkeypatch) -> Non assert flow["context"].get("entry_id") == entry.entry_id 
-async def test_update_media_state(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_update_media_state( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test updating media state.""" await setup_webostv(hass) @@ -832,3 +849,7 @@ async def test_update_media_state(hass: HomeAssistant, client, monkeypatch) -> N monkeypatch.setattr(client, "media_state", data) await client.mock_state_update() assert hass.states.get(ENTITY_ID).state == MediaPlayerState.IDLE + + monkeypatch.setattr(client, "is_on", False) + await client.mock_state_update() + assert hass.states.get(ENTITY_ID).state == STATE_OFF diff --git a/tests/components/webostv/test_notify.py b/tests/components/webostv/test_notify.py index a1c37b9bf97..75c2e148310 100644 --- a/tests/components/webostv/test_notify.py +++ b/tests/components/webostv/test_notify.py @@ -72,7 +72,9 @@ async def test_notify(hass: HomeAssistant, client) -> None: ) -async def test_notify_not_connected(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_notify_not_connected( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test sending a message when client is not connected.""" await setup_webostv(hass) assert hass.services.has_service(NOTIFY_DOMAIN, TV_NAME) @@ -95,7 +97,10 @@ async def test_notify_not_connected(hass: HomeAssistant, client, monkeypatch) -> async def test_icon_not_found( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, client, monkeypatch + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + client, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test notify icon not found error.""" await setup_webostv(hass) @@ -130,7 +135,7 @@ async def test_connection_errors( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, client, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, side_effect, error, ) -> None: diff --git a/tests/components/webostv/test_trigger.py b/tests/components/webostv/test_trigger.py index 
918666cf4bf..d7eeae28ea3 100644 --- a/tests/components/webostv/test_trigger.py +++ b/tests/components/webostv/test_trigger.py @@ -20,7 +20,7 @@ from tests.common import MockEntity, MockEntityPlatform async def test_webostv_turn_on_trigger_device_id( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, client, ) -> None: @@ -58,14 +58,14 @@ async def test_webostv_turn_on_trigger_device_id( blocking=True, ) - assert len(calls) == 1 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 with patch("homeassistant.config.load_yaml_dict", return_value={}): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) - calls.clear() + service_calls.clear() with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -75,11 +75,11 @@ async def test_webostv_turn_on_trigger_device_id( blocking=True, ) - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_webostv_turn_on_trigger_entity_id( - hass: HomeAssistant, calls: list[ServiceCall], client + hass: HomeAssistant, service_calls: list[ServiceCall], client ) -> None: """Test for turn_on triggers by entity_id firing.""" await setup_webostv(hass) @@ -113,9 +113,9 @@ async def test_webostv_turn_on_trigger_entity_id( blocking=True, ) - assert len(calls) == 1 - assert calls[0].data["some"] == ENTITY_ID - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == ENTITY_ID + assert service_calls[1].data["id"] == 0 async def test_wrong_trigger_platform_type( diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py index 62298098adc..20a728cf3cd 100644 --- a/tests/components/websocket_api/test_auth.py +++ b/tests/components/websocket_api/test_auth.py @@ -26,7 +26,7 @@ from 
tests.typing import ClientSessionGenerator @pytest.fixture -def track_connected(hass): +def track_connected(hass: HomeAssistant) -> dict[str, list[int]]: """Track connected and disconnected events.""" connected_evt = [] diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py index 276a383d9e9..772a8ee793e 100644 --- a/tests/components/websocket_api/test_commands.py +++ b/tests/components/websocket_api/test_commands.py @@ -24,6 +24,7 @@ from homeassistant.core import Context, HomeAssistant, State, SupportsResponse, from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.event import async_track_state_change_event from homeassistant.loader import async_get_integration from homeassistant.setup import async_setup_component from homeassistant.util.json import json_loads @@ -919,7 +920,7 @@ async def test_subscribe_entities_with_unserializable_state( class CannotSerializeMe: """Cannot serialize this.""" - def __init__(self): + def __init__(self) -> None: """Init cannot serialize this.""" hass.states.async_set("light.permitted", "off", {"color": "red"}) @@ -2814,3 +2815,54 @@ async def test_integration_descriptions( assert response["success"] assert response["result"] + + +async def test_subscribe_entities_chained_state_change( + hass: HomeAssistant, + websocket_client: MockHAClientWebSocket, + hass_admin_user: MockUser, +) -> None: + """Test chaining state changed events. + + Ensure the websocket sends the off state after + the on state. 
+ """ + + @callback + def auto_off_listener(event): + hass.states.async_set("light.permitted", "off") + + async_track_state_change_event(hass, ["light.permitted"], auto_off_listener) + + await websocket_client.send_json({"id": 7, "type": "subscribe_entities"}) + + data = await websocket_client.receive_str() + msg = json_loads(data) + assert msg["id"] == 7 + assert msg["type"] == const.TYPE_RESULT + assert msg["success"] + + data = await websocket_client.receive_str() + msg = json_loads(data) + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == {"a": {}} + + hass.states.async_set("light.permitted", "on") + data = await websocket_client.receive_str() + msg = json_loads(data) + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "a": {"light.permitted": {"a": {}, "c": ANY, "lc": ANY, "s": "on"}} + } + data = await websocket_client.receive_str() + msg = json_loads(data) + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "c": {"light.permitted": {"+": {"c": ANY, "lc": ANY, "s": "off"}}} + } + + await websocket_client.close() + await hass.async_block_till_done() diff --git a/tests/components/websocket_api/test_connection.py b/tests/components/websocket_api/test_connection.py index d6c2765522e..343575e5b4a 100644 --- a/tests/components/websocket_api/test_connection.py +++ b/tests/components/websocket_api/test_connection.py @@ -2,7 +2,7 @@ import logging from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import Mock, patch from aiohttp.test_utils import make_mocked_request import pytest @@ -75,16 +75,17 @@ async def test_exception_handling( send_messages = [] user = MockUser() refresh_token = Mock() - current_request = AsyncMock() hass.data[DOMAIN] = {} - def get_extra_info(key: str) -> Any: + def get_extra_info(key: str) -> Any | None: if key == "sslcontext": return True if key == "peername": return ("127.0.0.42", 8123) + return None + 
mocked_transport = Mock() mocked_transport.get_extra_info = get_extra_info mocked_request = make_mocked_request( diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 794dd410661..11665da11b4 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -5,7 +5,7 @@ from datetime import timedelta from typing import Any, cast from unittest.mock import patch -from aiohttp import ServerDisconnectedError, WSMsgType, web +from aiohttp import WSMsgType, WSServerHandshakeError, web import pytest from homeassistant.components.websocket_api import ( @@ -374,7 +374,7 @@ async def test_prepare_fail( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(TimeoutError, web.WebSocketResponse.prepare), ), - pytest.raises(ServerDisconnectedError), + pytest.raises(WSServerHandshakeError), ): await hass_ws_client(hass) diff --git a/tests/components/websocket_api/test_sensor.py b/tests/components/websocket_api/test_sensor.py index 3af02dc8f2b..2e5f0c6c605 100644 --- a/tests/components/websocket_api/test_sensor.py +++ b/tests/components/websocket_api/test_sensor.py @@ -1,10 +1,10 @@ """Test cases for the API stream sensor.""" from homeassistant.auth.providers.homeassistant import HassAuthProvider -from homeassistant.bootstrap import async_setup_component from homeassistant.components.websocket_api.auth import TYPE_AUTH_REQUIRED from homeassistant.components.websocket_api.http import URL from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from .test_auth import test_auth_active_with_token diff --git a/tests/components/wemo/conftest.py b/tests/components/wemo/conftest.py index 1316c37b62b..64bd89f4793 100644 --- a/tests/components/wemo/conftest.py +++ b/tests/components/wemo/conftest.py @@ -1,13 +1,15 @@ """Fixtures for pywemo.""" +from collections.abc import Generator import contextlib -from 
unittest.mock import create_autospec, patch +from unittest.mock import MagicMock, create_autospec, patch import pytest import pywemo from homeassistant.components.wemo import CONF_DISCOVERY, CONF_STATIC from homeassistant.components.wemo.const import DOMAIN +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -22,13 +24,13 @@ MOCK_INSIGHT_STATE_THRESHOLD_POWER = 8.0 @pytest.fixture(name="pywemo_model") -def pywemo_model_fixture(): +def pywemo_model_fixture() -> str: """Fixture containing a pywemo class name used by pywemo_device_fixture.""" return "LightSwitch" @pytest.fixture(name="pywemo_registry", autouse=True) -async def async_pywemo_registry_fixture(): +def async_pywemo_registry_fixture() -> Generator[MagicMock]: """Fixture for SubscriptionRegistry instances.""" registry = create_autospec(pywemo.SubscriptionRegistry, instance=True) @@ -52,7 +54,9 @@ def pywemo_discovery_responder_fixture(): @contextlib.contextmanager -def create_pywemo_device(pywemo_registry, pywemo_model): +def create_pywemo_device( + pywemo_registry: MagicMock, pywemo_model: str +) -> pywemo.WeMoDevice: """Create a WeMoDevice instance.""" cls = getattr(pywemo, pywemo_model) device = create_autospec(cls, instance=True) @@ -90,14 +94,18 @@ def create_pywemo_device(pywemo_registry, pywemo_model): @pytest.fixture(name="pywemo_device") -def pywemo_device_fixture(pywemo_registry, pywemo_model): +def pywemo_device_fixture( + pywemo_registry: MagicMock, pywemo_model: str +) -> Generator[pywemo.WeMoDevice]: """Fixture for WeMoDevice instances.""" with create_pywemo_device(pywemo_registry, pywemo_model) as pywemo_device: yield pywemo_device @pytest.fixture(name="pywemo_dli_device") -def pywemo_dli_device_fixture(pywemo_registry, pywemo_model): +def pywemo_dli_device_fixture( + pywemo_registry: MagicMock, pywemo_model: str +) -> Generator[pywemo.WeMoDevice]: """Fixture for Digital Loggers emulated 
instances.""" with create_pywemo_device(pywemo_registry, pywemo_model) as pywemo_dli_device: pywemo_dli_device.model_name = "DLI emulated Belkin Socket" @@ -106,12 +114,14 @@ def pywemo_dli_device_fixture(pywemo_registry, pywemo_model): @pytest.fixture(name="wemo_entity_suffix") -def wemo_entity_suffix_fixture(): +def wemo_entity_suffix_fixture() -> str: """Fixture to select a specific entity for wemo_entity.""" return "" -async def async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix): +async def async_create_wemo_entity( + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Create a hass entity for a wemo device.""" assert await async_setup_component( hass, @@ -134,12 +144,16 @@ async def async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix): @pytest.fixture(name="wemo_entity") -async def async_wemo_entity_fixture(hass, pywemo_device, wemo_entity_suffix): +async def async_wemo_entity_fixture( + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Fixture for a Wemo entity in hass.""" return await async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix) @pytest.fixture(name="wemo_dli_entity") -async def async_wemo_dli_entity_fixture(hass, pywemo_dli_device, wemo_entity_suffix): +async def async_wemo_dli_entity_fixture( + hass: HomeAssistant, pywemo_dli_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Fixture for a Wemo entity in hass.""" return await async_create_wemo_entity(hass, pywemo_dli_device, wemo_entity_suffix) diff --git a/tests/components/whirlpool/conftest.py b/tests/components/whirlpool/conftest.py index a5926f55a94..50620b20b8b 100644 --- a/tests/components/whirlpool/conftest.py +++ b/tests/components/whirlpool/conftest.py @@ -145,6 +145,8 @@ def side_effect_function(*args, **kwargs): if args[0] == "WashCavity_OpStatusBulkDispense1Level": return "3" + return None + 
def get_sensor_mock(said): """Get a mock of a sensor.""" diff --git a/tests/components/whirlpool/test_climate.py b/tests/components/whirlpool/test_climate.py index 18016bd9c67..cdae28f4432 100644 --- a/tests/components/whirlpool/test_climate.py +++ b/tests/components/whirlpool/test_climate.py @@ -264,10 +264,10 @@ async def test_service_calls( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 16}, blocking=True, ) - mock_instance.set_temp.assert_called_once_with(15) + mock_instance.set_temp.assert_called_once_with(16) mock_instance.set_mode.reset_mock() await hass.services.async_call( diff --git a/tests/components/whirlpool/test_diagnostics.py b/tests/components/whirlpool/test_diagnostics.py index 6cfc1b76e38..2a0b2e6fd18 100644 --- a/tests/components/whirlpool/test_diagnostics.py +++ b/tests/components/whirlpool/test_diagnostics.py @@ -29,4 +29,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) - assert result == snapshot(exclude=props("entry_id")) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/whirlpool/test_sensor.py b/tests/components/whirlpool/test_sensor.py index 6af88c8a9f3..548025e29bd 100644 --- a/tests/components/whirlpool/test_sensor.py +++ b/tests/components/whirlpool/test_sensor.py @@ -42,6 +42,8 @@ def side_effect_function_open_door(*args, **kwargs): if args[0] == "WashCavity_OpStatusBulkDispense1Level": return "3" + return None + async def test_dryer_sensor_values( hass: HomeAssistant, diff --git a/tests/components/whois/conftest.py b/tests/components/whois/conftest.py index 5fe420abb92..4bb18581c1a 100644 --- a/tests/components/whois/conftest.py +++ b/tests/components/whois/conftest.py @@ -2,11 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from datetime import 
datetime +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.whois.const import DOMAIN from homeassistant.const import CONF_DOMAIN @@ -74,7 +75,7 @@ def mock_whois_missing_some_attrs() -> Generator[Mock]: class LimitedWhoisMock: """A limited mock of whois_query.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock only attributes the library always sets being available.""" self.creation_date = datetime(2019, 1, 1, 0, 0, 0) self.dnssec = True diff --git a/tests/components/whois/snapshots/test_sensor.ambr b/tests/components/whois/snapshots/test_sensor.ambr index 61762c36e59..4310bc77ebf 100644 --- a/tests/components/whois/snapshots/test_sensor.ambr +++ b/tests/components/whois/snapshots/test_sensor.ambr @@ -67,8 +67,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -144,8 +146,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -225,8 +229,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -302,8 +308,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -379,8 +387,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 
'sw_version': None, @@ -455,8 +465,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -531,8 +543,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -607,8 +621,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -683,8 +699,10 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/wiffi/conftest.py b/tests/components/wiffi/conftest.py index 5f16d676e81..2383906291f 100644 --- a/tests/components/wiffi/conftest.py +++ b/tests/components/wiffi/conftest.py @@ -1,9 +1,9 @@ """Configuration for Wiffi tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/wled/conftest.py b/tests/components/wled/conftest.py index 0d839fc8666..301729843a2 100644 --- a/tests/components/wled/conftest.py +++ b/tests/components/wled/conftest.py @@ -1,11 +1,11 @@ """Fixtures for WLED integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator -from wled import Device as WLEDDevice +from wled import Device as WLEDDevice, Releases from homeassistant.components.wled.const import DOMAIN from homeassistant.const import CONF_HOST @@ -51,7 
+51,24 @@ def device_fixture() -> str: @pytest.fixture -def mock_wled(device_fixture: str) -> Generator[MagicMock]: +def mock_wled_releases() -> Generator[MagicMock]: + """Return a mocked WLEDReleases client.""" + with patch( + "homeassistant.components.wled.coordinator.WLEDReleases", autospec=True + ) as wled_releases_mock: + wled_releases = wled_releases_mock.return_value + wled_releases.releases.return_value = Releases( + beta="1.0.0b5", + stable="0.99.0", + ) + + yield wled_releases + + +@pytest.fixture +def mock_wled( + device_fixture: str, mock_wled_releases: MagicMock +) -> Generator[MagicMock]: """Return a mocked WLED client.""" with ( patch( @@ -60,11 +77,12 @@ def mock_wled(device_fixture: str) -> Generator[MagicMock]: patch("homeassistant.components.wled.config_flow.WLED", new=wled_mock), ): wled = wled_mock.return_value - wled.update.return_value = WLEDDevice( + wled.update.return_value = WLEDDevice.from_dict( load_json_object_fixture(f"{device_fixture}.json", DOMAIN) ) wled.connected = False wled.host = "127.0.0.1" + yield wled diff --git a/tests/components/wled/fixtures/cct.json b/tests/components/wled/fixtures/cct.json new file mode 100644 index 00000000000..da36f8a5f69 --- /dev/null +++ b/tests/components/wled/fixtures/cct.json @@ -0,0 +1,383 @@ +{ + "state": { + "on": true, + "bri": 255, + "transition": 7, + "ps": 2, + "pl": -1, + "nl": { + "on": false, + "dur": 60, + "mode": 1, + "tbri": 0, + "rem": -1 + }, + "udpn": { + "send": false, + "recv": true, + "sgrp": 1, + "rgrp": 1 + }, + "lor": 0, + "mainseg": 0, + "seg": [ + { + "id": 0, + "start": 0, + "stop": 178, + "len": 178, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 53, + "set": 0, + "col": [ + [0, 0, 0, 255], + [0, 0, 0, 0], + [0, 0, 0, 0] + ], + "fx": 0, + "sx": 128, + "ix": 128, + "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, + "sel": true, + "rev": false, + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 + } + ] + 
}, + "info": { + "ver": "0.15.0-b3", + "vid": 2405180, + "cn": "Kōsen", + "release": "ESP32", + "leds": { + "count": 178, + "pwr": 0, + "fps": 0, + "maxpwr": 0, + "maxseg": 32, + "bootps": 1, + "seglc": [7], + "lc": 7, + "rgbw": true, + "wv": 2, + "cct": 4 + }, + "str": false, + "name": "WLED CCT light", + "udpport": 21324, + "simplifiedui": false, + "live": false, + "liveseg": -1, + "lm": "", + "lip": "", + "ws": 1, + "fxcount": 187, + "palcount": 75, + "cpalcount": 4, + "maps": [ + { + "id": 0 + } + ], + "wifi": { + "bssid": "AA:AA:AA:AA:AA:BB", + "rssi": -44, + "signal": 100, + "channel": 11 + }, + "fs": { + "u": 20, + "t": 983, + "pmt": 1721752272 + }, + "ndc": 1, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "clock": 240, + "flash": 4, + "lwip": 0, + "freeheap": 164804, + "uptime": 79769, + "time": "2024-7-24, 14:34:00", + "opt": 79, + "brand": "WLED", + "product": "FOSS", + "mac": "aabbccddeeff", + "ip": "127.0.0.1" + }, + "effects": [ + "Solid", + "Blink", + "Breathe", + "Wipe", + "Wipe Random", + "Random Colors", + "Sweep", + "Dynamic", + "Colorloop", + "Rainbow", + "Scan", + "Scan Dual", + "Fade", + "Theater", + "Theater Rainbow", + "Running", + "Saw", + "Twinkle", + "Dissolve", + "Dissolve Rnd", + "Sparkle", + "Sparkle Dark", + "Sparkle+", + "Strobe", + "Strobe Rainbow", + "Strobe Mega", + "Blink Rainbow", + "Android", + "Chase", + "Chase Random", + "Chase Rainbow", + "Chase Flash", + "Chase Flash Rnd", + "Rainbow Runner", + "Colorful", + "Traffic Light", + "Sweep Random", + "Chase 2", + "Aurora", + "Stream", + "Scanner", + "Lighthouse", + "Fireworks", + "Rain", + "Tetrix", + "Fire Flicker", + "Gradient", + "Loading", + "Rolling Balls", + "Fairy", + "Two Dots", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", + "Tri Wipe", + "Tri Fade", + "Lightning", + "ICU", + "Multi Comet", + "Scanner Dual", + "Stream 2", + "Oscillate", + "Pride 2015", + "Juggle", + "Palette", + "Fire 2012", + "Colorwaves", + "Bpm", + "Fill Noise", + "Noise 1", + 
"Noise 2", + "Noise 3", + "Noise 4", + "Colortwinkles", + "Lake", + "Meteor", + "Meteor Smooth", + "Railway", + "Ripple", + "Twinklefox", + "Twinklecat", + "Halloween Eyes", + "Solid Pattern", + "Solid Pattern Tri", + "Spots", + "Spots Fade", + "Glitter", + "Candle", + "Fireworks Starburst", + "Fireworks 1D", + "Bouncing Balls", + "Sinelon", + "Sinelon Dual", + "Sinelon Rainbow", + "Popcorn", + "Drip", + "Plasma", + "Percent", + "Ripple Rainbow", + "Heartbeat", + "Pacifica", + "Candle Multi", + "Solid Glitter", + "Sunrise", + "Phased", + "Twinkleup", + "Noise Pal", + "Sine", + "Phased Noise", + "Flow", + "Chunchun", + "Dancing Shadows", + "Washing Machine", + "Rotozoomer", + "Blends", + "TV Simulator", + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" + ], + "palettes": [ + "Default", + "* Random Cycle", + "* Color 1", + "* Colors 1&2", + "* Color Gradient", + "* Colors Only", + "Party", + "Cloud", + "Lava", + "Ocean", + "Forest", + "Rainbow", + "Rainbow Bands", + "Sunset", + "Rivendell", + "Breeze", + "Red & Blue", + 
"Yellowout", + "Analogous", + "Splash", + "Pastel", + "Sunset 2", + "Beach", + "Vintage", + "Departure", + "Landscape", + "Beech", + "Sherbet", + "Hult", + "Hult 64", + "Drywet", + "Jul", + "Grintage", + "Rewhi", + "Tertiary", + "Fire", + "Icefire", + "Cyane", + "Light Pink", + "Autumn", + "Magenta", + "Magred", + "Yelmag", + "Yelblu", + "Orange & Teal", + "Tiamat", + "April Night", + "Orangery", + "C9", + "Sakura", + "Aurora", + "Atlantica", + "C9 2", + "C9 New", + "Temperature", + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" + ] +} diff --git a/tests/components/wled/fixtures/rgb.json b/tests/components/wled/fixtures/rgb.json index 21f9b005b72..50a82eb792e 100644 --- a/tests/components/wled/fixtures/rgb.json +++ b/tests/components/wled/fixtures/rgb.json @@ -1,28 +1,41 @@ { "state": { "on": true, - "bri": 127, + "bri": 128, "transition": 7, "ps": -1, "pl": -1, "nl": { "on": false, "dur": 60, - "fade": true, - "tbri": 0 + "mode": 1, + "tbri": 0, + "rem": -1 }, "udpn": { "send": false, - "recv": true + "recv": true, + "sgrp": 1, + "rgrp": 1 }, + "lor": 0, + "mainseg": 1, "seg": [ { "id": 0, "start": 0, - "stop": 19, - "len": 20, + "stop": 15, + "len": 15, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 127, + "set": 0, "col": [ - [255, 159, 0], + [127, 172, 255], [0, 0, 0], [0, 0, 0] ], @@ -30,62 +43,106 @@ "sx": 32, "ix": 128, "pal": 0, - "sel": true, + "c1": 128, + "c2": 128, + "c3": 16, + "sel": false, "rev": false, - "cln": -1 + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 }, { "id": 1, - "start": 20, + "start": 15, "stop": 30, - "len": 10, + "len": 15, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 127, + "set": 0, "col": [ - [0, 255, 123], + [255, 170, 0], [0, 
0, 0], [0, 0, 0] ], - "fx": 1, + "fx": 3, "sx": 16, "ix": 64, "pal": 1, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": true, - "cln": -1 + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 } ] }, "info": { - "ver": "0.8.5", - "version_latest_stable": "0.12.0", - "version_latest_beta": "0.13.0b1", - "vid": 1909122, + "ver": "0.14.4", + "vid": 2405180, "leds": { "count": 30, - "rgbw": false, - "pin": [2], - "pwr": 470, + "pwr": 515, + "fps": 5, "maxpwr": 850, - "maxseg": 10 + "maxseg": 32, + "seglc": [1, 1], + "lc": 1, + "rgbw": false, + "wv": 0, + "cct": 0 }, + "str": false, "name": "WLED RGB Light", "udpport": 21324, "live": false, - "fxcount": 81, - "palcount": 50, + "liveseg": -1, + "lm": "", + "lip": "", + "ws": -1, + "fxcount": 187, + "palcount": 71, + "cpalcount": 0, + "maps": [ + { + "id": 0 + } + ], "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -62, - "signal": 76, + "rssi": -43, + "signal": 100, "channel": 11 }, - "arch": "esp8266", - "core": "2_4_2", - "freeheap": 14600, - "uptime": 32, - "opt": 119, + "fs": { + "u": 12, + "t": 983, + "pmt": 1718827787 + }, + "ndc": 1, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "lwip": 0, + "freeheap": 198384, + "uptime": 966, + "time": "2024-6-19, 20:10:38", + "opt": 79, "brand": "WLED", - "product": "DIY light", - "btype": "bin", + "product": "FOSS", "mac": "aabbccddeeff", "ip": "127.0.0.1" }, @@ -101,21 +158,21 @@ "Colorloop", "Rainbow", "Scan", - "Dual Scan", + "Scan Dual", "Fade", - "Chase", - "Chase Rainbow", + "Theater", + "Theater Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Dark Sparkle", + "Sparkle Dark", "Sparkle+", "Strobe", "Strobe Rainbow", - "Mega Strobe", + "Strobe Mega", "Blink Rainbow", "Android", "Chase", @@ -127,30 +184,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Running 2", - "Red & Blue", + "Chase 2", + "Aurora", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Merry Christmas", + 
"Tetrix", "Fire Flicker", "Gradient", "Loading", - "In Out", - "In In", - "Out Out", - "Out In", - "Circus", - "Halloween", - "Tri Chase", + "Rolling Balls", + "Fairy", + "Two Dots", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Dual Scanner", + "Scanner Dual", "Stream 2", "Oscillate", "Pride 2015", @@ -158,27 +215,133 @@ "Palette", "Fire 2012", "Colorwaves", - "BPM", + "Bpm", "Fill Noise", "Noise 1", "Noise 2", "Noise 3", "Noise 4", - "Colortwinkle", + "Colortwinkles", "Lake", "Meteor", - "Smooth Meteor", + "Meteor Smooth", "Railway", "Ripple", - "Twinklefox" + "Twinklefox", + "Twinklecat", + "Halloween Eyes", + "Solid Pattern", + "Solid Pattern Tri", + "Spots", + "Spots Fade", + "Glitter", + "Candle", + "Fireworks Starburst", + "Fireworks 1D", + "Bouncing Balls", + "Sinelon", + "Sinelon Dual", + "Sinelon Rainbow", + "Popcorn", + "Drip", + "Plasma", + "Percent", + "Ripple Rainbow", + "Heartbeat", + "Pacifica", + "Candle Multi", + "Solid Glitter", + "Sunrise", + "Phased", + "Twinkleup", + "Noise Pal", + "Sine", + "Phased Noise", + "Flow", + "Chunchun", + "Dancing Shadows", + "Washing Machine", + "RSVD", + "Blends", + "TV Simulator", + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + 
"RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" ], "palettes": [ "Default", - "Random Cycle", - "Primary Color", - "Based on Primary", - "Set Colors", - "Based on Set", + "* Random Cycle", + "* Color 1", + "* Colors 1&2", + "* Color Gradient", + "* Colors Only", "Party", "Cloud", "Lava", @@ -195,11 +358,11 @@ "Splash", "Pastel", "Sunset 2", - "Beech", + "Beach", "Vintage", "Departure", "Landscape", - "Beach", + "Beech", "Sherbet", "Hult", "Hult 64", @@ -222,6 +385,27 @@ "April Night", "Orangery", "C9", - "Sakura" + "Sakura", + "Aurora", + "Atlantica", + "C9 2", + "C9 New", + "Temperature", + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" ] } diff --git a/tests/components/wled/fixtures/rgb_no_update.json b/tests/components/wled/fixtures/rgb_no_update.json deleted file mode 100644 index c8aa902cc95..00000000000 --- a/tests/components/wled/fixtures/rgb_no_update.json +++ /dev/null @@ -1,227 +0,0 @@ -{ - "state": { - "on": true, - "bri": 127, - "transition": 7, - "ps": -1, - "pl": -1, - "nl": { - "on": false, - "dur": 60, - "fade": true, - "tbri": 0 - }, - "udpn": { - "send": false, - "recv": true - }, - "seg": [ - { - "id": 0, - "start": 0, - "stop": 19, - "len": 20, - "col": [ - [255, 159, 0], - [0, 0, 0], - [0, 0, 0] - ], - "fx": 0, - "sx": 32, - "ix": 128, - "pal": 0, - "sel": true, - "rev": false, - "cln": -1 - }, - { - "id": 1, - "start": 20, - "stop": 30, - "len": 10, - "col": [ - [0, 255, 123], - [0, 0, 0], - [0, 0, 0] - ], - "fx": 1, - "sx": 16, - "ix": 64, - "pal": 1, - "sel": true, - "rev": true, - "cln": -1 - } - ] - }, - "info": { - "ver": null, - "version_latest_stable": null, - 
"version_latest_beta": null, - "vid": 1909122, - "leds": { - "count": 30, - "rgbw": false, - "pin": [2], - "pwr": 470, - "maxpwr": 850, - "maxseg": 10 - }, - "name": "WLED RGB Light", - "udpport": 21324, - "live": false, - "fxcount": 81, - "palcount": 50, - "wifi": { - "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -62, - "signal": 76, - "channel": 11 - }, - "arch": "esp8266", - "core": "2_4_2", - "freeheap": 14600, - "uptime": 32, - "opt": 119, - "brand": "WLED", - "product": "DIY light", - "btype": "bin", - "mac": "aabbccddeeff", - "ip": "127.0.0.1" - }, - "effects": [ - "Solid", - "Blink", - "Breathe", - "Wipe", - "Wipe Random", - "Random Colors", - "Sweep", - "Dynamic", - "Colorloop", - "Rainbow", - "Scan", - "Dual Scan", - "Fade", - "Chase", - "Chase Rainbow", - "Running", - "Saw", - "Twinkle", - "Dissolve", - "Dissolve Rnd", - "Sparkle", - "Dark Sparkle", - "Sparkle+", - "Strobe", - "Strobe Rainbow", - "Mega Strobe", - "Blink Rainbow", - "Android", - "Chase", - "Chase Random", - "Chase Rainbow", - "Chase Flash", - "Chase Flash Rnd", - "Rainbow Runner", - "Colorful", - "Traffic Light", - "Sweep Random", - "Running 2", - "Red & Blue", - "Stream", - "Scanner", - "Lighthouse", - "Fireworks", - "Rain", - "Merry Christmas", - "Fire Flicker", - "Gradient", - "Loading", - "In Out", - "In In", - "Out Out", - "Out In", - "Circus", - "Halloween", - "Tri Chase", - "Tri Wipe", - "Tri Fade", - "Lightning", - "ICU", - "Multi Comet", - "Dual Scanner", - "Stream 2", - "Oscillate", - "Pride 2015", - "Juggle", - "Palette", - "Fire 2012", - "Colorwaves", - "BPM", - "Fill Noise", - "Noise 1", - "Noise 2", - "Noise 3", - "Noise 4", - "Colortwinkle", - "Lake", - "Meteor", - "Smooth Meteor", - "Railway", - "Ripple", - "Twinklefox" - ], - "palettes": [ - "Default", - "Random Cycle", - "Primary Color", - "Based on Primary", - "Set Colors", - "Based on Set", - "Party", - "Cloud", - "Lava", - "Ocean", - "Forest", - "Rainbow", - "Rainbow Bands", - "Sunset", - "Rivendell", - "Breeze", - "Red & 
Blue", - "Yellowout", - "Analogous", - "Splash", - "Pastel", - "Sunset 2", - "Beech", - "Vintage", - "Departure", - "Landscape", - "Beach", - "Sherbet", - "Hult", - "Hult 64", - "Drywet", - "Jul", - "Grintage", - "Rewhi", - "Tertiary", - "Fire", - "Icefire", - "Cyane", - "Light Pink", - "Autumn", - "Magenta", - "Magred", - "Yelmag", - "Yelblu", - "Orange & Teal", - "Tiamat", - "April Night", - "Orangery", - "C9", - "Sakura" - ] -} diff --git a/tests/components/wled/fixtures/rgb_single_segment.json b/tests/components/wled/fixtures/rgb_single_segment.json index aa0b79e98f5..512ac2a00df 100644 --- a/tests/components/wled/fixtures/rgb_single_segment.json +++ b/tests/components/wled/fixtures/rgb_single_segment.json @@ -1,28 +1,41 @@ { "state": { "on": true, - "bri": 127, + "bri": 128, "transition": 7, "ps": -1, "pl": -1, "nl": { "on": false, "dur": 60, - "fade": true, - "tbri": 0 + "mode": 1, + "tbri": 0, + "rem": -1 }, "udpn": { "send": false, - "recv": true + "recv": true, + "sgrp": 1, + "rgrp": 1 }, + "lor": 0, + "mainseg": 0, "seg": [ { "id": 0, "start": 0, "stop": 30, - "len": 20, + "len": 30, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 127, + "set": 0, "col": [ - [255, 159, 0], + [127, 172, 255], [0, 0, 0], [0, 0, 0] ], @@ -30,44 +43,72 @@ "sx": 32, "ix": 128, "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "cln": -1 + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 } ] }, "info": { - "ver": "0.8.6b1", - "version_latest_stable": "0.8.5", - "version_latest_beta": "0.8.6b2", - "vid": 1909122, + "ver": "1.0.0b4", + "vid": 2405180, "leds": { "count": 30, - "rgbw": false, - "pin": [2], - "pwr": 470, + "pwr": 536, + "fps": 5, "maxpwr": 850, - "maxseg": 10 + "maxseg": 32, + "seglc": [1], + "lc": 1, + "rgbw": false, + "wv": 0, + "cct": 0 }, + "str": false, "name": "WLED RGB Light", "udpport": 21324, "live": false, - "fxcount": 81, - "palcount": 50, + "liveseg": -1, + 
"lm": "", + "lip": "", + "ws": -1, + "fxcount": 187, + "palcount": 71, + "cpalcount": 0, + "maps": [ + { + "id": 0 + } + ], "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -62, - "signal": 76, + "rssi": -44, + "signal": 100, "channel": 11 }, - "arch": "esp8266", - "core": "2_4_2", - "freeheap": 14600, - "uptime": 32, - "opt": 119, + "fs": { + "u": 12, + "t": 983, + "pmt": 0 + }, + "ndc": 1, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "lwip": 0, + "freeheap": 196960, + "uptime": 461, + "time": "1970-1-1, 00:07:41", + "opt": 79, "brand": "WLED", - "product": "DIY light", - "btype": "bin", + "product": "FOSS", "mac": "aabbccddeeff", "ip": "127.0.0.1" }, @@ -83,21 +124,21 @@ "Colorloop", "Rainbow", "Scan", - "Dual Scan", + "Scan Dual", "Fade", - "Chase", - "Chase Rainbow", + "Theater", + "Theater Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Dark Sparkle", + "Sparkle Dark", "Sparkle+", "Strobe", "Strobe Rainbow", - "Mega Strobe", + "Strobe Mega", "Blink Rainbow", "Android", "Chase", @@ -109,30 +150,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Running 2", - "Red & Blue", + "Chase 2", + "Aurora", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Merry Christmas", + "Tetrix", "Fire Flicker", "Gradient", "Loading", - "In Out", - "In In", - "Out Out", - "Out In", - "Circus", - "Halloween", - "Tri Chase", + "Rolling Balls", + "Fairy", + "Two Dots", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Dual Scanner", + "Scanner Dual", "Stream 2", "Oscillate", "Pride 2015", @@ -140,27 +181,133 @@ "Palette", "Fire 2012", "Colorwaves", - "BPM", + "Bpm", "Fill Noise", "Noise 1", "Noise 2", "Noise 3", "Noise 4", - "Colortwinkle", + "Colortwinkles", "Lake", "Meteor", - "Smooth Meteor", + "Meteor Smooth", "Railway", "Ripple", - "Twinklefox" + "Twinklefox", + "Twinklecat", + "Halloween Eyes", + "Solid Pattern", + "Solid Pattern Tri", + "Spots", + 
"Spots Fade", + "Glitter", + "Candle", + "Fireworks Starburst", + "Fireworks 1D", + "Bouncing Balls", + "Sinelon", + "Sinelon Dual", + "Sinelon Rainbow", + "Popcorn", + "Drip", + "Plasma", + "Percent", + "Ripple Rainbow", + "Heartbeat", + "Pacifica", + "Candle Multi", + "Solid Glitter", + "Sunrise", + "Phased", + "Twinkleup", + "Noise Pal", + "Sine", + "Phased Noise", + "Flow", + "Chunchun", + "Dancing Shadows", + "Washing Machine", + "RSVD", + "Blends", + "TV Simulator", + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" ], "palettes": [ "Default", - "Random Cycle", - "Primary Color", - "Based on Primary", - "Set Colors", - "Based on Set", + "* Random Cycle", + "* Color 1", + "* Colors 1&2", + "* Color Gradient", + "* Colors Only", "Party", "Cloud", "Lava", @@ -177,11 +324,11 @@ "Splash", "Pastel", "Sunset 2", - "Beech", + "Beach", "Vintage", "Departure", "Landscape", - "Beach", + "Beech", "Sherbet", "Hult", "Hult 64", @@ -204,6 +351,27 @@ "April Night", "Orangery", "C9", - "Sakura" + "Sakura", + "Aurora", + "Atlantica", + "C9 
2", + "C9 New", + "Temperature", + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" ] } diff --git a/tests/components/wled/fixtures/rgb_websocket.json b/tests/components/wled/fixtures/rgb_websocket.json index 4a0ed7b1ee5..f5a3e715654 100644 --- a/tests/components/wled/fixtures/rgb_websocket.json +++ b/tests/components/wled/fixtures/rgb_websocket.json @@ -1,26 +1,22 @@ { "state": { "on": true, - "bri": 255, + "bri": 128, "transition": 7, "ps": -1, "pl": -1, - "ccnf": { - "min": 1, - "max": 5, - "time": 12 - }, "nl": { "on": false, "dur": 60, - "fade": true, "mode": 1, "tbri": 0, "rem": -1 }, "udpn": { "send": false, - "recv": true + "recv": true, + "sgrp": 1, + "rgrp": 1 }, "lor": 0, "mainseg": 0, @@ -28,70 +24,89 @@ { "id": 0, "start": 0, - "stop": 13, - "len": 13, + "stop": 30, + "len": 30, "grp": 1, "spc": 0, + "of": 0, "on": true, + "frz": false, "bri": 255, + "cct": 127, + "set": 0, "col": [ - [255, 181, 218], + [127, 172, 255], [0, 0, 0], [0, 0, 0] ], "fx": 0, - "sx": 43, + "sx": 128, "ix": 128, - "pal": 2, + "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "mi": false + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 } ] }, "info": { - "ver": "0.12.0-b2", - "version_latest_stable": "0.11.0", - "version_latest_beta": "0.12.0-b2", - "vid": 2103220, + "ver": "0.99.0", + "vid": 2405180, "leds": { - "count": 13, + "count": 30, + "pwr": 536, + "fps": 5, + "maxpwr": 850, + "maxseg": 32, + "seglc": [1], + "lc": 1, "rgbw": false, - "wv": false, - "pin": [2], - "pwr": 266, - "fps": 2, - "maxpwr": 1000, - "maxseg": 12, - "seglock": false + "wv": 0, + "cct": 0 }, "str": false, "name": "WLED WebSocket", "udpport": 21324, "live": false, + "liveseg": -1, "lm": "", "lip": "", "ws": 0, - "fxcount": 118, - "palcount": 56, + "fxcount": 
187, + "palcount": 71, + "cpalcount": 0, + "maps": [ + { + "id": 0 + } + ], "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -68, - "signal": 64, - "channel": 6 + "rssi": -44, + "signal": 100, + "channel": 11 }, "fs": { - "u": 40, - "t": 1024, - "pmt": 1623156685 + "u": 12, + "t": 983, + "pmt": 0 }, "ndc": 1, - "arch": "esp8266", - "core": "2_7_4_7", - "lwip": 1, - "freeheap": 22752, - "uptime": 258411, - "opt": 127, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "lwip": 0, + "freeheap": 196960, + "uptime": 461, + "time": "1970-1-1, 00:07:41", + "opt": 79, "brand": "WLED", "product": "FOSS", "mac": "aabbccddeeff", @@ -135,7 +150,7 @@ "Colorful", "Traffic Light", "Sweep Random", - "Running 2", + "Chase 2", "Aurora", "Stream", "Scanner", @@ -146,13 +161,13 @@ "Fire Flicker", "Gradient", "Loading", - "Police", - "Police All", + "Rolling Balls", + "Fairy", "Two Dots", - "Two Areas", - "Circus", - "Halloween", - "Tri Chase", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", "Tri Wipe", "Tri Fade", "Lightning", @@ -212,10 +227,79 @@ "Chunchun", "Dancing Shadows", "Washing Machine", - "Candy Cane", + "RSVD", "Blends", "TV Simulator", - "Dynamic Smooth" + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", 
+ "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" ], "palettes": [ "Default", @@ -240,11 +324,11 @@ "Splash", "Pastel", "Sunset 2", - "Beech", + "Beach", "Vintage", "Departure", "Landscape", - "Beach", + "Beech", "Sherbet", "Hult", "Hult 64", @@ -273,6 +357,21 @@ "C9 2", "C9 New", "Temperature", - "Aurora 2" + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" ] } diff --git a/tests/components/wled/fixtures/rgbw.json b/tests/components/wled/fixtures/rgbw.json index 100b3936900..285842605ae 100644 --- a/tests/components/wled/fixtures/rgbw.json +++ b/tests/components/wled/fixtures/rgbw.json @@ -1,74 +1,115 @@ { "state": { "on": true, - "bri": 140, + "bri": 128, "transition": 7, - "ps": 1, - "pl": 3, + "ps": -1, + "pl": -1, "nl": { "on": false, "dur": 60, - "fade": true, - "tbri": 0 + "mode": 1, + "tbri": 0, + "rem": -1 }, "udpn": { "send": false, - "recv": true + "recv": true, + "sgrp": 1, + "rgrp": 1 }, + "lor": 0, + "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 13, - "len": 13, + "stop": 30, + "len": 30, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 127, + "set": 0, "col": [ [255, 0, 0, 139], [0, 0, 0, 0], [0, 0, 0, 0] ], - "fx": 9, - "sx": 165, + "fx": 0, + "sx": 128, "ix": 128, "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "cln": -1 + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 } ] }, "info": { - "ver": "0.8.6b4", - "version_latest_stable": "0.8.6", - "version_latest_beta": "0.8.6b5", - "vid": 1910255, + "ver": "0.99.0b1", + "vid": 2405180, "leds": { - "count": 13, - "rgbw": true, - "pin": [2], - "pwr": 208, + "count": 30, + 
"pwr": 536, + "fps": 5, "maxpwr": 850, - "maxseg": 10 + "maxseg": 32, + "seglc": [3], + "lc": 3, + "rgbw": true, + "wv": 0, + "cct": 0 }, + "str": false, "name": "WLED RGBW Light", "udpport": 21324, "live": false, - "fxcount": 83, - "palcount": 50, + "liveseg": -1, + "lm": "", + "lip": "", + "ws": -1, + "fxcount": 187, + "palcount": 71, + "cpalcount": 0, + "maps": [ + { + "id": 0 + } + ], "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -62, - "signal": 76, + "rssi": -44, + "signal": 100, "channel": 11 }, - "arch": "esp8266", - "core": "2_5_2", - "freeheap": 20136, - "uptime": 5591, - "opt": 119, + "fs": { + "u": 12, + "t": 983, + "pmt": 0 + }, + "ndc": 1, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "lwip": 0, + "freeheap": 196960, + "uptime": 461, + "time": "1970-1-1, 00:07:41", + "opt": 79, "brand": "WLED", - "product": "DIY light", - "btype": "bin", - "mac": "aabbccddee11", + "product": "FOSS", + "mac": "aabbccddeeff", "ip": "127.0.0.1" }, "effects": [ @@ -83,21 +124,21 @@ "Colorloop", "Rainbow", "Scan", - "Dual Scan", + "Scan Dual", "Fade", - "Chase", - "Chase Rainbow", + "Theater", + "Theater Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Dark Sparkle", + "Sparkle Dark", "Sparkle+", "Strobe", "Strobe Rainbow", - "Mega Strobe", + "Strobe Mega", "Blink Rainbow", "Android", "Chase", @@ -109,30 +150,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Running 2", - "Red & Blue", + "Chase 2", + "Aurora", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Merry Christmas", + "Tetrix", "Fire Flicker", "Gradient", "Loading", - "In Out", - "In In", - "Out Out", - "Out In", - "Circus", - "Halloween", - "Tri Chase", + "Rolling Balls", + "Fairy", + "Two Dots", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Dual Scanner", + "Scanner Dual", "Stream 2", "Oscillate", "Pride 2015", @@ -140,7 +181,7 @@ "Palette", "Fire 2012", "Colorwaves", - "BPM", + 
"Bpm", "Fill Noise", "Noise 1", "Noise 2", @@ -149,20 +190,124 @@ "Colortwinkles", "Lake", "Meteor", - "Smooth Meteor", + "Meteor Smooth", "Railway", "Ripple", "Twinklefox", "Twinklecat", - "Halloween Eyes" + "Halloween Eyes", + "Solid Pattern", + "Solid Pattern Tri", + "Spots", + "Spots Fade", + "Glitter", + "Candle", + "Fireworks Starburst", + "Fireworks 1D", + "Bouncing Balls", + "Sinelon", + "Sinelon Dual", + "Sinelon Rainbow", + "Popcorn", + "Drip", + "Plasma", + "Percent", + "Ripple Rainbow", + "Heartbeat", + "Pacifica", + "Candle Multi", + "Solid Glitter", + "Sunrise", + "Phased", + "Twinkleup", + "Noise Pal", + "Sine", + "Phased Noise", + "Flow", + "Chunchun", + "Dancing Shadows", + "Washing Machine", + "RSVD", + "Blends", + "TV Simulator", + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" ], "palettes": [ "Default", - "Random Cycle", - "Primary Color", - "Based on Primary", - "Set Colors", - "Based on Set", + "* Random Cycle", + "* Color 1", + "* Colors 1&2", + "* Color Gradient", + "* Colors Only", "Party", "Cloud", 
"Lava", @@ -179,11 +324,11 @@ "Splash", "Pastel", "Sunset 2", - "Beech", + "Beach", "Vintage", "Departure", "Landscape", - "Beach", + "Beech", "Sherbet", "Hult", "Hult 64", @@ -206,36 +351,82 @@ "April Night", "Orangery", "C9", - "Sakura" + "Sakura", + "Aurora", + "Atlantica", + "C9 2", + "C9 New", + "Temperature", + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" ], "presets": { "0": {}, "1": { - "on": false, - "bri": 255, + "on": true, + "bri": 128, "transition": 7, "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 13, + "stop": 131, "grp": 1, "spc": 0, + "of": 0, "on": true, + "frz": false, "bri": 255, + "cct": 127, + "set": 0, + "n": "", "col": [ - [97, 144, 255], + [40, 255, 3], [0, 0, 0], [0, 0, 0] ], - "fx": 9, - "sx": 183, - "ix": 255, - "pal": 1, + "fx": 0, + "sx": 128, + "ix": 128, + "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "mi": false + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 }, { "stop": 0 @@ -274,31 +465,56 @@ "n": "Preset 1" }, "2": { - "on": false, - "bri": 255, + "on": true, + "bri": 128, "transition": 7, "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 13, + "stop": 131, "grp": 1, "spc": 0, + "of": 0, "on": true, + "frz": false, "bri": 255, + "cct": 127, + "set": 0, + "n": "", "col": [ - [97, 144, 255], + [51, 88, 255], [0, 0, 0], [0, 0, 0] ], - "fx": 9, - "sx": 183, - "ix": 255, - "pal": 1, + "fx": 0, + "sx": 128, + "ix": 128, + "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "mi": false + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 }, { "stop": 0 
@@ -339,23 +555,25 @@ "3": { "playlist": { "ps": [1, 2], - "dur": [30, 30], + "dur": [300, 300], "transition": [7, 7], "repeat": 0, - "r": false, - "end": 0 + "end": 0, + "r": 0 }, + "on": true, "n": "Playlist 1" }, "4": { "playlist": { - "ps": [1, 2], - "dur": [30, 30], + "ps": [2, 0], + "dur": [300, 300], "transition": [7, 7], "repeat": 0, - "r": false, - "end": 0 + "end": 0, + "r": 0 }, + "on": true, "n": "Playlist 2" } } diff --git a/tests/components/wled/snapshots/test_button.ambr b/tests/components/wled/snapshots/test_button.ambr index b489bcc0a71..4e6260bc9bd 100644 --- a/tests/components/wled/snapshots/test_button.ambr +++ b/tests/components/wled/snapshots/test_button.ambr @@ -59,7 +59,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -71,12 +71,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/snapshots/test_diagnostics.ambr b/tests/components/wled/snapshots/test_diagnostics.ambr index 643e5fe4ad0..90732c02c36 100644 --- a/tests/components/wled/snapshots/test_diagnostics.ambr +++ b/tests/components/wled/snapshots/test_diagnostics.ambr @@ -5,22 +5,109 @@ '0': 'Solid', '1': 'Blink', '10': 'Scan', - '11': 'Dual Scan', + '100': 'Heartbeat', + '101': 'Pacifica', + '102': 'Candle Multi', + '103': 'Solid Glitter', + '104': 'Sunrise', + '105': 'Phased', + '106': 'Twinkleup', + '107': 'Noise Pal', + '108': 'Sine', + '109': 'Phased Noise', + '11': 'Scan Dual', + '110': 'Flow', + '111': 'Chunchun', + '112': 'Dancing Shadows', + '113': 'Washing Machine', + '114': 'RSVD', + '115': 'Blends', + '116': 'TV Simulator', + '117': 'Dynamic Smooth', + '118': 'Spaceships', + '119': 'Crazy 
Bees', '12': 'Fade', - '13': 'Chase', - '14': 'Chase Rainbow', + '120': 'Ghost Rider', + '121': 'Blobs', + '122': 'Scrolling Text', + '123': 'Drift Rose', + '124': 'Distortion Waves', + '125': 'Soap', + '126': 'Octopus', + '127': 'Waving Cell', + '128': 'Pixels', + '129': 'Pixelwave', + '13': 'Theater', + '130': 'Juggles', + '131': 'Matripix', + '132': 'Gravimeter', + '133': 'Plasmoid', + '134': 'Puddles', + '135': 'Midnoise', + '136': 'Noisemeter', + '137': 'Freqwave', + '138': 'Freqmatrix', + '139': 'GEQ', + '14': 'Theater Rainbow', + '140': 'Waterfall', + '141': 'Freqpixels', + '142': 'RSVD', + '143': 'Noisefire', + '144': 'Puddlepeak', + '145': 'Noisemove', + '146': 'Noise2D', + '147': 'Perlin Move', + '148': 'Ripple Peak', + '149': 'Firenoise', '15': 'Running', + '150': 'Squared Swirl', + '151': 'RSVD', + '152': 'DNA', + '153': 'Matrix', + '154': 'Metaballs', + '155': 'Freqmap', + '156': 'Gravcenter', + '157': 'Gravcentric', + '158': 'Gravfreq', + '159': 'DJ Light', '16': 'Saw', + '160': 'Funky Plank', + '161': 'RSVD', + '162': 'Pulser', + '163': 'Blurz', + '164': 'Drift', + '165': 'Waverly', + '166': 'Sun Radiation', + '167': 'Colored Bursts', + '168': 'Julia', + '169': 'RSVD', '17': 'Twinkle', + '170': 'RSVD', + '171': 'RSVD', + '172': 'Game Of Life', + '173': 'Tartan', + '174': 'Polar Lights', + '175': 'Swirl', + '176': 'Lissajous', + '177': 'Frizzles', + '178': 'Plasma Ball', + '179': 'Flow Stripe', '18': 'Dissolve', + '180': 'Hiphotic', + '181': 'Sindots', + '182': 'DNA Spiral', + '183': 'Black Hole', + '184': 'Wavesins', + '185': 'Rocktaves', + '186': 'Akemi', '19': 'Dissolve Rnd', '2': 'Breathe', '20': 'Sparkle', - '21': 'Dark Sparkle', + '21': 'Sparkle Dark', '22': 'Sparkle+', '23': 'Strobe', '24': 'Strobe Rainbow', - '25': 'Mega Strobe', + '25': 'Strobe Mega', '26': 'Blink Rainbow', '27': 'Android', '28': 'Chase', @@ -33,33 +120,33 @@ '34': 'Colorful', '35': 'Traffic Light', '36': 'Sweep Random', - '37': 'Running 2', - '38': 'Red & Blue', + '37': 
'Chase 2', + '38': 'Aurora', '39': 'Stream', '4': 'Wipe Random', '40': 'Scanner', '41': 'Lighthouse', '42': 'Fireworks', '43': 'Rain', - '44': 'Merry Christmas', + '44': 'Tetrix', '45': 'Fire Flicker', '46': 'Gradient', '47': 'Loading', - '48': 'In Out', - '49': 'In In', + '48': 'Rolling Balls', + '49': 'Fairy', '5': 'Random Colors', - '50': 'Out Out', - '51': 'Out In', - '52': 'Circus', - '53': 'Halloween', - '54': 'Tri Chase', + '50': 'Two Dots', + '51': 'Fairytwinkle', + '52': 'Running Dual', + '53': 'RSVD', + '54': 'Chase 3', '55': 'Tri Wipe', '56': 'Tri Fade', '57': 'Lightning', '58': 'ICU', '59': 'Multi Comet', '6': 'Sweep', - '60': 'Dual Scanner', + '60': 'Scanner Dual', '61': 'Stream 2', '62': 'Oscillate', '63': 'Pride 2015', @@ -67,55 +154,82 @@ '65': 'Palette', '66': 'Fire 2012', '67': 'Colorwaves', - '68': 'BPM', + '68': 'Bpm', '69': 'Fill Noise', '7': 'Dynamic', '70': 'Noise 1', '71': 'Noise 2', '72': 'Noise 3', '73': 'Noise 4', - '74': 'Colortwinkle', + '74': 'Colortwinkles', '75': 'Lake', '76': 'Meteor', - '77': 'Smooth Meteor', + '77': 'Meteor Smooth', '78': 'Railway', '79': 'Ripple', '8': 'Colorloop', '80': 'Twinklefox', + '81': 'Twinklecat', + '82': 'Halloween Eyes', + '83': 'Solid Pattern', + '84': 'Solid Pattern Tri', + '85': 'Spots', + '86': 'Spots Fade', + '87': 'Glitter', + '88': 'Candle', + '89': 'Fireworks Starburst', '9': 'Rainbow', + '90': 'Fireworks 1D', + '91': 'Bouncing Balls', + '92': 'Sinelon', + '93': 'Sinelon Dual', + '94': 'Sinelon Rainbow', + '95': 'Popcorn', + '96': 'Drip', + '97': 'Plasma', + '98': 'Percent', + '99': 'Ripple Rainbow', }), 'info': dict({ - 'architecture': 'esp8266', - 'arduino_core_version': '2.4.2', + 'arch': 'esp32', 'brand': 'WLED', - 'build_type': 'bin', - 'effect_count': 81, - 'filesystem': None, - 'free_heap': 14600, + 'core': 'v3.3.6-16-gcc5440f6a2', + 'freeheap': 198384, + 'fs': dict({ + 'pmt': 1718827787.0, + 't': 983, + 'u': 12, + }), + 'fxcount': 187, 'ip': '127.0.0.1', 'leds': dict({ - '__type': "", - 
'repr': 'Leds(cct=False, count=30, fps=None, light_capabilities=None, max_power=850, max_segments=10, power=470, rgbw=False, wv=True, segment_light_capabilities=None)', + 'count': 30, + 'fps': 5, + 'light_capabilities': 1, + 'max_power': 850, + 'max_segments': 32, + 'power': 515, + 'segment_light_capabilities': list([ + 1, + 1, + ]), }), + 'lip': '', 'live': False, - 'live_ip': 'Unknown', - 'live_mode': 'Unknown', - 'mac_address': 'aabbccddeeff', + 'lm': '', + 'mac': 'aabbccddeeff', 'name': 'WLED RGB Light', - 'pallet_count': 50, - 'product': 'DIY light', - 'udp_port': 21324, - 'uptime': 32, - 'version': '0.8.5', - 'version_id': 1909122, - 'version_latest_beta': '0.13.0b1', - 'version_latest_stable': '0.12.0', - 'websocket': None, + 'palcount': 71, + 'product': 'FOSS', + 'udpport': 21324, + 'uptime': 966, + 'ver': '0.14.4', + 'vid': 2405180, 'wifi': '**REDACTED**', }), 'palettes': dict({ '0': 'Default', - '1': 'Random Cycle', + '1': '* Random Cycle', '10': 'Forest', '11': 'Rainbow', '12': 'Rainbow Bands', @@ -126,18 +240,18 @@ '17': 'Yellowout', '18': 'Analogous', '19': 'Splash', - '2': 'Primary Color', + '2': '* Color 1', '20': 'Pastel', '21': 'Sunset 2', - '22': 'Beech', + '22': 'Beach', '23': 'Vintage', '24': 'Departure', '25': 'Landscape', - '26': 'Beach', + '26': 'Beech', '27': 'Sherbet', '28': 'Hult', '29': 'Hult 64', - '3': 'Based on Primary', + '3': '* Colors 1&2', '30': 'Drywet', '31': 'Jul', '32': 'Grintage', @@ -148,7 +262,7 @@ '37': 'Cyane', '38': 'Light Pink', '39': 'Autumn', - '4': 'Set Colors', + '4': '* Color Gradient', '40': 'Magenta', '41': 'Magred', '42': 'Yelmag', @@ -159,9 +273,30 @@ '47': 'Orangery', '48': 'C9', '49': 'Sakura', - '5': 'Based on Set', + '5': '* Colors Only', + '50': 'Aurora', + '51': 'Atlantica', + '52': 'C9 2', + '53': 'C9 New', + '54': 'Temperature', + '55': 'Aurora 2', + '56': 'Retro Clown', + '57': 'Candy', + '58': 'Toxy Reaf', + '59': 'Fairy Reaf', '6': 'Party', + '60': 'Semi Blue', + '61': 'Pink Candy', + '62': 'Red 
Reaf', + '63': 'Aqua Flash', + '64': 'Yelblu Hot', + '65': 'Lite Light', + '66': 'Red Flash', + '67': 'Blink Red', + '68': 'Red Shift', + '69': 'Red Tide', '7': 'Cloud', + '70': 'Candy2', '8': 'Lava', '9': 'Ocean', }), @@ -170,30 +305,90 @@ 'presets': dict({ }), 'state': dict({ - 'brightness': 127, + 'bri': 128, 'lor': 0, - 'nightlight': dict({ - '__type': "", - 'repr': 'Nightlight(duration=60, fade=True, on=False, mode=, target_brightness=0)', + 'nl': dict({ + 'dur': 60, + 'mode': 1, + 'on': False, + 'tbri': 0, }), 'on': True, - 'playlist': -1, - 'preset': -1, - 'segments': list([ - dict({ - '__type': "", - 'repr': "Segment(brightness=127, clones=-1, color_primary=(255, 159, 0), color_secondary=(0, 0, 0), color_tertiary=(0, 0, 0), effect=Effect(effect_id=0, name='Solid'), intensity=128, length=20, on=True, palette=Palette(name='Default', palette_id=0), reverse=False, segment_id=0, selected=True, speed=32, start=0, stop=19)", + 'seg': dict({ + '0': dict({ + 'bri': 255, + 'cct': 127, + 'cln': -1, + 'col': list([ + list([ + 127, + 172, + 255, + ]), + list([ + 0, + 0, + 0, + ]), + list([ + 0, + 0, + 0, + ]), + ]), + 'fx': 0, + 'id': 0, + 'ix': 128, + 'len': 15, + 'on': True, + 'pal': 0, + 'rev': False, + 'sel': False, + 'start': 0, + 'stop': 15, + 'sx': 32, }), - dict({ - '__type': "", - 'repr': "Segment(brightness=127, clones=-1, color_primary=(0, 255, 123), color_secondary=(0, 0, 0), color_tertiary=(0, 0, 0), effect=Effect(effect_id=1, name='Blink'), intensity=64, length=10, on=True, palette=Palette(name='Random Cycle', palette_id=1), reverse=True, segment_id=1, selected=True, speed=16, start=20, stop=30)", + '1': dict({ + 'bri': 255, + 'cct': 127, + 'cln': -1, + 'col': list([ + list([ + 255, + 170, + 0, + ]), + list([ + 0, + 0, + 0, + ]), + list([ + 0, + 0, + 0, + ]), + ]), + 'fx': 3, + 'id': 1, + 'ix': 64, + 'len': 15, + 'on': True, + 'pal': 1, + 'rev': True, + 'sel': True, + 'start': 15, + 'stop': 30, + 'sx': 16, }), - ]), - 'sync': dict({ - '__type': "", - 
'repr': 'Sync(receive=True, send=False)', }), 'transition': 7, + 'udpn': dict({ + 'recv': True, + 'rgrp': 1, + 'send': False, + 'sgrp': 1, + }), }), }) # --- diff --git a/tests/components/wled/snapshots/test_number.ambr b/tests/components/wled/snapshots/test_number.ambr index c3440108148..0fb6cff3d51 100644 --- a/tests/components/wled/snapshots/test_number.ambr +++ b/tests/components/wled/snapshots/test_number.ambr @@ -67,7 +67,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -79,12 +79,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -156,7 +158,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -168,181 +170,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', - 'via_device_id': None, - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'WLED RGB Light Segment 1 Intensity', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.wled_rgb_light_segment_1_intensity', - 'last_changed': , - 'last_updated': , - 'state': '64', - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity].1 - EntityRegistryEntrySnapshot({ - '_display_repr': , - '_partial_repr': , - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 
255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.wled_rgb_light_segment_1_intensity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Segment 1 Intensity', - 'platform': 'wled', - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'aabbccddeeff_intensity_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity].2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://127.0.0.1', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': 'esp8266', - 'id': , - 'identifiers': set({ - tuple( - 'wled', - 'aabbccddeeff', - ), - }), - 'is_new': False, - 'manufacturer': 'WLED', - 'model': 'DIY light', - 'name': 'WLED RGB Light', - 'name_by_user': None, - 'suggested_area': None, - 'sw_version': '0.8.5', - 'via_device_id': None, - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'WLED RGB Light Segment 1 Speed', - 'icon': 'mdi:speedometer', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.wled_rgb_light_segment_1_speed', - 'last_changed': , - 'last_updated': , - 'state': '16', - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed].1 - EntityRegistryEntrySnapshot({ - '_display_repr': , - '_partial_repr': , - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.wled_rgb_light_segment_1_speed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:speedometer', - 'original_name': 'Segment 1 Speed', - 'platform': 'wled', - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'aabbccddeeff_speed_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed].2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://127.0.0.1', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': 'esp8266', - 'id': , - 'identifiers': set({ - tuple( - 'wled', - 'aabbccddeeff', - ), - }), - 'is_new': False, - 'manufacturer': 'WLED', - 'model': 'DIY light', - 'name': 'WLED RGB Light', - 'name_by_user': None, - 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/snapshots/test_select.ambr b/tests/components/wled/snapshots/test_select.ambr index 6d64ec43658..2998583f8b3 100644 --- a/tests/components/wled/snapshots/test_select.ambr +++ b/tests/components/wled/snapshots/test_select.ambr @@ -69,7 +69,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -81,12 +81,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -95,56 +97,77 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'WLED RGB Light Segment 1 color 
palette', 'options': list([ - 'Analogous', - 'April Night', - 'Autumn', - 'Based on Primary', - 'Based on Set', - 'Beach', - 'Beech', - 'Breeze', - 'C9', - 'Cloud', - 'Cyane', 'Default', - 'Departure', - 'Drywet', - 'Fire', - 'Forest', - 'Grintage', - 'Hult', - 'Hult 64', - 'Icefire', - 'Jul', - 'Landscape', - 'Lava', - 'Light Pink', - 'Magenta', - 'Magred', - 'Ocean', - 'Orange & Teal', - 'Orangery', + '* Random Cycle', + '* Color 1', + '* Colors 1&2', + '* Color Gradient', + '* Colors Only', 'Party', - 'Pastel', - 'Primary Color', + 'Cloud', + 'Lava', + 'Ocean', + 'Forest', 'Rainbow', 'Rainbow Bands', - 'Random Cycle', - 'Red & Blue', - 'Rewhi', - 'Rivendell', - 'Sakura', - 'Set Colors', - 'Sherbet', - 'Splash', 'Sunset', - 'Sunset 2', - 'Tertiary', - 'Tiamat', - 'Vintage', - 'Yelblu', + 'Rivendell', + 'Breeze', + 'Red & Blue', 'Yellowout', + 'Analogous', + 'Splash', + 'Pastel', + 'Sunset 2', + 'Beach', + 'Vintage', + 'Departure', + 'Landscape', + 'Beech', + 'Sherbet', + 'Hult', + 'Hult 64', + 'Drywet', + 'Jul', + 'Grintage', + 'Rewhi', + 'Tertiary', + 'Fire', + 'Icefire', + 'Cyane', + 'Light Pink', + 'Autumn', + 'Magenta', + 'Magred', 'Yelmag', + 'Yelblu', + 'Orange & Teal', + 'Tiamat', + 'April Night', + 'Orangery', + 'C9', + 'Sakura', + 'Aurora', + 'Atlantica', + 'C9 2', + 'C9 New', + 'Temperature', + 'Aurora 2', + 'Retro Clown', + 'Candy', + 'Toxy Reaf', + 'Fairy Reaf', + 'Semi Blue', + 'Pink Candy', + 'Red Reaf', + 'Aqua Flash', + 'Yelblu Hot', + 'Lite Light', + 'Red Flash', + 'Blink Red', + 'Red Shift', + 'Red Tide', + 'Candy2', ]), }), 'context': , @@ -152,7 +175,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Random Cycle', + 'state': '* Random Cycle', }) # --- # name: test_color_palette_state[rgb-select.wled_rgb_light_segment_1_color_palette-Icefire-segment-called_with0].1 @@ -162,56 +185,77 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Analogous', - 'April Night', - 'Autumn', - 'Based on Primary', - 'Based on 
Set', - 'Beach', - 'Beech', - 'Breeze', - 'C9', - 'Cloud', - 'Cyane', 'Default', - 'Departure', - 'Drywet', - 'Fire', - 'Forest', - 'Grintage', - 'Hult', - 'Hult 64', - 'Icefire', - 'Jul', - 'Landscape', - 'Lava', - 'Light Pink', - 'Magenta', - 'Magred', - 'Ocean', - 'Orange & Teal', - 'Orangery', + '* Random Cycle', + '* Color 1', + '* Colors 1&2', + '* Color Gradient', + '* Colors Only', 'Party', - 'Pastel', - 'Primary Color', + 'Cloud', + 'Lava', + 'Ocean', + 'Forest', 'Rainbow', 'Rainbow Bands', - 'Random Cycle', - 'Red & Blue', - 'Rewhi', - 'Rivendell', - 'Sakura', - 'Set Colors', - 'Sherbet', - 'Splash', 'Sunset', - 'Sunset 2', - 'Tertiary', - 'Tiamat', - 'Vintage', - 'Yelblu', + 'Rivendell', + 'Breeze', + 'Red & Blue', 'Yellowout', + 'Analogous', + 'Splash', + 'Pastel', + 'Sunset 2', + 'Beach', + 'Vintage', + 'Departure', + 'Landscape', + 'Beech', + 'Sherbet', + 'Hult', + 'Hult 64', + 'Drywet', + 'Jul', + 'Grintage', + 'Rewhi', + 'Tertiary', + 'Fire', + 'Icefire', + 'Cyane', + 'Light Pink', + 'Autumn', + 'Magenta', + 'Magred', 'Yelmag', + 'Yelblu', + 'Orange & Teal', + 'Tiamat', + 'April Night', + 'Orangery', + 'C9', + 'Sakura', + 'Aurora', + 'Atlantica', + 'C9 2', + 'C9 New', + 'Temperature', + 'Aurora 2', + 'Retro Clown', + 'Candy', + 'Toxy Reaf', + 'Fairy Reaf', + 'Semi Blue', + 'Pink Candy', + 'Red Reaf', + 'Aqua Flash', + 'Yelblu Hot', + 'Lite Light', + 'Red Flash', + 'Blink Red', + 'Red Shift', + 'Red Tide', + 'Candy2', ]), }), 'config_entry_id': , @@ -254,7 +298,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -266,12 +310,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -289,7 +335,7 @@ 
'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Playlist 1', + 'state': 'unknown', }) # --- # name: test_color_palette_state[rgbw-select.wled_rgbw_light_playlist-Playlist 2-playlist-called_with2].1 @@ -326,7 +372,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'playlist', - 'unique_id': 'aabbccddee11_playlist', + 'unique_id': 'aabbccddeeff_playlist', 'unit_of_measurement': None, }) # --- @@ -338,29 +384,31 @@ 'connections': set({ tuple( 'mac', - 'aa:bb:cc:dd:ee:11', + 'aa:bb:cc:dd:ee:ff', ), }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( 'wled', - 'aabbccddee11', + 'aabbccddeeff', ), }), 'is_new': False, 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGBW Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.6b4', + 'sw_version': '0.99.0b1', 'via_device_id': None, }) # --- @@ -378,7 +426,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Preset 1', + 'state': 'unknown', }) # --- # name: test_color_palette_state[rgbw-select.wled_rgbw_light_preset-Preset 2-preset-called_with3].1 @@ -415,7 +463,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preset', - 'unique_id': 'aabbccddee11_preset', + 'unique_id': 'aabbccddeeff_preset', 'unit_of_measurement': None, }) # --- @@ -427,29 +475,31 @@ 'connections': set({ tuple( 'mac', - 'aa:bb:cc:dd:ee:11', + 'aa:bb:cc:dd:ee:ff', ), }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( 'wled', - 'aabbccddee11', + 'aabbccddeeff', ), }), 'is_new': False, 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGBW Light', 'name_by_user': None, + 'primary_config_entry': , 
'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.6b4', + 'sw_version': '0.99.0b1', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/snapshots/test_switch.ambr b/tests/components/wled/snapshots/test_switch.ambr index da69e686f07..ee3a72ba872 100644 --- a/tests/components/wled/snapshots/test_switch.ambr +++ b/tests/components/wled/snapshots/test_switch.ambr @@ -3,7 +3,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'duration': 60, - 'fade': True, 'friendly_name': 'WLED RGB Light Nightlight', 'target_brightness': 0, }), @@ -61,7 +60,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -73,12 +72,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -141,7 +142,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -153,12 +154,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -222,7 +225,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -234,12 +237,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 
'via_device_id': None, }) # --- @@ -303,7 +308,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -315,12 +320,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/test_config_flow.py b/tests/components/wled/test_config_flow.py index a1529eda1c7..a1cf515a24b 100644 --- a/tests/components/wled/test_config_flow.py +++ b/tests/components/wled/test_config_flow.py @@ -33,9 +33,7 @@ async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: assert result.get("title") == "WLED RGB Light" assert result.get("type") is FlowResultType.CREATE_ENTRY - assert "data" in result assert result["data"][CONF_HOST] == "192.168.1.123" - assert "result" in result assert result["result"].unique_id == "aabbccddeeff" @@ -167,23 +165,6 @@ async def test_user_device_exists_abort( assert result.get("reason") == "already_configured" -async def test_user_with_cct_channel_abort( - hass: HomeAssistant, - mock_wled: MagicMock, -) -> None: - """Test we abort user flow if WLED device uses a CCT channel.""" - mock_wled.update.return_value.info.leds.cct = True - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_HOST: "192.168.1.123"}, - ) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "cct_unsupported" - - @pytest.mark.usefixtures("mock_wled") async def test_zeroconf_without_mac_device_exists_abort( hass: HomeAssistant, @@ -234,31 +215,6 @@ async def test_zeroconf_with_mac_device_exists_abort( assert result.get("reason") == "already_configured" -async def test_zeroconf_with_cct_channel_abort( - hass: 
HomeAssistant, - mock_wled: MagicMock, -) -> None: - """Test we abort zeroconf flow if WLED device uses a CCT channel.""" - mock_wled.update.return_value.info.leds.cct = True - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.123"), - ip_addresses=[ip_address("192.168.1.123")], - hostname="example.local.", - name="mock_name", - port=None, - properties={CONF_MAC: "aabbccddeeff"}, - type="mock_type", - ), - ) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "cct_unsupported" - - async def test_options_flow( hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: diff --git a/tests/components/wled/test_init.py b/tests/components/wled/test_init.py index f6f1da0d41e..9dfcabd55e3 100644 --- a/tests/components/wled/test_init.py +++ b/tests/components/wled/test_init.py @@ -7,7 +7,6 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest from wled import WLEDConnectionError -from homeassistant.components.wled.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -44,7 +43,6 @@ async def test_load_unload_config_entry( # Ensure everything is cleaned up nicely and are disconnected assert mock_wled.disconnect.call_count == 1 - assert not hass.data.get(DOMAIN) @patch( @@ -69,21 +67,3 @@ async def test_setting_unique_id( """Test we set unique ID if not set yet.""" assert init_integration.runtime_data assert init_integration.unique_id == "aabbccddeeff" - - -async def test_error_config_entry_with_cct_channel( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_wled: AsyncMock, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the WLED fails entry setup with a CCT channel.""" - mock_wled.update.return_value.info.leds.cct = True - - mock_config_entry.add_to_hass(hass) - await 
hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - # Ensure config entry is errored and are connected and disconnected - assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR - assert "has a CCT channel, which is not supported" in caplog.text diff --git a/tests/components/wled/test_light.py b/tests/components/wled/test_light.py index 2b64619e306..58c4aa4e8c6 100644 --- a/tests/components/wled/test_light.py +++ b/tests/components/wled/test_light.py @@ -1,6 +1,5 @@ """Tests for the WLED light platform.""" -import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -9,14 +8,24 @@ from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.light import ( ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, + ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, DOMAIN as LIGHT_DOMAIN, + ColorMode, +) +from homeassistant.components.wled.const import ( + CONF_KEEP_MAIN_LIGHT, + DOMAIN, + SCAN_INTERVAL, ) -from homeassistant.components.wled.const import CONF_KEEP_MAIN_LIGHT, SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_ICON, @@ -30,7 +39,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_object_fixture, +) pytestmark = pytest.mark.usefixtures("init_integration") @@ -41,9 +54,9 @@ async def test_rgb_light_state( """Test the creation and values of the WLED lights.""" # First segment of the strip assert (state := hass.states.get("light.wled_rgb_light")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 127 + assert 
state.attributes.get(ATTR_BRIGHTNESS) == 255 assert state.attributes.get(ATTR_EFFECT) == "Solid" - assert state.attributes.get(ATTR_HS_COLOR) == (37.412, 100.0) + assert state.attributes.get(ATTR_HS_COLOR) == (218.906, 50.196) assert state.attributes.get(ATTR_ICON) is None assert state.state == STATE_ON @@ -52,9 +65,9 @@ async def test_rgb_light_state( # Second segment of the strip assert (state := hass.states.get("light.wled_rgb_light_segment_1")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 127 - assert state.attributes.get(ATTR_EFFECT) == "Blink" - assert state.attributes.get(ATTR_HS_COLOR) == (148.941, 100.0) + assert state.attributes.get(ATTR_BRIGHTNESS) == 255 + assert state.attributes.get(ATTR_EFFECT) == "Wipe" + assert state.attributes.get(ATTR_HS_COLOR) == (40.0, 100.0) assert state.attributes.get(ATTR_ICON) is None assert state.state == STATE_ON @@ -63,7 +76,7 @@ async def test_rgb_light_state( # Test main control of the lightstrip assert (state := hass.states.get("light.wled_rgb_light_main")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 127 + assert state.attributes.get(ATTR_BRIGHTNESS) == 128 assert state.state == STATE_ON assert (entry := entity_registry.async_get("light.wled_rgb_light_main")) @@ -188,8 +201,8 @@ async def test_dynamically_handle_segments( assert not hass.states.get("light.wled_rgb_light_segment_1") return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice( - json.loads(load_fixture("wled/rgb.json")) + mock_wled.update.return_value = WLEDDevice.from_dict( + load_json_object_fixture("rgb.json", DOMAIN) ) freezer.tick(SCAN_INTERVAL) @@ -327,6 +340,8 @@ async def test_rgbw_light(hass: HomeAssistant, mock_wled: MagicMock) -> None: """Test RGBW support for WLED.""" assert (state := hass.states.get("light.wled_rgbw_light")) assert state.state == STATE_ON + assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.RGBW] + assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.RGBW 
assert state.attributes.get(ATTR_RGBW_COLOR) == (255, 0, 0, 139) await hass.services.async_call( @@ -362,3 +377,34 @@ async def test_single_segment_with_keep_main_light( assert (state := hass.states.get("light.wled_rgb_light_main")) assert state.state == STATE_ON + + +@pytest.mark.parametrize("device_fixture", ["cct"]) +async def test_cct_light(hass: HomeAssistant, mock_wled: MagicMock) -> None: + """Test CCT support for WLED.""" + assert (state := hass.states.get("light.wled_cct_light")) + assert state.state == STATE_ON + assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ + ColorMode.COLOR_TEMP, + ColorMode.RGBW, + ] + assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.COLOR_TEMP + assert state.attributes.get(ATTR_MIN_COLOR_TEMP_KELVIN) == 2000 + assert state.attributes.get(ATTR_MAX_COLOR_TEMP_KELVIN) == 6535 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2942 + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.wled_cct_light", + ATTR_COLOR_TEMP_KELVIN: 4321, + }, + blocking=True, + ) + assert mock_wled.segment.call_count == 1 + mock_wled.segment.assert_called_with( + cct=130, + on=True, + segment_id=0, + ) diff --git a/tests/components/wled/test_number.py b/tests/components/wled/test_number.py index b692de37282..344eb03bc06 100644 --- a/tests/components/wled/test_number.py +++ b/tests/components/wled/test_number.py @@ -1,6 +1,5 @@ """Tests for the WLED number platform.""" -import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -13,13 +12,13 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from homeassistant.components.wled.const import SCAN_INTERVAL +from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from 
homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_json_object_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -128,8 +127,8 @@ async def test_speed_dynamically_handle_segments( # Test adding a segment dynamically... return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice( - json.loads(load_fixture("wled/rgb.json")) + mock_wled.update.return_value = WLEDDevice.from_dict( + load_json_object_fixture("rgb.json", DOMAIN) ) freezer.tick(SCAN_INTERVAL) diff --git a/tests/components/wled/test_select.py b/tests/components/wled/test_select.py index 380af1a286a..364e5fc2034 100644 --- a/tests/components/wled/test_select.py +++ b/tests/components/wled/test_select.py @@ -1,6 +1,5 @@ """Tests for the WLED select platform.""" -import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -9,18 +8,13 @@ from syrupy.assertion import SnapshotAssertion from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN -from homeassistant.components.wled.const import SCAN_INTERVAL -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_SELECT_OPTION, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) +from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_json_object_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -135,8 +129,8 @@ async def 
test_color_palette_dynamically_handle_segments( assert not hass.states.get("select.wled_rgb_light_segment_1_color_palette") return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice( - json.loads(load_fixture("wled/rgb.json")) + mock_wled.update.return_value = WLEDDevice.from_dict( + load_json_object_fixture("rgb.json", DOMAIN) ) freezer.tick(SCAN_INTERVAL) @@ -148,7 +142,7 @@ async def test_color_palette_dynamically_handle_segments( assert ( segment1 := hass.states.get("select.wled_rgb_light_segment_1_color_palette") ) - assert segment1.state == "Random Cycle" + assert segment1.state == "* Random Cycle" # Test adding if segment shows up again, including the master entity mock_wled.update.return_value = return_value @@ -174,39 +168,3 @@ async def test_playlist_unavailable_without_playlists(hass: HomeAssistant) -> No """Test WLED playlist entity is unavailable when playlists are not available.""" assert (state := hass.states.get("select.wled_rgb_light_playlist")) assert state.state == STATE_UNAVAILABLE - - -@pytest.mark.parametrize("device_fixture", ["rgbw"]) -async def test_old_style_preset_active( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_wled: MagicMock, -) -> None: - """Test unknown preset returned (when old style/unknown) preset is active.""" - # Set device preset state to a random number - mock_wled.update.return_value.state.preset = 99 - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert (state := hass.states.get("select.wled_rgbw_light_preset")) - assert state.state == STATE_UNKNOWN - - -@pytest.mark.parametrize("device_fixture", ["rgbw"]) -async def test_old_style_playlist_active( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_wled: MagicMock, -) -> None: - """Test when old style playlist cycle is active.""" - # Set device playlist to 0, which meant "cycle" previously. 
- mock_wled.update.return_value.state.playlist = 0 - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert (state := hass.states.get("select.wled_rgbw_light_playlist")) - assert state.state == STATE_UNKNOWN diff --git a/tests/components/wled/test_sensor.py b/tests/components/wled/test_sensor.py index 319622e7cb3..8bd5431cf59 100644 --- a/tests/components/wled/test_sensor.py +++ b/tests/components/wled/test_sensor.py @@ -44,7 +44,7 @@ async def test_sensors( == UnitOfElectricCurrent.MILLIAMPERE ) assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.CURRENT - assert state.state == "470" + assert state.state == "515" assert ( entry := entity_registry.async_get("sensor.wled_rgb_light_estimated_current") @@ -55,7 +55,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_uptime")) assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.state == "2019-11-11T09:10:00+00:00" + assert state.state == "2019-11-11T08:54:26+00:00" assert (entry := entity_registry.async_get("sensor.wled_rgb_light_uptime")) assert entry.unique_id == "aabbccddeeff_uptime" @@ -64,7 +64,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_free_memory")) assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfInformation.BYTES - assert state.state == "14600" + assert state.state == "198384" assert entry.entity_category is EntityCategory.DIAGNOSTIC assert (entry := entity_registry.async_get("sensor.wled_rgb_light_free_memory")) @@ -74,7 +74,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_wi_fi_signal")) assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "76" + assert state.state == "100" assert 
entry.entity_category is EntityCategory.DIAGNOSTIC assert (entry := entity_registry.async_get("sensor.wled_rgb_light_wi_fi_signal")) @@ -87,7 +87,7 @@ async def test_sensors( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SIGNAL_STRENGTH_DECIBELS_MILLIWATT ) - assert state.state == "-62" + assert state.state == "-43" assert (entry := entity_registry.async_get("sensor.wled_rgb_light_wi_fi_rssi")) assert entry.unique_id == "aabbccddeeff_wifi_rssi" diff --git a/tests/components/wled/test_switch.py b/tests/components/wled/test_switch.py index 6dfd62e363f..48331ffa9cc 100644 --- a/tests/components/wled/test_switch.py +++ b/tests/components/wled/test_switch.py @@ -1,6 +1,5 @@ """Tests for the WLED switch platform.""" -import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -9,7 +8,7 @@ from syrupy.assertion import SnapshotAssertion from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.components.wled.const import SCAN_INTERVAL +from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -22,7 +21,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_json_object_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -144,8 +143,8 @@ async def test_switch_dynamically_handle_segments( # Test adding a segment dynamically... 
return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice( - json.loads(load_fixture("wled/rgb.json")) + mock_wled.update.return_value = WLEDDevice.from_dict( + load_json_object_fixture("rgb.json", DOMAIN) ) freezer.tick(SCAN_INTERVAL) diff --git a/tests/components/wled/test_update.py b/tests/components/wled/test_update.py index c576cdf16f9..a27aa918385 100644 --- a/tests/components/wled/test_update.py +++ b/tests/components/wled/test_update.py @@ -2,8 +2,9 @@ from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory import pytest -from wled import WLEDError +from wled import Releases, WLEDError from homeassistant.components.update import ( ATTR_INSTALLED_VERSION, @@ -16,6 +17,7 @@ from homeassistant.components.update import ( UpdateDeviceClass, UpdateEntityFeature, ) +from homeassistant.components.wled.const import RELEASES_SCAN_INTERVAL from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -31,6 +33,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from tests.common import async_fire_time_changed + pytestmark = pytest.mark.usefixtures("init_integration") @@ -45,12 +49,12 @@ async def test_update_available( state.attributes[ATTR_ENTITY_PICTURE] == "https://brands.home-assistant.io/_/wled/icon.png" ) - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.5" - assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" + assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert ( state.attributes[ATTR_RELEASE_URL] - == "https://github.com/Aircoookie/WLED/releases/tag/v0.12.0" + == "https://github.com/Aircoookie/WLED/releases/tag/v0.99.0" ) assert ( state.attributes[ATTR_SUPPORTED_FEATURES] @@ -64,15 +68,26 @@ async def test_update_available( assert entry.entity_category is 
EntityCategory.CONFIG -@pytest.mark.parametrize("device_fixture", ["rgb_no_update"]) async def test_update_information_available( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + mock_wled_releases: MagicMock, ) -> None: """Test having no update information available at all.""" + mock_wled_releases.releases.return_value = Releases( + beta=None, + stable=None, + ) + + freezer.tick(RELEASES_SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.attributes.get(ATTR_DEVICE_CLASS) == UpdateDeviceClass.FIRMWARE assert state.state == STATE_UNKNOWN - assert state.attributes[ATTR_INSTALLED_VERSION] is None + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" assert state.attributes[ATTR_LATEST_VERSION] is None assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert state.attributes[ATTR_RELEASE_URL] is None @@ -98,12 +113,12 @@ async def test_no_update_available( assert (state := hass.states.get("update.wled_websocket_firmware")) assert state.state == STATE_OFF assert state.attributes.get(ATTR_DEVICE_CLASS) == UpdateDeviceClass.FIRMWARE - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.12.0-b2" - assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0-b2" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.99.0" + assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert ( state.attributes[ATTR_RELEASE_URL] - == "https://github.com/Aircoookie/WLED/releases/tag/v0.12.0-b2" + == "https://github.com/Aircoookie/WLED/releases/tag/v0.99.0" ) assert ( state.attributes[ATTR_SUPPORTED_FEATURES] @@ -151,8 +166,8 @@ async def test_update_stay_stable( """ assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.state == STATE_ON - assert 
state.attributes[ATTR_INSTALLED_VERSION] == "0.8.5" - assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" + assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" await hass.services.async_call( UPDATE_DOMAIN, @@ -161,7 +176,7 @@ async def test_update_stay_stable( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="0.12.0") + mock_wled.upgrade.assert_called_with(version="0.99.0") @pytest.mark.parametrize("device_fixture", ["rgbw"]) @@ -177,8 +192,8 @@ async def test_update_beta_to_stable( """ assert (state := hass.states.get("update.wled_rgbw_light_firmware")) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.6b4" - assert state.attributes[ATTR_LATEST_VERSION] == "0.8.6" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.99.0b1" + assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" await hass.services.async_call( UPDATE_DOMAIN, @@ -187,7 +202,7 @@ async def test_update_beta_to_stable( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="0.8.6") + mock_wled.upgrade.assert_called_with(version="0.99.0") @pytest.mark.parametrize("device_fixture", ["rgb_single_segment"]) @@ -202,8 +217,8 @@ async def test_update_stay_beta( """ assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.6b1" - assert state.attributes[ATTR_LATEST_VERSION] == "0.8.6b2" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0b4" + assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0b5" await hass.services.async_call( UPDATE_DOMAIN, @@ -212,4 +227,4 @@ async def test_update_stay_beta( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="0.8.6b2") + mock_wled.upgrade.assert_called_with(version="1.0.0b5") diff --git 
a/tests/components/workday/__init__.py b/tests/components/workday/__init__.py index a7e26765643..17449af8bd1 100644 --- a/tests/components/workday/__init__.py +++ b/tests/components/workday/__init__.py @@ -4,6 +4,8 @@ from __future__ import annotations from typing import Any +from holidays import OPTIONAL + from homeassistant.components.workday.const import ( DEFAULT_EXCLUDES, DEFAULT_NAME, @@ -310,3 +312,26 @@ TEST_LANGUAGE_NO_CHANGE = { "remove_holidays": ["2022-12-04", "2022-12-24,2022-12-26"], "language": "de", } +TEST_NO_OPTIONAL_CATEGORY = { + "name": DEFAULT_NAME, + "country": "CH", + "province": "FR", + "excludes": DEFAULT_EXCLUDES, + "days_offset": DEFAULT_OFFSET, + "workdays": DEFAULT_WORKDAYS, + "add_holidays": [], + "remove_holidays": [], + "language": "de", +} +TEST_OPTIONAL_CATEGORY = { + "name": DEFAULT_NAME, + "country": "CH", + "province": "FR", + "excludes": DEFAULT_EXCLUDES, + "days_offset": DEFAULT_OFFSET, + "workdays": DEFAULT_WORKDAYS, + "add_holidays": [], + "remove_holidays": [], + "language": "de", + "category": [OPTIONAL], +} diff --git a/tests/components/workday/conftest.py b/tests/components/workday/conftest.py index 33bf98f90c3..081d6ce90db 100644 --- a/tests/components/workday/conftest.py +++ b/tests/components/workday/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Workday integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/workday/test_binary_sensor.py b/tests/components/workday/test_binary_sensor.py index e973a9f9c28..a2718c00824 100644 --- a/tests/components/workday/test_binary_sensor.py +++ b/tests/components/workday/test_binary_sensor.py @@ -39,6 +39,8 @@ from . 
import ( TEST_CONFIG_YESTERDAY, TEST_LANGUAGE_CHANGE, TEST_LANGUAGE_NO_CHANGE, + TEST_NO_OPTIONAL_CATEGORY, + TEST_OPTIONAL_CATEGORY, init_integration, ) @@ -400,3 +402,23 @@ async def test_language_difference_no_change_other_language( """Test skipping if no difference in language naming.""" await init_integration(hass, TEST_LANGUAGE_NO_CHANGE) assert "Changing language from en to en_US" not in caplog.text + + +@pytest.mark.parametrize( + ("config", "end_state"), + [(TEST_OPTIONAL_CATEGORY, "off"), (TEST_NO_OPTIONAL_CATEGORY, "on")], +) +async def test_optional_category( + hass: HomeAssistant, + config: dict[str, Any], + end_state: str, + freezer: FrozenDateTimeFactory, +) -> None: + """Test setup from various configs.""" + # CH, subdiv FR has optional holiday Jan 2nd + freezer.move_to(datetime(2024, 1, 2, 12, tzinfo=UTC)) # Tuesday + await init_integration(hass, config) + + state = hass.states.get("binary_sensor.workday_sensor") + assert state is not None + assert state.state == end_state diff --git a/tests/components/workday/test_config_flow.py b/tests/components/workday/test_config_flow.py index 7eb3065e576..cc83cee93a2 100644 --- a/tests/components/workday/test_config_flow.py +++ b/tests/components/workday/test_config_flow.py @@ -5,11 +5,13 @@ from __future__ import annotations from datetime import datetime from freezegun.api import FrozenDateTimeFactory +from holidays import HALF_DAY, OPTIONAL import pytest from homeassistant import config_entries from homeassistant.components.workday.const import ( CONF_ADD_HOLIDAYS, + CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_REMOVE_HOLIDAYS, @@ -354,13 +356,14 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: hass, { "name": "Workday Sensor", - "country": "DE", + "country": "CH", "excludes": ["sat", "sun", "holiday"], "days_offset": 0, "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": [], "remove_holidays": [], - "province": None, + "province": "FR", + "category": [OPTIONAL], 
}, entry_id="1", ) @@ -368,13 +371,14 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: hass, { "name": "Workday Sensor2", - "country": "DE", + "country": "CH", "excludes": ["sat", "sun", "holiday"], "days_offset": 0, "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": ["2023-03-28"], "remove_holidays": [], - "province": None, + "province": "FR", + "category": [OPTIONAL], }, entry_id="2", ) @@ -389,6 +393,8 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": [], "remove_holidays": [], + "province": "FR", + "category": [OPTIONAL], }, ) @@ -602,3 +608,48 @@ async def test_language( state = hass.states.get("binary_sensor.workday_sensor") assert state is not None assert state.state == "on" + + +async def test_form_with_categories(hass: HomeAssistant) -> None: + """Test optional categories.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: "Workday Sensor", + CONF_COUNTRY: "CH", + }, + ) + await hass.async_block_till_done() + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + CONF_EXCLUDES: DEFAULT_EXCLUDES, + CONF_OFFSET: DEFAULT_OFFSET, + CONF_WORKDAYS: DEFAULT_WORKDAYS, + CONF_ADD_HOLIDAYS: [], + CONF_REMOVE_HOLIDAYS: [], + CONF_LANGUAGE: "de", + CONF_CATEGORY: [HALF_DAY], + }, + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Workday Sensor" + assert result3["options"] == { + "name": "Workday Sensor", + "country": "CH", + "excludes": ["sat", "sun", "holiday"], + "days_offset": 0, + "workdays": ["mon", "tue", "wed", "thu", "fri"], + "add_holidays": [], + "remove_holidays": [], + "language": "de", + "category": 
["half_day"], + } diff --git a/tests/components/worldclock/conftest.py b/tests/components/worldclock/conftest.py new file mode 100644 index 00000000000..74ed82f099a --- /dev/null +++ b/tests/components/worldclock/conftest.py @@ -0,0 +1,66 @@ +"""Fixtures for the Worldclock integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.worldclock.const import ( + CONF_TIME_FORMAT, + DEFAULT_NAME, + DEFAULT_TIME_STR_FORMAT, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_NAME, CONF_TIME_ZONE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically patch setup.""" + with patch( + "homeassistant.components.worldclock.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. 
+ + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Worldclock integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + title=DEFAULT_NAME, + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/worldclock/test_config_flow.py b/tests/components/worldclock/test_config_flow.py new file mode 100644 index 00000000000..dfdb8159b9c --- /dev/null +++ b/tests/components/worldclock/test_config_flow.py @@ -0,0 +1,104 @@ +"""Test the Worldclock config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant import config_entries +from homeassistant.components.worldclock.const import ( + CONF_TIME_FORMAT, + DEFAULT_NAME, + DEFAULT_TIME_STR_FORMAT, + DOMAIN, +) +from homeassistant.const import CONF_NAME, CONF_TIME_ZONE +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + }, + ) + await hass.async_block_till_done() 
+ + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", + } + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 1 + + state = hass.states.get("sensor.worldclock_sensor") + assert state is not None + + +async def test_entry_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/worldclock/test_init.py b/tests/components/worldclock/test_init.py new file mode 
100644 index 00000000000..5683836c166 --- /dev/null +++ b/tests/components/worldclock/test_init.py @@ -0,0 +1,17 @@ +"""Test Worldclock component setup process.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/worldclock/test_sensor.py b/tests/components/worldclock/test_sensor.py index 00195a49827..a8e3e41e649 100644 --- a/tests/components/worldclock/test_sensor.py +++ b/tests/components/worldclock/test_sensor.py @@ -1,19 +1,32 @@ """The test for the World clock sensor platform.""" +from datetime import tzinfo + import pytest -from homeassistant.core import HomeAssistant +from homeassistant.components.worldclock.const import ( + CONF_TIME_FORMAT, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.const import CONF_NAME, CONF_TIME_ZONE +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util +from tests.common import MockConfigEntry + @pytest.fixture -def time_zone(): +async def time_zone() -> tzinfo | None: """Fixture for time zone.""" - return dt_util.get_time_zone("America/New_York") + return await dt_util.async_get_time_zone("America/New_York") -async def test_time(hass: HomeAssistant, time_zone) -> None: +async def test_time_imported_from_yaml( + hass: HomeAssistant, time_zone: tzinfo | None, issue_registry: ir.IssueRegistry +) -> None: """Test the time at a different location.""" 
config = {"sensor": {"platform": "worldclock", "time_zone": "America/New_York"}} @@ -29,26 +42,42 @@ async def test_time(hass: HomeAssistant, time_zone) -> None: assert state.state == dt_util.now(time_zone=time_zone).strftime("%H:%M") - -async def test_time_format(hass: HomeAssistant, time_zone) -> None: - """Test time_format setting.""" - time_format = "%a, %b %d, %Y %I:%M %p" - config = { - "sensor": { - "platform": "worldclock", - "time_zone": "America/New_York", - "time_format": time_format, - } - } - - assert await async_setup_component( - hass, - "sensor", - config, + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" ) - await hass.async_block_till_done() + assert issue + assert issue.issue_domain == DOMAIN + + +async def test_time_from_config_entry( + hass: HomeAssistant, time_zone: tzinfo | None, loaded_entry: MockConfigEntry +) -> None: + """Test the time at a different location.""" state = hass.states.get("sensor.worldclock_sensor") assert state is not None - assert state.state == dt_util.now(time_zone=time_zone).strftime(time_format) + assert state.state == dt_util.now(time_zone=time_zone).strftime("%H:%M") + + +@pytest.mark.parametrize( + "get_config", + [ + { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", + } + ], +) +async def test_time_format( + hass: HomeAssistant, time_zone: tzinfo | None, loaded_entry: MockConfigEntry +) -> None: + """Test time_format setting.""" + + state = hass.states.get("sensor.worldclock_sensor") + assert state is not None + + assert state.state == dt_util.now(time_zone=time_zone).strftime( + "%a, %b %d, %Y %I:%M %p" + ) diff --git a/tests/components/ws66i/test_media_player.py b/tests/components/ws66i/test_media_player.py index a66e79bf9e0..aa67ea24b63 100644 --- a/tests/components/ws66i/test_media_player.py +++ b/tests/components/ws66i/test_media_player.py @@ -73,7 +73,7 @@ class AttrDict(dict): class MockWs66i: """Mock 
for pyws66i object.""" - def __init__(self, fail_open=False, fail_zone_check=None): + def __init__(self, fail_open=False, fail_zone_check=None) -> None: """Init mock object.""" self.zones = defaultdict( lambda: AttrDict( diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index 47ef0566dc6..770186d92aa 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Wyoming tests.""" +from collections.abc import Generator from pathlib import Path from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components import stt from homeassistant.components.wyoming import DOMAIN @@ -19,9 +19,8 @@ from tests.common import MockConfigEntry @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index a0e0c7c5011..ee4c5533254 100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -1,42 +1,4 @@ # serializer version: 1 -# name: test_hassio_addon_discovery - FlowResultSnapshot({ - 'context': dict({ - 'source': 'hassio', - 'unique_id': '1234', - }), - 'data': dict({ - 'host': 'mock-piper', - 'port': 10200, - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'wyoming', - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': 'mock-piper', - 'port': 10200, - }), - 'disabled_by': None, - 'domain': 'wyoming', - 'entry_id': , - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 
'hassio', - 'title': 'Piper', - 'unique_id': '1234', - 'version': 1, - }), - 'title': 'Piper', - 'type': , - 'version': 1, - }) -# --- # name: test_hassio_addon_discovery[info0] FlowResultSnapshot({ 'context': dict({ diff --git a/tests/components/wyoming/snapshots/test_tts.ambr b/tests/components/wyoming/snapshots/test_tts.ambr index 299bddb07e5..7ca5204e66c 100644 --- a/tests/components/wyoming/snapshots/test_tts.ambr +++ b/tests/components/wyoming/snapshots/test_tts.ambr @@ -32,28 +32,6 @@ }), ]) # --- -# name: test_get_tts_audio_mp3 - list([ - dict({ - 'data': dict({ - 'text': 'Hello world', - }), - 'payload': None, - 'type': 'synthesize', - }), - ]) -# --- -# name: test_get_tts_audio_raw - list([ - dict({ - 'data': dict({ - 'text': 'Hello world', - }), - 'payload': None, - 'type': 'synthesize', - }), - ]) -# --- # name: test_voice_speaker list([ dict({ diff --git a/tests/components/wyoming/test_select.py b/tests/components/wyoming/test_select.py index e6ec2c4d432..2438d25b838 100644 --- a/tests/components/wyoming/test_select.py +++ b/tests/components/wyoming/test_select.py @@ -5,6 +5,7 @@ from unittest.mock import Mock, patch from homeassistant.components import assist_pipeline from homeassistant.components.assist_pipeline.pipeline import PipelineData from homeassistant.components.assist_pipeline.select import OPTION_PREFERRED +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -140,3 +141,50 @@ async def test_noise_suppression_level_select( ) assert satellite_device.noise_suppression_level == 2 + + +async def test_vad_sensitivity_select( + hass: HomeAssistant, + satellite_config_entry: ConfigEntry, + satellite_device: SatelliteDevice, +) -> None: + """Test VAD sensitivity select.""" + vs_entity_id = satellite_device.get_vad_sensitivity_entity_id(hass) + assert vs_entity_id 
+ + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.DEFAULT + assert satellite_device.vad_sensitivity == VadSensitivity.DEFAULT + + # Change setting + with patch.object(satellite_device, "set_vad_sensitivity") as mock_vs_changed: + await hass.services.async_call( + "select", + "select_option", + {"entity_id": vs_entity_id, "option": VadSensitivity.AGGRESSIVE.value}, + blocking=True, + ) + + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.AGGRESSIVE.value + + # set function should have been called + mock_vs_changed.assert_called_once_with(VadSensitivity.AGGRESSIVE) + + # test restore + satellite_device = await reload_satellite(hass, satellite_config_entry.entry_id) + + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.AGGRESSIVE.value + + await hass.services.async_call( + "select", + "select_option", + {"entity_id": vs_entity_id, "option": VadSensitivity.RELAXED.value}, + blocking=True, + ) + + assert satellite_device.vad_sensitivity == VadSensitivity.RELAXED diff --git a/tests/components/xiaomi/test_device_tracker.py b/tests/components/xiaomi/test_device_tracker.py index 975e666af68..0f1c36d1fba 100644 --- a/tests/components/xiaomi/test_device_tracker.py +++ b/tests/components/xiaomi/test_device_tracker.py @@ -144,6 +144,7 @@ def mocked_requests(*args, **kwargs): 200, ) _LOGGER.debug("UNKNOWN ROUTE") + return None @patch( diff --git a/tests/components/xiaomi_ble/conftest.py b/tests/components/xiaomi_ble/conftest.py index bb74b3c7af3..d4864cbe2f8 100644 --- a/tests/components/xiaomi_ble/conftest.py +++ b/tests/components/xiaomi_ble/conftest.py @@ -1,9 +1,9 @@ """Session fixtures.""" +from collections.abc import Generator from unittest import mock import pytest -from typing_extensions import Generator class MockServices: @@ -19,7 +19,7 @@ class MockBleakClient: services = MockServices() - def 
__init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/xiaomi_ble/test_device_trigger.py b/tests/components/xiaomi_ble/test_device_trigger.py index 87a4d340d8c..218a382ada5 100644 --- a/tests/components/xiaomi_ble/test_device_trigger.py +++ b/tests/components/xiaomi_ble/test_device_trigger.py @@ -18,7 +18,6 @@ from tests.common import ( MockConfigEntry, async_capture_events, async_get_device_automations, - async_mock_service, ) from tests.components.bluetooth import inject_bluetooth_service_info_bleak @@ -29,12 +28,6 @@ def get_device_id(mac: str) -> tuple[str, str]: return (BLUETOOTH_DOMAIN, mac) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def _async_setup_xiaomi_device( hass: HomeAssistant, mac: str, data: Any | None = None ): @@ -399,7 +392,9 @@ async def test_get_triggers_for_invalid_device_id( async def test_if_fires_on_button_press( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for button press event trigger firing.""" mac = "54:EF:44:E3:9C:BC" @@ -452,15 +447,17 @@ async def test_if_fires_on_button_press( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() async def test_if_fires_on_double_button_long_press( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) 
-> None: """Test for button press event trigger firing.""" mac = "DC:ED:83:87:12:73" @@ -513,15 +510,17 @@ async def test_if_fires_on_double_button_long_press( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_right_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_right_button_press" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() async def test_if_fires_on_motion_detected( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for motion event trigger firing.""" mac = "DE:70:E8:B2:39:0C" @@ -567,8 +566,8 @@ async def test_if_fires_on_motion_detected( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_motion_detected" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_motion_detected" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() @@ -676,7 +675,9 @@ async def test_automation_with_invalid_trigger_event_property( async def test_triggers_for_invalid__model( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test invalid model doesn't return triggers.""" mac = "DE:70:E8:B2:39:0C" diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index 462145d16ab..54646d30513 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -1,12 +1,12 @@ """The tests for the Xiaomi vacuum platform.""" +from collections.abc import Generator from datetime import datetime, time, timedelta from unittest 
import mock from unittest.mock import MagicMock, patch from miio import DeviceException import pytest -from typing_extensions import Generator from homeassistant.components.vacuum import ( ATTR_BATTERY_ICON, diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index 02246e69269..2375e7d07f4 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -25,7 +25,7 @@ def _create_zone_mock(name, url): class FakeYamahaDevice: """A fake Yamaha device.""" - def __init__(self, ctrl_url, name, zones=None): + def __init__(self, ctrl_url, name, zones=None) -> None: """Initialize the fake Yamaha device.""" self.ctrl_url = ctrl_url self.name = name @@ -46,11 +46,27 @@ def main_zone_fixture(): def device_fixture(main_zone): """Mock the yamaha device.""" device = FakeYamahaDevice("http://receiver", "Receiver", zones=[main_zone]) - with patch("rxv.RXV", return_value=device): + with ( + patch("rxv.RXV", return_value=device), + patch("rxv.find", return_value=[device]), + ): yield device -async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: +@pytest.fixture(name="device2") +def device2_fixture(main_zone): + """Mock the yamaha device.""" + device = FakeYamahaDevice( + "http://127.0.0.1:80/YamahaRemoteControl/ctrl", "Receiver 2", zones=[main_zone] + ) + with ( + patch("rxv.RXV", return_value=device), + patch("rxv.find", return_value=[device]), + ): + yield device + + +async def test_setup_host(hass: HomeAssistant, device, device2, main_zone) -> None: """Test set up integration with host.""" assert await async_setup_component(hass, MP_DOMAIN, CONFIG) await hass.async_block_till_done() @@ -60,6 +76,36 @@ async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: assert state is not None assert state.state == "off" + with patch("rxv.find", return_value=[device2]): + assert await async_setup_component(hass, MP_DOMAIN, CONFIG) + await 
hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + + +@pytest.mark.parametrize( + ("error"), + [ + AttributeError, + ValueError, + UnicodeDecodeError("", b"", 1, 0, ""), + ], +) +async def test_setup_find_errors(hass: HomeAssistant, device, main_zone, error) -> None: + """Test set up integration encountering an Error.""" + + with patch("rxv.find", side_effect=error): + assert await async_setup_component(hass, MP_DOMAIN, CONFIG) + await hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + async def test_setup_no_host(hass: HomeAssistant, device, main_zone) -> None: """Test set up integration without host.""" diff --git a/tests/components/yamaha_musiccast/test_config_flow.py b/tests/components/yamaha_musiccast/test_config_flow.py index 321e7250e5a..7629d2401c2 100644 --- a/tests/components/yamaha_musiccast/test_config_flow.py +++ b/tests/components/yamaha_musiccast/test_config_flow.py @@ -1,5 +1,6 @@ """Test config flow.""" +from collections.abc import Generator from unittest.mock import patch from aiomusiccast import MusicCastConnectionException @@ -17,7 +18,7 @@ from tests.common import MockConfigEntry @pytest.fixture(autouse=True) -async def silent_ssdp_scanner(hass): +def silent_ssdp_scanner() -> Generator[None]: """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), diff --git a/tests/components/yandextts/test_tts.py b/tests/components/yandextts/test_tts.py index 496c187469a..77878c2be51 100644 --- a/tests/components/yandextts/test_tts.py +++ b/tests/components/yandextts/test_tts.py @@ -29,9 +29,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def 
mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/yardian/conftest.py b/tests/components/yardian/conftest.py index 26a01f889b7..00e76c4c34f 100644 --- a/tests/components/yardian/conftest.py +++ b/tests/components/yardian/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Yardian tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/yeelight/__init__.py b/tests/components/yeelight/__init__.py index 2de064cf567..bdd8cdda312 100644 --- a/tests/components/yeelight/__init__.py +++ b/tests/components/yeelight/__init__.py @@ -109,7 +109,7 @@ CONFIG_ENTRY_DATA = {CONF_ID: ID} class MockAsyncBulb: """A mock for yeelight.aio.AsyncBulb.""" - def __init__(self, model, bulb_type, cannot_connect): + def __init__(self, model, bulb_type, cannot_connect) -> None: """Init the mock.""" self.model = model self.bulb_type = bulb_type diff --git a/tests/components/yolink/test_device_trigger.py b/tests/components/yolink/test_device_trigger.py index f6aa9a28ac0..6b48b32fd62 100644 --- a/tests/components/yolink/test_device_trigger.py +++ b/tests/components/yolink/test_device_trigger.py @@ -1,6 +1,5 @@ """The tests for YoLink device triggers.""" -import pytest from pytest_unordered import unordered from yolink.const import ATTR_DEVICE_DIMMER, ATTR_DEVICE_SMART_REMOTER @@ -11,17 +10,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> 
list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "yolink", "automation") +from tests.common import MockConfigEntry, async_get_device_automations async def test_get_triggers( @@ -120,7 +109,9 @@ async def test_get_triggers_exception( async def test_if_fires_on_event( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for event triggers firing.""" mac_address = "12:34:56:AB:CD:EF" @@ -166,5 +157,5 @@ async def test_if_fires_on_event( }, ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["message"] == "service called" + assert len(service_calls) == 1 + assert service_calls[0].data["message"] == "service called" diff --git a/tests/components/youless/__init__.py b/tests/components/youless/__init__.py index 8711c6721bc..8770a7e2dc8 100644 --- a/tests/components/youless/__init__.py +++ b/tests/components/youless/__init__.py @@ -1 +1,39 @@ """Tests for the youless component.""" + +import requests_mock + +from homeassistant.components import youless +from homeassistant.const import CONF_DEVICE, CONF_HOST +from homeassistant.core import HomeAssistant + +from tests.common import ( + MockConfigEntry, + load_json_array_fixture, + load_json_object_fixture, +) + + +async def init_component(hass: HomeAssistant) -> MockConfigEntry: + """Check if the setup of the integration succeeds.""" + with requests_mock.Mocker() as mock: + mock.get( + "http://1.1.1.1/d", + json=load_json_object_fixture("device.json", youless.DOMAIN), + ) + mock.get( + "http://1.1.1.1/e", + json=load_json_array_fixture("enologic.json", youless.DOMAIN), + headers={"Content-Type": "application/json"}, + ) + + entry = MockConfigEntry( + domain=youless.DOMAIN, + title="localhost", + data={CONF_HOST: "1.1.1.1", CONF_DEVICE: "localhost"}, + ) + entry.add_to_hass(hass) + + await 
hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + return entry diff --git a/tests/components/youless/fixtures/device.json b/tests/components/youless/fixtures/device.json new file mode 100644 index 00000000000..7d089851923 --- /dev/null +++ b/tests/components/youless/fixtures/device.json @@ -0,0 +1,5 @@ +{ + "model": "LS120", + "fw": "1.4.2-EL", + "mac": "de2:2d2:3d23" +} diff --git a/tests/components/youless/fixtures/enologic.json b/tests/components/youless/fixtures/enologic.json new file mode 100644 index 00000000000..0189f43af5e --- /dev/null +++ b/tests/components/youless/fixtures/enologic.json @@ -0,0 +1,18 @@ +[ + { + "tm": 1611929119, + "net": 9194.164, + "pwr": 2382, + "ts0": 1608654000, + "cs0": 0.0, + "ps0": 0, + "p1": 4703.562, + "p2": 4490.631, + "n1": 0.029, + "n2": 0.0, + "gas": 1624.264, + "gts": 0, + "wtr": 1234.564, + "wts": 0 + } +] diff --git a/tests/components/youless/snapshots/test_sensor.ambr b/tests/components/youless/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..bcfd0139e5c --- /dev/null +++ b/tests/components/youless/snapshots/test_sensor.ambr @@ -0,0 +1,972 @@ +# serializer version: 1 +# name: test_sensors[sensor.energy_delivery_high-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_delivery_high', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy delivery high', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_delivery_high', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[sensor.energy_delivery_high-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy delivery high', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_delivery_high', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_delivery_low-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_delivery_low', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy delivery low', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_delivery_low', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_delivery_low-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy delivery low', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_delivery_low', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.029', + }) +# --- +# name: test_sensors[sensor.energy_high-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_high', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy high', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_power_high', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_high-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy high', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_high', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4490.631', + }) +# --- +# name: test_sensors[sensor.energy_low-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_low', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy low', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_power_low', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_low-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy low', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_low', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4703.562', + }) +# --- +# name: test_sensors[sensor.energy_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_total', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy total', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_power_total', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9194.164', + }) +# --- +# name: test_sensors[sensor.extra_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.extra_total', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Extra total', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_extra_total', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.extra_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Extra total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.extra_total', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.extra_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.extra_usage', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Extra usage', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_extra_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.extra_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Extra usage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.extra_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.gas_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gas_usage', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:fire', + 'original_name': 'Gas usage', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_gas', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[sensor.gas_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'gas', + 'friendly_name': 'Gas usage', + 'icon': 'mdi:fire', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gas_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1624.264', + }) +# --- +# name: test_sensors[sensor.phase_1_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_1_current', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 1 current', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_phase_1_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_1_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Phase 1 current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.phase_1_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.phase_1_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_1_power', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 1 power', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_phase_1_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_1_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Phase 1 power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.phase_1_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.phase_1_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_1_voltage', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 1 voltage', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_phase_1_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_1_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Phase 1 voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.phase_1_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.phase_2_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_2_current', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 2 current', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_phase_2_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_2_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Phase 2 current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.phase_2_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.phase_2_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_2_power', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 2 power', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_phase_2_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_2_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Phase 2 power', + 'state_class': , + 'unit_of_measurement': , + 
}), + 'context': , + 'entity_id': 'sensor.phase_2_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.phase_2_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_2_voltage', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 2 voltage', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_phase_2_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_2_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Phase 2 voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.phase_2_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.phase_3_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_3_current', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 3 current', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': None, + 'unique_id': 'youless_localhost_phase_3_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_3_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Phase 3 current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.phase_3_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.phase_3_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_3_power', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 3 power', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_phase_3_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_3_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Phase 3 power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.phase_3_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.phase_3_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.phase_3_voltage', + 
'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Phase 3 voltage', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_phase_3_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.phase_3_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Phase 3 voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.phase_3_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.power_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.power_usage', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power Usage', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.power_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Power Usage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.power_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2382', + }) +# --- +# name: test_sensors[sensor.water_usage-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.water_usage', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:water', + 'original_name': 'Water usage', + 'platform': 'youless', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'youless_localhost_water', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.water_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Water usage', + 'icon': 'mdi:water', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.water_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1234.564', + }) +# --- diff --git a/tests/components/youless/test_init.py b/tests/components/youless/test_init.py new file mode 100644 index 00000000000..29db8c66af0 --- /dev/null +++ b/tests/components/youless/test_init.py @@ -0,0 +1,18 @@ +"""Test the setup of the Youless integration.""" + +from homeassistant import setup +from homeassistant.components import youless +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import init_component + + +async def test_async_setup_entry(hass: HomeAssistant) -> None: + """Check if the setup of the integration succeeds.""" + + entry = await init_component(hass) + + assert await setup.async_setup_component(hass, youless.DOMAIN, {}) + assert entry.state is ConfigEntryState.LOADED + assert len(hass.states.async_entity_ids()) == 19 diff --git a/tests/components/youless/test_sensor.py b/tests/components/youless/test_sensor.py new file mode 100644 index 00000000000..67dff314df7 --- /dev/null +++ b/tests/components/youless/test_sensor.py @@ -0,0 +1,23 @@ +"""Test the sensor classes for youless.""" + +from unittest.mock import patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import init_component + +from tests.common import snapshot_platform + + +async def test_sensors( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test the sensor classes for youless.""" + with patch("homeassistant.components.youless.PLATFORMS", [Platform.SENSOR]): + entry = await init_component(hass) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/youtube/__init__.py b/tests/components/youtube/__init__.py index 1b559f0f1c4..31125d3a71e 100644 --- a/tests/components/youtube/__init__.py +++ b/tests/components/youtube/__init__.py @@ -1,8 +1,8 @@ """Tests for the YouTube integration.""" +from collections.abc import AsyncGenerator import json -from typing_extensions import AsyncGenerator from youtubeaio.models import YouTubeChannel, YouTubePlaylistItem, YouTubeSubscription from youtubeaio.types import AuthScope diff --git a/tests/components/zamg/conftest.py b/tests/components/zamg/conftest.py index 1795baa7fad..9fa4f333ef8 100644 --- a/tests/components/zamg/conftest.py +++ b/tests/components/zamg/conftest.py @@ -1,10 
+1,10 @@ """Fixtures for Zamg integration tests.""" +from collections.abc import Generator import json from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from zamg import ZamgData as ZamgDevice from homeassistant.components.zamg.const import CONF_STATION_ID, DOMAIN diff --git a/tests/components/zerproc/test_light.py b/tests/components/zerproc/test_light.py index c47f960b182..6e00cfbde4c 100644 --- a/tests/components/zerproc/test_light.py +++ b/tests/components/zerproc/test_light.py @@ -35,13 +35,13 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture -async def mock_entry(hass): +async def mock_entry() -> MockConfigEntry: """Create a mock light entity.""" return MockConfigEntry(domain=DOMAIN) @pytest.fixture -async def mock_light(hass, mock_entry): +async def mock_light(hass: HomeAssistant, mock_entry: MockConfigEntry) -> MagicMock: """Create a mock light entity.""" mock_entry.add_to_hass(hass) diff --git a/tests/components/zeversolar/snapshots/test_diagnostics.ambr b/tests/components/zeversolar/snapshots/test_diagnostics.ambr index eebc8468076..4090a3262ba 100644 --- a/tests/components/zeversolar/snapshots/test_diagnostics.ambr +++ b/tests/components/zeversolar/snapshots/test_diagnostics.ambr @@ -10,6 +10,7 @@ # name: test_entry_diagnostics dict({ 'communication_status': 'OK', + 'energy_today': 123.4, 'hardware_version': 'M10', 'meter_status': 'OK', 'num_inverters': 1, diff --git a/tests/components/zeversolar/snapshots/test_sensor.ambr b/tests/components/zeversolar/snapshots/test_sensor.ambr index bee522133a5..aaef2c43d79 100644 --- a/tests/components/zeversolar/snapshots/test_sensor.ambr +++ b/tests/components/zeversolar/snapshots/test_sensor.ambr @@ -1,24 +1,4 @@ # serializer version: 1 -# name: test_sensors - ConfigEntrySnapshot({ - 'data': dict({ - 'host': 'zeversolar-fake-host', - 'port': 10200, - }), - 'disabled_by': None, - 'domain': 'zeversolar', - 'entry_id': , - 
'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }) -# --- # name: test_sensors[sensor.zeversolar_sensor_energy_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/zha/common.py b/tests/components/zha/common.py index a8bec33a23a..1dd1e5f81aa 100644 --- a/tests/components/zha/common.py +++ b/tests/components/zha/common.py @@ -1,19 +1,12 @@ """Common test objects.""" -import asyncio from datetime import timedelta -import math -from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, Mock import zigpy.zcl import zigpy.zcl.foundation as zcl_f -import homeassistant.components.zha.core.const as zha_const -from homeassistant.components.zha.core.helpers import ( - async_get_zha_config_value, - get_zha_gateway, -) +from homeassistant.components.zha.helpers import ZHADeviceProxy from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util @@ -98,7 +91,7 @@ def make_attribute(attrid, value, status=0): return attr -def send_attribute_report(hass, cluster, attrid, value): +def send_attribute_report(hass: HomeAssistant, cluster, attrid, value): """Send a single attribute report.""" return send_attributes_report(hass, cluster, {attrid: value}) @@ -131,7 +124,9 @@ async def send_attributes_report( await hass.async_block_till_done() -def find_entity_id(domain, zha_device, hass, qualifier=None): +def find_entity_id( + domain: str, zha_device: ZHADeviceProxy, hass: HomeAssistant, qualifier=None +) -> str | None: """Find the entity id under the testing. 
This is used to get the entity id in order to get the state from the state @@ -144,11 +139,13 @@ def find_entity_id(domain, zha_device, hass, qualifier=None): for entity_id in entities: if qualifier in entity_id: return entity_id - else: - return entities[0] + return None + return entities[0] -def find_entity_ids(domain, zha_device, hass): +def find_entity_ids( + domain: str, zha_device: ZHADeviceProxy, hass: HomeAssistant +) -> list[str]: """Find the entity ids under the testing. This is used to get the entity id in order to get the state from the state @@ -163,7 +160,7 @@ def find_entity_ids(domain, zha_device, hass): ] -def async_find_group_entity_id(hass, domain, group): +def async_find_group_entity_id(hass: HomeAssistant, domain, group): """Find the group entity id under test.""" entity_id = f"{domain}.coordinator_manufacturer_coordinator_model_{group.name.lower().replace(' ', '_')}" @@ -172,13 +169,6 @@ def async_find_group_entity_id(hass, domain, group): return entity_id -async def async_enable_traffic(hass, zha_devices, enabled=True): - """Allow traffic to flow through the gateway and the ZHA device.""" - for zha_device in zha_devices: - zha_device.update_available(enabled) - await hass.async_block_till_done() - - def make_zcl_header( command_id: int, global_command: bool = True, tsn: int = 1 ) -> zcl_f.ZCLHeader: @@ -199,57 +189,8 @@ def reset_clusters(clusters): cluster.write_attributes.reset_mock() -async def async_test_rejoin(hass, zigpy_device, clusters, report_counts, ep_id=1): - """Test device rejoins.""" - reset_clusters(clusters) - - zha_gateway = get_zha_gateway(hass) - await zha_gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done() - for cluster, reports in zip(clusters, report_counts, strict=False): - assert cluster.bind.call_count == 1 - assert cluster.bind.await_count == 1 - if reports: - assert cluster.configure_reporting.call_count == 0 - assert cluster.configure_reporting.await_count == 0 - assert 
cluster.configure_reporting_multiple.call_count == math.ceil( - reports / zha_const.REPORT_CONFIG_ATTR_PER_REQ - ) - assert cluster.configure_reporting_multiple.await_count == math.ceil( - reports / zha_const.REPORT_CONFIG_ATTR_PER_REQ - ) - else: - # no reports at all - assert cluster.configure_reporting.call_count == reports - assert cluster.configure_reporting.await_count == reports - assert cluster.configure_reporting_multiple.call_count == reports - assert cluster.configure_reporting_multiple.await_count == reports - - -async def async_wait_for_updates(hass): - """Wait until all scheduled updates are executed.""" - await hass.async_block_till_done() - await asyncio.sleep(0) - await asyncio.sleep(0) - await hass.async_block_till_done() - - -async def async_shift_time(hass): +async def async_shift_time(hass: HomeAssistant): """Shift time to cause call later tasks to run.""" next_update = dt_util.utcnow() + timedelta(seconds=11) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() - - -def patch_zha_config(component: str, overrides: dict[tuple[str, str], Any]): - """Patch the ZHA custom configuration defaults.""" - - def new_get_config(config_entry, section, config_key, default): - if (section, config_key) in overrides: - return overrides[section, config_key] - return async_get_zha_config_value(config_entry, section, config_key, default) - - return patch( - f"homeassistant.components.zha.{component}.async_get_zha_config_value", - side_effect=new_get_config, - ) diff --git a/tests/components/zha/conftest.py b/tests/components/zha/conftest.py index 410eaceda76..a9f4c51d75d 100644 --- a/tests/components/zha/conftest.py +++ b/tests/components/zha/conftest.py @@ -1,6 +1,6 @@ """Test configuration for the ZHA component.""" -from collections.abc import Callable +from collections.abc import Generator import itertools import time from typing import Any @@ -8,7 +8,6 @@ from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import 
warnings import pytest -from typing_extensions import Generator import zigpy from zigpy.application import ControllerApplication import zigpy.backups @@ -25,14 +24,9 @@ from zigpy.zcl.clusters.general import Basic, Groups from zigpy.zcl.foundation import Status import zigpy.zdo.types as zdo_t -import homeassistant.components.zha.core.const as zha_const -import homeassistant.components.zha.core.device as zha_core_device -from homeassistant.components.zha.core.gateway import ZHAGateway -from homeassistant.components.zha.core.helpers import get_zha_gateway +import homeassistant.components.zha.const as zha_const from homeassistant.core import HomeAssistant -from homeassistant.helpers import restore_state from homeassistant.setup import async_setup_component -import homeassistant.util.dt as dt_util from .common import patch_cluster as common_patch_cluster @@ -44,17 +38,6 @@ FIXTURE_GRP_NAME = "fixture group" COUNTER_NAMES = ["counter_1", "counter_2", "counter_3"] -@pytest.fixture(scope="module", autouse=True) -def disable_request_retry_delay(): - """Disable ZHA request retrying delay to speed up failures.""" - - with patch( - "homeassistant.components.zha.core.cluster_handlers.RETRYABLE_REQUEST_DECORATOR", - zigpy.util.retryable_request(tries=3, delay=0), - ): - yield - - @pytest.fixture(scope="module", autouse=True) def globally_load_quirks(): """Load quirks automatically so that ZHA tests run deterministically in isolation. 
@@ -128,6 +111,9 @@ class _FakeApp(ControllerApplication): ) -> None: pass + def _persist_coordinator_model_strings_in_db(self) -> None: + pass + def _wrap_mock_instance(obj: Any) -> MagicMock: """Auto-mock every attribute and method in an object.""" @@ -168,6 +154,8 @@ async def zigpy_app_controller(): app.state.node_info.nwk = 0x0000 app.state.node_info.ieee = zigpy.types.EUI64.convert("00:15:8d:00:02:32:4f:32") + app.state.node_info.manufacturer = "Coordinator Manufacturer" + app.state.node_info.model = "Coordinator Model" app.state.network_info.pan_id = 0x1234 app.state.network_info.extended_pan_id = app.state.node_info.ieee app.state.network_info.channel = 15 @@ -202,10 +190,14 @@ async def zigpy_app_controller(): async def config_entry_fixture() -> MockConfigEntry: """Fixture representing a config entry.""" return MockConfigEntry( - version=3, + version=4, domain=zha_const.DOMAIN, data={ - zigpy.config.CONF_DEVICE: {zigpy.config.CONF_DEVICE_PATH: "/dev/ttyUSB0"}, + zigpy.config.CONF_DEVICE: { + zigpy.config.CONF_DEVICE_PATH: "/dev/ttyUSB0", + zigpy.config.CONF_DEVICE_BAUDRATE: 115200, + zigpy.config.CONF_DEVICE_FLOW_CONTROL: "hardware", + }, zha_const.CONF_RADIO_TYPE: "ezsp", }, options={ @@ -280,170 +272,6 @@ def cluster_handler(): return cluster_handler -@pytest.fixture -def zigpy_device_mock(zigpy_app_controller): - """Make a fake device using the specified cluster classes.""" - - def _mock_dev( - endpoints, - ieee="00:0d:6f:00:0a:90:69:e7", - manufacturer="FakeManufacturer", - model="FakeModel", - node_descriptor=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", - nwk=0xB79C, - patch_cluster=True, - quirk=None, - attributes=None, - ): - """Make a fake device using the specified cluster classes.""" - device = zigpy.device.Device( - zigpy_app_controller, zigpy.types.EUI64.convert(ieee), nwk - ) - device.manufacturer = manufacturer - device.model = model - device.node_desc = zdo_t.NodeDescriptor.deserialize(node_descriptor)[0] - device.last_seen = time.time() - - 
for epid, ep in endpoints.items(): - endpoint = device.add_endpoint(epid) - endpoint.device_type = ep[SIG_EP_TYPE] - endpoint.profile_id = ep.get(SIG_EP_PROFILE, 0x0104) - endpoint.request = AsyncMock() - - for cluster_id in ep.get(SIG_EP_INPUT, []): - endpoint.add_input_cluster(cluster_id) - - for cluster_id in ep.get(SIG_EP_OUTPUT, []): - endpoint.add_output_cluster(cluster_id) - - device.status = zigpy.device.Status.ENDPOINTS_INIT - - if quirk: - device = quirk(zigpy_app_controller, device.ieee, device.nwk, device) - else: - # Allow zigpy to apply quirks if we don't pass one explicitly - device = zigpy.quirks.get_device(device) - - if patch_cluster: - for endpoint in (ep for epid, ep in device.endpoints.items() if epid): - endpoint.request = AsyncMock(return_value=[0]) - for cluster in itertools.chain( - endpoint.in_clusters.values(), endpoint.out_clusters.values() - ): - common_patch_cluster(cluster) - - if attributes is not None: - for ep_id, clusters in attributes.items(): - for cluster_name, attrs in clusters.items(): - cluster = getattr(device.endpoints[ep_id], cluster_name) - - for name, value in attrs.items(): - attr_id = cluster.find_attribute(name).id - cluster._attr_cache[attr_id] = value - - return device - - return _mock_dev - - -@patch("homeassistant.components.zha.setup_quirks", MagicMock(return_value=True)) -@pytest.fixture -def zha_device_joined(hass, setup_zha): - """Return a newly joined ZHA device.""" - setup_zha_fixture = setup_zha - - async def _zha_device(zigpy_dev, *, setup_zha: bool = True): - zigpy_dev.last_seen = time.time() - - if setup_zha: - await setup_zha_fixture() - - zha_gateway = get_zha_gateway(hass) - zha_gateway.application_controller.devices[zigpy_dev.ieee] = zigpy_dev - await zha_gateway.async_device_initialized(zigpy_dev) - await hass.async_block_till_done() - return zha_gateway.get_device(zigpy_dev.ieee) - - return _zha_device - - -@patch("homeassistant.components.zha.setup_quirks", MagicMock(return_value=True)) 
-@pytest.fixture -def zha_device_restored(hass, zigpy_app_controller, setup_zha): - """Return a restored ZHA device.""" - setup_zha_fixture = setup_zha - - async def _zha_device(zigpy_dev, *, last_seen=None, setup_zha: bool = True): - zigpy_app_controller.devices[zigpy_dev.ieee] = zigpy_dev - - if last_seen is not None: - zigpy_dev.last_seen = last_seen - - if setup_zha: - await setup_zha_fixture() - - zha_gateway = get_zha_gateway(hass) - return zha_gateway.get_device(zigpy_dev.ieee) - - return _zha_device - - -@pytest.fixture(params=["zha_device_joined", "zha_device_restored"]) -def zha_device_joined_restored(request: pytest.FixtureRequest): - """Join or restore ZHA device.""" - named_method = request.getfixturevalue(request.param) - named_method.name = request.param - return named_method - - -@pytest.fixture -def zha_device_mock( - hass: HomeAssistant, config_entry, zigpy_device_mock -) -> Callable[..., zha_core_device.ZHADevice]: - """Return a ZHA Device factory.""" - - def _zha_device( - endpoints=None, - ieee="00:11:22:33:44:55:66:77", - manufacturer="mock manufacturer", - model="mock model", - node_desc=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", - patch_cluster=True, - ) -> zha_core_device.ZHADevice: - if endpoints is None: - endpoints = { - 1: { - "in_clusters": [0, 1, 8, 768], - "out_clusters": [0x19], - "device_type": 0x0105, - }, - 2: { - "in_clusters": [0], - "out_clusters": [6, 8, 0x19, 768], - "device_type": 0x0810, - }, - } - zigpy_device = zigpy_device_mock( - endpoints, ieee, manufacturer, model, node_desc, patch_cluster=patch_cluster - ) - return zha_core_device.ZHADevice( - hass, - zigpy_device, - ZHAGateway(hass, {}, config_entry), - ) - - return _zha_device - - -@pytest.fixture -def hass_disable_services(hass): - """Mock services.""" - with patch.object( - hass, "services", MagicMock(has_service=MagicMock(return_value=True)) - ): - yield hass - - @pytest.fixture(autouse=True) def speed_up_radio_mgr(): """Speed up the radio manager connection 
time by removing delays.""" @@ -523,31 +351,66 @@ def network_backup() -> zigpy.backups.NetworkBackup: @pytest.fixture -def core_rs(hass_storage: dict[str, Any]) -> Callable[[str, Any, dict[str, Any]], None]: - """Core.restore_state fixture.""" +def zigpy_device_mock(zigpy_app_controller): + """Make a fake device using the specified cluster classes.""" - def _storage(entity_id: str, state: str, attributes: dict[str, Any]) -> None: - now = dt_util.utcnow().isoformat() + def _mock_dev( + endpoints, + ieee="00:0d:6f:00:0a:90:69:e7", + manufacturer="FakeManufacturer", + model="FakeModel", + node_descriptor=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", + nwk=0xB79C, + patch_cluster=True, + quirk=None, + attributes=None, + ): + """Make a fake device using the specified cluster classes.""" + device = zigpy.device.Device( + zigpy_app_controller, zigpy.types.EUI64.convert(ieee), nwk + ) + device.manufacturer = manufacturer + device.model = model + device.node_desc = zdo_t.NodeDescriptor.deserialize(node_descriptor)[0] + device.last_seen = time.time() - hass_storage[restore_state.STORAGE_KEY] = { - "version": restore_state.STORAGE_VERSION, - "key": restore_state.STORAGE_KEY, - "data": [ - { - "state": { - "entity_id": entity_id, - "state": str(state), - "attributes": attributes, - "last_changed": now, - "last_updated": now, - "context": { - "id": "3c2243ff5f30447eb12e7348cfd5b8ff", - "user_id": None, - }, - }, - "last_seen": now, - } - ], - } + for epid, ep in endpoints.items(): + endpoint = device.add_endpoint(epid) + endpoint.device_type = ep[SIG_EP_TYPE] + endpoint.profile_id = ep.get(SIG_EP_PROFILE, 0x0104) + endpoint.request = AsyncMock() - return _storage + for cluster_id in ep.get(SIG_EP_INPUT, []): + endpoint.add_input_cluster(cluster_id) + + for cluster_id in ep.get(SIG_EP_OUTPUT, []): + endpoint.add_output_cluster(cluster_id) + + device.status = zigpy.device.Status.ENDPOINTS_INIT + + if quirk: + device = quirk(zigpy_app_controller, device.ieee, device.nwk, device) + 
else: + # Allow zigpy to apply quirks if we don't pass one explicitly + device = zigpy.quirks.get_device(device) + + if patch_cluster: + for endpoint in (ep for epid, ep in device.endpoints.items() if epid): + endpoint.request = AsyncMock(return_value=[0]) + for cluster in itertools.chain( + endpoint.in_clusters.values(), endpoint.out_clusters.values() + ): + common_patch_cluster(cluster) + + if attributes is not None: + for ep_id, clusters in attributes.items(): + for cluster_name, attrs in clusters.items(): + cluster = getattr(device.endpoints[ep_id], cluster_name) + + for name, value in attrs.items(): + attr_id = cluster.find_attribute(name).id + cluster._attr_cache[attr_id] = value + + return device + + return _mock_dev diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..67655aebc8c --- /dev/null +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -0,0 +1,292 @@ +# serializer version: 1 +# name: test_diagnostics_for_config_entry + dict({ + 'application_state': dict({ + 'broadcast_counters': dict({ + }), + 'counters': dict({ + 'ezsp_counters': dict({ + 'counter_1': dict({ + '__type': "", + 'repr': "Counter(name='counter_1', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + 'counter_2': dict({ + '__type': "", + 'repr': "Counter(name='counter_2', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + 'counter_3': dict({ + '__type': "", + 'repr': "Counter(name='counter_3', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + }), + }), + 'device_counters': dict({ + }), + 'group_counters': dict({ + }), + 'network_info': dict({ + 'channel': 15, + 'channel_mask': 0, + 'children': list([ + ]), + 'extended_pan_id': '**REDACTED**', + 'key_table': list([ + ]), + 'metadata': dict({ + }), + 'network_key': '**REDACTED**', + 'nwk_addresses': dict({ + }), + 'nwk_manager_id': 0, + 'nwk_update_id': 0, + 'pan_id': 4660, + 'security_level': 0, 
+ 'source': None, + 'stack_specific': dict({ + }), + 'tc_link_key': dict({ + 'key': list([ + 90, + 105, + 103, + 66, + 101, + 101, + 65, + 108, + 108, + 105, + 97, + 110, + 99, + 101, + 48, + 57, + ]), + 'partner_ieee': '**REDACTED**', + 'rx_counter': 0, + 'seq': 0, + 'tx_counter': 0, + }), + }), + 'node_info': dict({ + 'ieee': '**REDACTED**', + 'logical_type': 2, + 'manufacturer': 'Coordinator Manufacturer', + 'model': 'Coordinator Model', + 'nwk': 0, + 'version': None, + }), + }), + 'config': dict({ + 'device_config': dict({ + }), + 'enable_quirks': False, + }), + 'config_entry': dict({ + 'data': dict({ + 'device': dict({ + 'baudrate': 115200, + 'flow_control': 'hardware', + 'path': '/dev/ttyUSB0', + }), + 'radio_type': 'ezsp', + }), + 'disabled_by': None, + 'domain': 'zha', + 'minor_version': 1, + 'options': dict({ + 'custom_configuration': dict({ + 'zha_alarm_options': dict({ + 'alarm_arm_requires_code': False, + 'alarm_failed_tries': 2, + 'alarm_master_code': '**REDACTED**', + }), + 'zha_options': dict({ + 'enhanced_light_transition': True, + 'group_members_assume_state': False, + }), + }), + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 4, + }), + 'devices': list([ + dict({ + 'logical_type': 'Coordinator', + 'manufacturer': 'Coordinator Manufacturer', + 'model': 'Coordinator Model', + }), + dict({ + 'logical_type': 'EndDevice', + 'manufacturer': 'FakeManufacturer', + 'model': 'FakeModel', + }), + ]), + 'energy_scan': dict({ + '11': 4.313725490196078, + '12': 4.705882352941177, + '13': 5.098039215686274, + '14': 5.490196078431373, + '15': 5.882352941176471, + '16': 6.2745098039215685, + '17': 6.666666666666667, + '18': 7.0588235294117645, + '19': 7.450980392156863, + '20': 7.8431372549019605, + '21': 8.235294117647058, + '22': 8.627450980392156, + '23': 9.019607843137255, + '24': 9.411764705882353, + '25': 9.803921568627452, + '26': 10.196078431372548, + 
}), + }) +# --- +# name: test_diagnostics_for_device + dict({ + 'active_coordinator': False, + 'area_id': None, + 'available': True, + 'cluster_details': dict({ + '1': dict({ + 'device_type': dict({ + 'id': 1025, + 'name': 'IAS_ANCILLARY_CONTROL', + }), + 'in_clusters': dict({ + '0x0500': dict({ + 'attributes': dict({ + '0x0000': dict({ + 'attribute': "ZCLAttributeDef(id=0x0000, name='zone_state', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0001': dict({ + 'attribute': "ZCLAttributeDef(id=0x0001, name='zone_type', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0002': dict({ + 'attribute': "ZCLAttributeDef(id=0x0002, name='zone_status', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0010': dict({ + 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': list([ + 50, + 79, + 50, + 2, + 0, + 141, + 21, + 0, + ]), + }), + '0x0011': dict({ + 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0012': dict({ + 'attribute': "ZCLAttributeDef(id=0x0012, name='num_zone_sensitivity_levels_supported', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0013': dict({ + 'attribute': "ZCLAttributeDef(id=0x0013, name='current_zone_sensitivity_level', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, + }), + }), + 'endpoint_attribute': 'ias_zone', + 'unsupported_attributes': list([ + 18, + 'current_zone_sensitivity_level', + ]), + }), + '0x0501': dict({ + 'attributes': dict({ + '0xfffd': dict({ + 'attribute': "ZCLAttributeDef(id=0xFFFD, name='cluster_revision', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0xfffe': dict({ + 'attribute': 
"ZCLAttributeDef(id=0xFFFE, name='reporting_status', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, + }), + }), + 'endpoint_attribute': 'ias_ace', + 'unsupported_attributes': list([ + 4096, + 'unknown_attribute_name', + ]), + }), + }), + 'out_clusters': dict({ + }), + 'profile_id': 260, + }), + }), + 'device_type': 'EndDevice', + 'endpoint_names': list([ + dict({ + 'name': 'IAS_ANCILLARY_CONTROL', + }), + ]), + 'entities': list([ + dict({ + 'entity_id': 'alarm_control_panel.fakemanufacturer_fakemodel_alarm_control_panel', + 'name': 'FakeManufacturer FakeModel', + }), + ]), + 'ieee': '**REDACTED**', + 'lqi': None, + 'manufacturer': 'FakeManufacturer', + 'manufacturer_code': 4098, + 'model': 'FakeModel', + 'name': 'FakeManufacturer FakeModel', + 'neighbors': list([ + ]), + 'nwk': 47004, + 'power_source': 'Mains', + 'quirk_applied': False, + 'quirk_class': 'zigpy.device.Device', + 'quirk_id': None, + 'routes': list([ + ]), + 'rssi': None, + 'signature': dict({ + 'endpoints': dict({ + '1': dict({ + 'device_type': '0x0401', + 'input_clusters': list([ + '0x0500', + '0x0501', + ]), + 'output_clusters': list([ + ]), + 'profile_id': '0x0104', + }), + }), + 'manufacturer': 'FakeManufacturer', + 'model': 'FakeModel', + 'node_descriptor': dict({ + 'aps_flags': 0, + 'complex_descriptor_available': 0, + 'descriptor_capability_field': 0, + 'frequency_band': 8, + 'logical_type': 2, + 'mac_capability_flags': 140, + 'manufacturer_code': 4098, + 'maximum_buffer_size': 82, + 'maximum_incoming_transfer_size': 82, + 'maximum_outgoing_transfer_size': 82, + 'reserved': 0, + 'server_mask': 0, + 'user_descriptor_available': 0, + }), + }), + 'user_given_name': None, + }) +# --- diff --git a/tests/components/zha/test_alarm_control_panel.py b/tests/components/zha/test_alarm_control_panel.py index 8d3bd76ef61..3473a9b00ad 100644 --- a/tests/components/zha/test_alarm_control_panel.py +++ b/tests/components/zha/test_alarm_control_panel.py @@ -4,10 +4,17 @@ 
from unittest.mock import AsyncMock, call, patch, sentinel import pytest from zigpy.profiles import zha +from zigpy.zcl import Cluster from zigpy.zcl.clusters import security import zigpy.zcl.foundation as zcl_f from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import ( ATTR_ENTITY_ID, STATE_ALARM_ARMED_AWAY, @@ -15,12 +22,11 @@ from homeassistant.const import ( STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, - STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant -from .common import async_enable_traffic, find_entity_id +from .common import find_entity_id from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @@ -39,44 +45,40 @@ def alarm_control_panel_platform_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" - ) - - @patch( "zigpy.zcl.clusters.security.IasAce.client_command", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) async def test_alarm_control_panel( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, setup_zha, zigpy_device_mock ) -> None: """Test ZHA alarm control panel platform.""" - zha_device = await zha_device_joined_restored(zigpy_device) - cluster = zigpy_device.endpoints.get(1).ias_ace - entity_id = find_entity_id(Platform.ALARM_CONTROL_PANEL, zha_device, hass) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = 
zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.ALARM_CONTROL_PANEL, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].ias_ace assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the panel was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to STATE_ALARM_DISARMED assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED # arm_away from HA @@ -255,8 +257,30 @@ async def test_alarm_control_panel( # reset the panel await reset_alarm_panel(hass, cluster, entity_id) + await hass.services.async_call( + ALARM_DOMAIN, + "alarm_trigger", + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert cluster.client_command.call_count == 1 + assert cluster.client_command.await_count == 1 + assert cluster.client_command.call_args == call( + 4, + security.IasAce.PanelStatus.In_Alarm, + 0, + security.IasAce.AudibleNotification.Default_Sound, + security.IasAce.AlarmStatus.Emergency_Panic, + ) -async def reset_alarm_panel(hass, cluster, entity_id): + # reset the panel + await reset_alarm_panel(hass, cluster, entity_id) + 
cluster.client_command.reset_mock() + + +async def reset_alarm_panel(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Reset the state of the alarm panel.""" cluster.client_command.reset_mock() await hass.services.async_call( diff --git a/tests/components/zha/test_api.py b/tests/components/zha/test_api.py index ed3394aafba..7aff6d81f5d 100644 --- a/tests/components/zha/test_api.py +++ b/tests/components/zha/test_api.py @@ -6,12 +6,12 @@ from typing import TYPE_CHECKING from unittest.mock import AsyncMock, MagicMock, call, patch import pytest +from zha.application.const import RadioType import zigpy.backups import zigpy.state from homeassistant.components.zha import api -from homeassistant.components.zha.core.const import RadioType -from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.components.zha.helpers import get_zha_gateway_proxy from homeassistant.core import HomeAssistant if TYPE_CHECKING: @@ -41,7 +41,7 @@ async def test_async_get_network_settings_inactive( """Test reading settings with an inactive ZHA installation.""" await setup_zha() - gateway = get_zha_gateway(hass) + gateway = get_zha_gateway_proxy(hass) await hass.config_entries.async_unload(gateway.config_entry.entry_id) backup = zigpy.backups.NetworkBackup() @@ -53,7 +53,7 @@ async def test_async_get_network_settings_inactive( controller.new = AsyncMock(return_value=zigpy_app_controller) with patch.dict( - "homeassistant.components.zha.core.const.RadioType._member_map_", + "homeassistant.components.zha.api.RadioType._member_map_", ezsp=MagicMock(controller=controller, description="EZSP"), ): settings = await api.async_get_network_settings(hass) @@ -68,7 +68,7 @@ async def test_async_get_network_settings_missing( """Test reading settings with an inactive ZHA installation, no valid channel.""" await setup_zha() - gateway = get_zha_gateway(hass) + gateway = get_zha_gateway_proxy(hass) await hass.config_entries.async_unload(gateway.config_entry.entry_id) # 
Network settings were never loaded for whatever reason diff --git a/tests/components/zha/test_base.py b/tests/components/zha/test_base.py deleted file mode 100644 index 203df2ffda5..00000000000 --- a/tests/components/zha/test_base.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Test ZHA base cluster handlers module.""" - -from homeassistant.components.zha.core.cluster_handlers import parse_and_log_command - -from .test_cluster_handlers import ( # noqa: F401 - endpoint, - poll_control_ch, - zigpy_coordinator_device, -) - - -def test_parse_and_log_command(poll_control_ch) -> None: # noqa: F811 - """Test that `parse_and_log_command` correctly parses a known command.""" - assert parse_and_log_command(poll_control_ch, 0x00, 0x01, []) == "fast_poll_stop" - - -def test_parse_and_log_command_unknown(poll_control_ch) -> None: # noqa: F811 - """Test that `parse_and_log_command` correctly parses an unknown command.""" - assert parse_and_log_command(poll_control_ch, 0x00, 0xAB, []) == "0xAB" diff --git a/tests/components/zha/test_binary_sensor.py b/tests/components/zha/test_binary_sensor.py index 8276223926d..419823b3b52 100644 --- a/tests/components/zha/test_binary_sensor.py +++ b/tests/components/zha/test_binary_sensor.py @@ -1,54 +1,25 @@ """Test ZHA binary sensor.""" -from collections.abc import Callable -from typing import Any from unittest.mock import patch import pytest -import zigpy.profiles.zha -from zigpy.zcl.clusters import general, measurement, security +from zigpy.profiles import zha +from zigpy.zcl.clusters import general -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - send_attributes_report, -) +from .common 
import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import async_mock_load_restore_state_from_storage - -DEVICE_IAS = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ZONE, - SIG_EP_INPUT: [security.IasZone.cluster_id], - SIG_EP_OUTPUT: [], - } -} - - -DEVICE_OCCUPANCY = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.OCCUPANCY_SENSOR, - SIG_EP_INPUT: [measurement.OccupancySensing.cluster_id], - SIG_EP_OUTPUT: [], - } -} - - -DEVICE_ONOFF = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SENSOR, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - } -} +ON = 1 +OFF = 0 @pytest.fixture(autouse=True) @@ -58,121 +29,51 @@ def binary_sensor_platform_only(): "homeassistant.components.zha.PLATFORMS", ( Platform.BINARY_SENSOR, - Platform.DEVICE_TRACKER, - Platform.NUMBER, - Platform.SELECT, + Platform.SENSOR, ), ): yield -async def async_test_binary_sensor_on_off(hass, cluster, entity_id): - """Test getting on and off messages for binary sensors.""" - # binary sensor on - await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) - assert hass.states.get(entity_id).state == STATE_ON - - # binary sensor off - await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) - assert hass.states.get(entity_id).state == STATE_OFF - - -async def async_test_iaszone_on_off(hass, cluster, entity_id): - """Test getting on and off messages for iaszone binary sensors.""" - # binary sensor on - cluster.listener_event("cluster_command", 1, 0, [1]) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ON - - # binary sensor off - cluster.listener_event("cluster_command", 1, 0, [0]) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF 
- - # check that binary sensor remains off when non-alarm bits change - cluster.listener_event("cluster_command", 1, 0, [0b1111111100]) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF - - -@pytest.mark.parametrize( - ("device", "on_off_test", "cluster_name", "reporting", "name"), - [ - ( - DEVICE_IAS, - async_test_iaszone_on_off, - "ias_zone", - (0,), - "FakeManufacturer FakeModel IAS zone", - ), - ( - DEVICE_OCCUPANCY, - async_test_binary_sensor_on_off, - "occupancy", - (1,), - "FakeManufacturer FakeModel Occupancy", - ), - ], -) async def test_binary_sensor( hass: HomeAssistant, + setup_zha, zigpy_device_mock, - zha_device_joined_restored, - device, - on_off_test, - cluster_name, - reporting, - name, ) -> None: """Test ZHA binary_sensor platform.""" - zigpy_device = zigpy_device_mock(device) - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SENSOR, + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + ) + cluster = zigpy_device.endpoints[1].out_clusters[general.OnOff.cluster_id] + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device_proxy, hass) assert entity_id is not None - assert hass.states.get(entity_id).name == name - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the sensors exist 
and are in the unavailable state - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - await async_enable_traffic(hass, [zha_device]) - - # test that the sensors exist and are in the off state assert hass.states.get(entity_id).state == STATE_OFF - # test getting messages that trigger and reset the sensors - cluster = getattr(zigpy_device.endpoints[1], cluster_name) - await on_off_test(hass, cluster, entity_id) + await send_attributes_report( + hass, cluster, {general.OnOff.AttributeDefs.on_off.id: ON} + ) + assert hass.states.get(entity_id).state == STATE_ON - # test rejoin - await async_test_rejoin(hass, zigpy_device, [cluster], reporting) + await send_attributes_report( + hass, cluster, {general.OnOff.AttributeDefs.on_off.id: OFF} + ) assert hass.states.get(entity_id).state == STATE_OFF - - -@pytest.mark.parametrize( - "restored_state", - [ - STATE_ON, - STATE_OFF, - ], -) -async def test_onoff_binary_sensor_restore_state( - hass: HomeAssistant, - zigpy_device_mock, - core_rs: Callable[[str, Any, dict[str, Any]], None], - zha_device_restored, - restored_state: str, -) -> None: - """Test ZHA OnOff binary_sensor restores last state from HA.""" - - entity_id = "binary_sensor.fakemanufacturer_fakemodel_opening" - core_rs(entity_id, state=restored_state, attributes={}) - await async_mock_load_restore_state_from_storage(hass) - - zigpy_device = zigpy_device_mock(DEVICE_ONOFF) - zha_device = await zha_device_restored(zigpy_device) - entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass) - - assert entity_id is not None - assert hass.states.get(entity_id).state == restored_state diff --git a/tests/components/zha/test_button.py b/tests/components/zha/test_button.py index fdcc0d7271c..574805db5f6 100644 --- a/tests/components/zha/test_button.py +++ b/tests/components/zha/test_button.py @@ -1,29 +1,21 @@ """Test ZHA button.""" -from typing import Final -from unittest.mock import call, patch +from unittest.mock import patch from freezegun import 
freeze_time import pytest -from zhaquirks.const import ( - DEVICE_TYPE, - ENDPOINTS, - INPUT_CLUSTERS, - OUTPUT_CLUSTERS, - PROFILE_ID, -) -from zhaquirks.tuya.ts0601_valve import ParksideTuyaValveManufCluster -from zigpy.const import SIG_EP_PROFILE -from zigpy.exceptions import ZigbeeException +from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from zigpy.profiles import zha -from zigpy.quirks import CustomCluster, CustomDevice -from zigpy.quirks.v2 import add_to_registry_v2 -import zigpy.types as t -from zigpy.zcl.clusters import general, security -from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster +from zigpy.zcl.clusters import general import zigpy.zcl.foundation as zcl_f from homeassistant.components.button import DOMAIN, SERVICE_PRESS, ButtonDeviceClass +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -32,11 +24,9 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from .common import find_entity_id, update_attribute_cache -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .common import find_entity_id @pytest.fixture(autouse=True) @@ -44,106 +34,53 @@ def button_platform_only(): """Only set up the button and required base platforms to speed up tests.""" with patch( "homeassistant.components.zha.PLATFORMS", - ( - Platform.BINARY_SENSOR, - Platform.BUTTON, - Platform.DEVICE_TRACKER, - Platform.NUMBER, - Platform.SELECT, - Platform.SENSOR, - Platform.SWITCH, - ), + (Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR), ): yield @pytest.fixture -async def contact_sensor( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Contact sensor 
fixture.""" +async def setup_zha_integration(hass: HomeAssistant, setup_zha): + """Set up ZHA component.""" - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - security.IasZone.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ZONE, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].identify - - -class FrostLockQuirk(CustomDevice): - """Quirk with frost lock attribute.""" - - class TuyaManufCluster(CustomCluster, ManufacturerSpecificCluster): - """Tuya manufacturer specific cluster.""" - - cluster_id = 0xEF00 - ep_attribute = "tuya_manufacturer" - - attributes = {0xEF01: ("frost_lock_reset", t.Bool)} - - replacement = { - ENDPOINTS: { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, - INPUT_CLUSTERS: [general.Basic.cluster_id, TuyaManufCluster], - OUTPUT_CLUSTERS: [], - }, - } - } - - -@pytest.fixture -async def tuya_water_valve( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Tuya Water Valve fixture.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, - INPUT_CLUSTERS: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.Groups.cluster_id, - general.Scenes.cluster_id, - general.OnOff.cluster_id, - ParksideTuyaValveManufCluster.cluster_id, - ], - OUTPUT_CLUSTERS: [general.Time.cluster_id, general.Ota.cluster_id], - }, - }, - manufacturer="_TZE200_htnnfasr", - model="TS0601", - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].tuya_manufacturer + # if we call this in the test itself the test hangs forever + await setup_zha() @freeze_time("2021-11-04 17:37:00", tz_offset=-1) async def test_button( - hass: HomeAssistant, entity_registry: 
er.EntityRegistry, contact_sensor + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + setup_zha_integration, # pylint: disable=unused-argument + zigpy_device_mock, ) -> None: """Test ZHA button platform.""" - zha_device, cluster = contact_sensor - assert cluster is not None - entity_id = find_entity_id(DOMAIN, zha_device, hass) + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SENSOR, + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + ) + cluster = zigpy_device.endpoints[1].identify + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.BUTTON, zha_device_proxy, hass) assert entity_id is not None state = hass.states.get(entity_id) @@ -175,198 +112,3 @@ async def test_button( assert state assert state.state == "2021-11-04T16:37:00+00:00" assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.IDENTIFY - - -async def test_frost_unlock( - hass: HomeAssistant, entity_registry: er.EntityRegistry, tuya_water_valve -) -> None: - """Test custom frost unlock ZHA button.""" - - zha_device, cluster = tuya_water_valve - assert cluster is not None - entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="frost_lock_reset") - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_UNKNOWN - assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.RESTART - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.entity_category == EntityCategory.CONFIG - 
- with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x00, zcl_f.Status.SUCCESS], - ): - await hass.services.async_call( - DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.async_block_till_done() - assert cluster.write_attributes.mock_calls == [ - call({"frost_lock_reset": 0}, manufacturer=None) - ] - - state = hass.states.get(entity_id) - assert state - assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.RESTART - - cluster.write_attributes.reset_mock() - cluster.write_attributes.side_effect = ZigbeeException - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - # There are three retries - assert cluster.write_attributes.mock_calls == [ - call({"frost_lock_reset": 0}, manufacturer=None), - call({"frost_lock_reset": 0}, manufacturer=None), - call({"frost_lock_reset": 0}, manufacturer=None), - ] - - -class FakeManufacturerCluster(CustomCluster, ManufacturerSpecificCluster): - """Fake manufacturer cluster.""" - - cluster_id: Final = 0xFFF3 - ep_attribute: Final = "mfg_identify" - - class AttributeDefs(zcl_f.BaseAttributeDefs): - """Attribute definitions.""" - - feed: Final = zcl_f.ZCLAttributeDef( - id=0x0000, type=t.uint8_t, access="rw", is_manufacturer_specific=True - ) - - class ServerCommandDefs(zcl_f.BaseCommandDefs): - """Server command definitions.""" - - self_test: Final = zcl_f.ZCLCommandDef( - id=0x00, schema={"identify_time": t.uint16_t}, direction=False - ) - - -( - add_to_registry_v2("Fake_Model", "Fake_Manufacturer") - .replaces(FakeManufacturerCluster) - .command_button( - FakeManufacturerCluster.ServerCommandDefs.self_test.name, - FakeManufacturerCluster.cluster_id, - command_args=(5,), - ) - .write_attr_button( - FakeManufacturerCluster.AttributeDefs.feed.name, - 2, - FakeManufacturerCluster.cluster_id, - ) -) - - -@pytest.fixture -async def custom_button_device( - hass: HomeAssistant, 
zigpy_device_mock, zha_device_joined_restored -): - """Button device fixture for quirks button tests.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - FakeManufacturerCluster.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.REMOTE_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - manufacturer="Fake_Model", - model="Fake_Manufacturer", - ) - - zigpy_device.endpoints[1].mfg_identify.PLUGGED_ATTR_READS = { - FakeManufacturerCluster.AttributeDefs.feed.name: 0, - } - update_attribute_cache(zigpy_device.endpoints[1].mfg_identify) - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].mfg_identify - - -@freeze_time("2021-11-04 17:37:00", tz_offset=-1) -async def test_quirks_command_button(hass: HomeAssistant, custom_button_device) -> None: - """Test ZHA button platform.""" - - zha_device, cluster = custom_button_device - assert cluster is not None - entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="self_test") - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_UNKNOWN - - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x00, zcl_f.Status.SUCCESS], - ): - await hass.services.async_call( - DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.async_block_till_done() - assert len(cluster.request.mock_calls) == 1 - assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == 0 - assert cluster.request.call_args[0][3] == 5 # duration in seconds - - state = hass.states.get(entity_id) - assert state - assert state.state == "2021-11-04T16:37:00+00:00" - - -@freeze_time("2021-11-04 17:37:00", tz_offset=-1) -async def test_quirks_write_attr_button( - hass: HomeAssistant, custom_button_device -) -> None: - """Test ZHA button platform.""" - - zha_device, cluster = custom_button_device - assert cluster is 
not None - entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="feed") - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_UNKNOWN - assert cluster.get(cluster.AttributeDefs.feed.name) == 0 - - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x00, zcl_f.Status.SUCCESS], - ): - await hass.services.async_call( - DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.async_block_till_done() - assert cluster.write_attributes.mock_calls == [ - call({cluster.AttributeDefs.feed.name: 2}, manufacturer=None) - ] - - state = hass.states.get(entity_id) - assert state - assert state.state == "2021-11-04T16:37:00+00:00" - assert cluster.get(cluster.AttributeDefs.feed.name) == 2 diff --git a/tests/components/zha/test_climate.py b/tests/components/zha/test_climate.py index 32ef08fcd96..7b94db51d04 100644 --- a/tests/components/zha/test_climate.py +++ b/tests/components/zha/test_climate.py @@ -1,17 +1,17 @@ """Test ZHA climate.""" from typing import Literal -from unittest.mock import call, patch +from unittest.mock import patch import pytest +from zha.application.platforms.climate.const import HVAC_MODE_2_SYSTEM, SEQ_OF_OPERATION import zhaquirks.sinope.thermostat -from zhaquirks.sinope.thermostat import SinopeTechnologiesThermostatCluster import zhaquirks.tuya.ts0601_trv import zigpy.profiles +from zigpy.profiles import zha import zigpy.types import zigpy.zcl.clusters from zigpy.zcl.clusters.hvac import Thermostat -import zigpy.zcl.foundation as zcl_f from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, @@ -28,10 +28,6 @@ from homeassistant.components.climate import ( FAN_LOW, FAN_ON, PRESET_AWAY, - PRESET_BOOST, - PRESET_COMFORT, - PRESET_ECO, - PRESET_NONE, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -39,13 +35,11 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from 
homeassistant.components.zha.climate import HVAC_MODE_2_SYSTEM, SEQ_OF_OPERATION -from homeassistant.components.zha.core.const import ( - PRESET_COMPLEX, - PRESET_SCHEDULE, - PRESET_TEMP_MANUAL, +from homeassistant.components.zha.helpers import ( + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) -from homeassistant.components.zha.core.device import ZHADevice from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, @@ -53,15 +47,15 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ServiceValidationError -from .common import async_enable_traffic, find_entity_id, send_attributes_report +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE CLIMATE = { 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.THERMOSTAT, SIG_EP_INPUT: [ zigpy.zcl.clusters.general.Basic.cluster_id, zigpy.zcl.clusters.general.Identify.cluster_id, @@ -74,8 +68,8 @@ CLIMATE = { CLIMATE_FAN = { 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.THERMOSTAT, SIG_EP_INPUT: [ zigpy.zcl.clusters.general.Basic.cluster_id, zigpy.zcl.clusters.general.Identify.cluster_id, @@ -108,72 +102,7 @@ CLIMATE_SINOPE = { }, } -CLIMATE_ZEN = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.general.Identify.cluster_id, - zigpy.zcl.clusters.hvac.Fan.cluster_id, - zigpy.zcl.clusters.hvac.Thermostat.cluster_id, - 
zigpy.zcl.clusters.hvac.UserInterface.cluster_id, - ], - SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], - } -} - -CLIMATE_MOES = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.general.Identify.cluster_id, - zigpy.zcl.clusters.hvac.Thermostat.cluster_id, - zigpy.zcl.clusters.hvac.UserInterface.cluster_id, - 61148, - ], - SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], - } -} - -CLIMATE_BECA = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SMART_PLUG, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.general.Groups.cluster_id, - zigpy.zcl.clusters.general.Scenes.cluster_id, - 61148, - ], - SIG_EP_OUTPUT: [ - zigpy.zcl.clusters.general.Time.cluster_id, - zigpy.zcl.clusters.general.Ota.cluster_id, - ], - } -} - -CLIMATE_ZONNSMART = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.hvac.Thermostat.cluster_id, - zigpy.zcl.clusters.hvac.UserInterface.cluster_id, - 61148, - ], - SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], - } -} - MANUF_SINOPE = "Sinope Technologies" -MANUF_ZEN = "Zen Within" -MANUF_MOES = "_TZE200_ckud7u2l" -MANUF_BECA = "_TZE200_b6wax7g0" -MANUF_ZONNSMART = "_TZE200_hue3yfsn" ZCL_ATTR_PLUG = { "abs_min_heat_setpoint_limit": 800, @@ -218,22 +147,22 @@ def climate_platform_only(): @pytest.fixture -def device_climate_mock(hass, zigpy_device_mock, zha_device_joined): +def device_climate_mock(hass: HomeAssistant, setup_zha, zigpy_device_mock): """Test regular thermostat device.""" async def _dev(clusters, plug=None, manuf=None, quirk=None): - if plug is None: - plugged_attrs = ZCL_ATTR_PLUG - else: - plugged_attrs = 
{**ZCL_ATTR_PLUG, **plug} - + plugged_attrs = ZCL_ATTR_PLUG if plug is None else {**ZCL_ATTR_PLUG, **plug} zigpy_device = zigpy_device_mock(clusters, manufacturer=manuf, quirk=quirk) zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 zigpy_device.endpoints[1].thermostat.PLUGGED_ATTR_READS = plugged_attrs - zha_device = await zha_device_joined(zigpy_device) - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - return zha_device + + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + return gateway_proxy.get_device_proxy(zigpy_device.ieee) return _dev @@ -268,44 +197,6 @@ async def device_climate_sinope(device_climate_mock): ) -@pytest.fixture -async def device_climate_zen(device_climate_mock): - """Zen Within thermostat.""" - - return await device_climate_mock(CLIMATE_ZEN, manuf=MANUF_ZEN) - - -@pytest.fixture -async def device_climate_moes(device_climate_mock): - """MOES thermostat.""" - - return await device_climate_mock( - CLIMATE_MOES, manuf=MANUF_MOES, quirk=zhaquirks.tuya.ts0601_trv.MoesHY368_Type1 - ) - - -@pytest.fixture -async def device_climate_beca(device_climate_mock) -> ZHADevice: - """Beca thermostat.""" - - return await device_climate_mock( - CLIMATE_BECA, - manuf=MANUF_BECA, - quirk=zhaquirks.tuya.ts0601_trv.MoesHY368_Type1new, - ) - - -@pytest.fixture -async def device_climate_zonnsmart(device_climate_mock): - """ZONNSMART thermostat.""" - - return await device_climate_mock( - CLIMATE_ZONNSMART, - manuf=MANUF_ZONNSMART, - quirk=zhaquirks.tuya.ts0601_trv.ZonnsmartTV01_ZG, - ) - - def test_sequence_mappings() -> None: """Test correct mapping between control sequence -> HVAC Mode -> Sysmode.""" @@ -318,7 +209,7 @@ def test_sequence_mappings() -> None: async def 
test_climate_local_temperature(hass: HomeAssistant, device_climate) -> None: """Test local temperature.""" - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -334,7 +225,7 @@ async def test_climate_hvac_action_running_state( ) -> None: """Test hvac action via running state.""" - thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat + thrm_cluster = device_climate_sinope.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) sensor_entity_id = find_entity_id( Platform.SENSOR, device_climate_sinope, hass, "hvac" @@ -394,101 +285,12 @@ async def test_climate_hvac_action_running_state( assert hvac_sensor_state.state == HVACAction.FAN -async def test_climate_hvac_action_running_state_zen( - hass: HomeAssistant, device_climate_zen -) -> None: - """Test Zen hvac action via running state.""" - - thrm_cluster = device_climate_zen.device.endpoints[1].thermostat - entity_id = find_entity_id(Platform.CLIMATE, device_climate_zen, hass) - sensor_entity_id = find_entity_id( - Platform.SENSOR, device_climate_zen, hass, "hvac_action" - ) - - state = hass.states.get(entity_id) - assert ATTR_HVAC_ACTION not in state.attributes - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == "unknown" - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Cool_2nd_Stage_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.COOLING - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_State_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] 
== HVACAction.FAN - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.FAN - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Heat_2nd_Stage_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.HEATING - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_2nd_Stage_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.FAN - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.FAN - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Cool_State_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.COOLING - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_3rd_Stage_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.FAN - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.FAN - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Heat_State_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.HEATING - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Idle} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.OFF - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert 
hvac_sensor_state.state == HVACAction.OFF - - await send_attributes_report( - hass, thrm_cluster, {0x001C: Thermostat.SystemMode.Heat} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.IDLE - - async def test_climate_hvac_action_pi_demand( hass: HomeAssistant, device_climate ) -> None: """Test hvac action based on pi_heating/cooling_demand attrs.""" - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -537,7 +339,7 @@ async def test_hvac_mode( ) -> None: """Test HVAC mode.""" - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -714,7 +516,7 @@ async def test_set_hvac_mode( ) -> None: """Test setting hvac mode.""" - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -753,134 +555,11 @@ async def test_set_hvac_mode( } -async def test_preset_setting(hass: HomeAssistant, device_climate_sinope) -> None: - """Test preset setting.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) - thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - # unsuccessful occupancy change - thrm_cluster.write_attributes.return_value = [ - zcl_f.WriteAttributesResponse( - [ - zcl_f.WriteAttributesStatusRecord( - status=zcl_f.Status.FAILURE, - 
attrid=SinopeTechnologiesThermostatCluster.AttributeDefs.set_occupancy.id, - ) - ] - ) - ] - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - assert thrm_cluster.write_attributes.call_count == 1 - assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 0} - - # successful occupancy change - thrm_cluster.write_attributes.reset_mock() - thrm_cluster.write_attributes.return_value = [ - zcl_f.WriteAttributesResponse( - [zcl_f.WriteAttributesStatusRecord(status=zcl_f.Status.SUCCESS)] - ) - ] - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - assert thrm_cluster.write_attributes.call_count == 1 - assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 0} - - # unsuccessful occupancy change - thrm_cluster.write_attributes.reset_mock() - thrm_cluster.write_attributes.return_value = [ - zcl_f.WriteAttributesResponse( - [ - zcl_f.WriteAttributesStatusRecord( - status=zcl_f.Status.FAILURE, - attrid=SinopeTechnologiesThermostatCluster.AttributeDefs.set_occupancy.id, - ) - ] - ) - ] - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - assert thrm_cluster.write_attributes.call_count == 1 - assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 1} - - # successful occupancy change - 
thrm_cluster.write_attributes.reset_mock() - thrm_cluster.write_attributes.return_value = [ - zcl_f.WriteAttributesResponse( - [zcl_f.WriteAttributesStatusRecord(status=zcl_f.Status.SUCCESS)] - ) - ] - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - assert thrm_cluster.write_attributes.call_count == 1 - assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 1} - - -async def test_preset_setting_invalid( - hass: HomeAssistant, device_climate_sinope -) -> None: - """Test invalid preset setting.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) - thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "invalid_preset"}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - assert thrm_cluster.write_attributes.call_count == 0 - - async def test_set_temperature_hvac_mode(hass: HomeAssistant, device_climate) -> None: """Test setting HVAC mode in temperature service call.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.OFF @@ -922,7 +601,7 @@ async def test_set_temperature_heat_cool( quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = 
device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT_COOL @@ -1008,7 +687,7 @@ async def test_set_temperature_heat(hass: HomeAssistant, device_climate_mock) -> quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT @@ -1087,7 +766,7 @@ async def test_set_temperature_cool(hass: HomeAssistant, device_climate_mock) -> quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.COOL @@ -1172,7 +851,7 @@ async def test_set_temperature_wrong_mode( manuf=MANUF_SINOPE, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.DRY @@ -1191,38 +870,11 @@ async def test_set_temperature_wrong_mode( assert thrm_cluster.write_attributes.await_count == 0 -async def test_occupancy_reset(hass: HomeAssistant, device_climate_sinope) -> None: - """Test away preset reset.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) - thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - 
{ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - thrm_cluster.write_attributes.reset_mock() - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - - await send_attributes_report( - hass, thrm_cluster, {"occupied_heating_setpoint": zigpy.types.uint16_t(1950)} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - async def test_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: """Test fan mode.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - thrm_cluster = device_climate_fan.device.endpoints[1].thermostat + thrm_cluster = device_climate_fan.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert set(state.attributes[ATTR_FAN_MODES]) == {FAN_AUTO, FAN_ON} @@ -1253,7 +905,7 @@ async def test_set_fan_mode_not_supported( """Test fan setting unsupported mode.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - fan_cluster = device_climate_fan.device.endpoints[1].fan + fan_cluster = device_climate_fan.device.device.endpoints[1].fan with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -1269,7 +921,7 @@ async def test_set_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: """Test fan mode setting.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - fan_cluster = device_climate_fan.device.endpoints[1].fan + fan_cluster = device_climate_fan.device.device.endpoints[1].fan state = hass.states.get(entity_id) assert state.attributes[ATTR_FAN_MODE] == FAN_AUTO @@ -1292,309 +944,3 @@ async def test_set_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: ) assert fan_cluster.write_attributes.await_count == 1 assert fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 5} - - -async def test_set_moes_preset(hass: HomeAssistant, device_climate_moes) -> None: - """Test setting preset for moes trv.""" - - 
entity_id = find_entity_id(Platform.CLIMATE, device_climate_moes, hass) - thrm_cluster = device_climate_moes.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 1 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 0 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_SCHEDULE}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 1 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_COMFORT}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 3 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_ECO}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 4 - } - - thrm_cluster.write_attributes.reset_mock() - await 
hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_BOOST}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 5 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_COMPLEX}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 6 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 1 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - - -async def test_set_moes_operation_mode( - hass: HomeAssistant, device_climate_moes -) -> None: - """Test setting preset for moes trv.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_moes, hass) - thrm_cluster = device_climate_moes.device.endpoints[1].thermostat - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 0}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 1}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_SCHEDULE - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 2}) - - state = hass.states.get(entity_id) - assert 
state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 3}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMFORT - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 4}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_ECO - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 5}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_BOOST - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 6}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMPLEX - - -@pytest.mark.parametrize( - ("preset_attr", "preset_mode"), - [ - (0, PRESET_AWAY), - (1, PRESET_SCHEDULE), - # pylint: disable-next=fixme - # (2, PRESET_NONE), # TODO: why does this not work? - (4, PRESET_ECO), - (5, PRESET_BOOST), - (7, PRESET_TEMP_MANUAL), - ], -) -async def test_beca_operation_mode_update( - hass: HomeAssistant, - device_climate_beca: ZHADevice, - preset_attr: int, - preset_mode: str, -) -> None: - """Test beca trv operation mode attribute update.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_beca, hass) - thrm_cluster = device_climate_beca.device.endpoints[1].thermostat - - # Test sending an attribute report - await send_attributes_report(hass, thrm_cluster, {"operation_preset": preset_attr}) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == preset_mode - - # Test setting the preset - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: preset_mode}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.mock_calls == [ - call( - {"operation_preset": preset_attr}, - manufacturer=device_climate_beca.manufacturer_code, - ) - ] - - -async def 
test_set_zonnsmart_preset( - hass: HomeAssistant, device_climate_zonnsmart -) -> None: - """Test setting preset from homeassistant for zonnsmart trv.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass) - thrm_cluster = device_climate_zonnsmart.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_SCHEDULE}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 1 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 0 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "holiday"}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 1 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 3 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "frost protect"}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 1 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 4 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 1 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - 
"operation_preset": 1 - } - - -async def test_set_zonnsmart_operation_mode( - hass: HomeAssistant, device_climate_zonnsmart -) -> None: - """Test setting preset from trv for zonnsmart trv.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass) - thrm_cluster = device_climate_zonnsmart.device.endpoints[1].thermostat - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 0}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_SCHEDULE - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 1}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 2}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == "holiday" - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 3}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == "holiday" - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 4}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == "frost protect" diff --git a/tests/components/zha/test_cluster_handlers.py b/tests/components/zha/test_cluster_handlers.py deleted file mode 100644 index 655a36a2492..00000000000 --- a/tests/components/zha/test_cluster_handlers.py +++ /dev/null @@ -1,1009 +0,0 @@ -"""Test ZHA Core cluster handlers.""" - -from collections.abc import Callable -import logging -import math -import threading -from types import NoneType -from unittest import mock -from unittest.mock import AsyncMock, patch - -import pytest -import zigpy.device -import zigpy.endpoint -from zigpy.endpoint import Endpoint as ZigpyEndpoint -import zigpy.profiles.zha -import zigpy.quirks as zigpy_quirks -import zigpy.types as t -from zigpy.zcl import foundation -import zigpy.zcl.clusters -from zigpy.zcl.clusters import 
CLUSTERS_BY_ID -import zigpy.zdo.types as zdo_t - -from homeassistant.components.zha.core import cluster_handlers, registries -from homeassistant.components.zha.core.cluster_handlers.lighting import ( - ColorClusterHandler, -) -import homeassistant.components.zha.core.const as zha_const -from homeassistant.components.zha.core.device import ZHADevice -from homeassistant.components.zha.core.endpoint import Endpoint -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .common import make_zcl_header -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE - -from tests.common import async_capture_events - - -@pytest.fixture(autouse=True) -def disable_platform_only(): - """Disable platforms to speed up tests.""" - with patch("homeassistant.components.zha.PLATFORMS", []): - yield - - -@pytest.fixture -def ieee(): - """IEEE fixture.""" - return t.EUI64.deserialize(b"ieeeaddr")[0] - - -@pytest.fixture -def nwk(): - """NWK fixture.""" - return t.NWK(0xBEEF) - - -@pytest.fixture -async def zha_gateway(hass, setup_zha): - """Return ZhaGateway fixture.""" - await setup_zha() - return get_zha_gateway(hass) - - -@pytest.fixture -def zigpy_coordinator_device(zigpy_device_mock): - """Coordinator device fixture.""" - - coordinator = zigpy_device_mock( - {1: {SIG_EP_INPUT: [0x1000], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - with patch.object(coordinator, "add_to_group", AsyncMock(return_value=[0])): - yield coordinator - - -@pytest.fixture -def endpoint(zigpy_coordinator_device): - """Endpoint fixture.""" - endpoint_mock = mock.MagicMock(spec_set=Endpoint) - endpoint_mock.zigpy_endpoint.device.application.get_device.return_value = ( - zigpy_coordinator_device - ) - type(endpoint_mock.device).skip_configuration = mock.PropertyMock( - return_value=False - ) - 
endpoint_mock.device.hass.loop_thread_id = threading.get_ident() - endpoint_mock.id = 1 - return endpoint_mock - - -@pytest.fixture -def poll_control_ch(endpoint, zigpy_device_mock): - """Poll control cluster handler fixture.""" - cluster_id = zigpy.zcl.clusters.general.PollControl.cluster_id - zigpy_dev = zigpy_device_mock( - {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - cluster = zigpy_dev.endpoints[1].in_clusters[cluster_id] - cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id - ).get(None) - return cluster_handler_class(cluster, endpoint) - - -@pytest.fixture -async def poll_control_device(zha_device_restored, zigpy_device_mock): - """Poll control device fixture.""" - cluster_id = zigpy.zcl.clusters.general.PollControl.cluster_id - zigpy_dev = zigpy_device_mock( - {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - return await zha_device_restored(zigpy_dev) - - -@pytest.mark.parametrize( - ("cluster_id", "bind_count", "attrs"), - [ - (zigpy.zcl.clusters.general.Basic.cluster_id, 0, {}), - ( - zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, - 1, - {"battery_voltage", "battery_percentage_remaining"}, - ), - ( - zigpy.zcl.clusters.general.DeviceTemperature.cluster_id, - 1, - {"current_temperature"}, - ), - (zigpy.zcl.clusters.general.Identify.cluster_id, 0, {}), - (zigpy.zcl.clusters.general.Groups.cluster_id, 0, {}), - (zigpy.zcl.clusters.general.Scenes.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.OnOff.cluster_id, 1, {"on_off"}), - (zigpy.zcl.clusters.general.OnOffConfiguration.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.LevelControl.cluster_id, 1, {"current_level"}), - (zigpy.zcl.clusters.general.Alarms.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.AnalogInput.cluster_id, 1, {"present_value"}), - 
(zigpy.zcl.clusters.general.AnalogOutput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.AnalogValue.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.BinaryOutput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.BinaryValue.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.MultistateInput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.MultistateOutput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.MultistateValue.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.Commissioning.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.Partition.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.Ota.cluster_id, 0, {}), - (zigpy.zcl.clusters.general.PowerProfile.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.ApplianceControl.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.PollControl.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.GreenPowerProxy.cluster_id, 0, {}), - (zigpy.zcl.clusters.closures.DoorLock.cluster_id, 1, {"lock_state"}), - ( - zigpy.zcl.clusters.hvac.Thermostat.cluster_id, - 1, - { - "local_temperature", - "occupied_cooling_setpoint", - "occupied_heating_setpoint", - "unoccupied_cooling_setpoint", - "unoccupied_heating_setpoint", - "running_mode", - "running_state", - "system_mode", - "occupancy", - "pi_cooling_demand", - "pi_heating_demand", - }, - ), - (zigpy.zcl.clusters.hvac.Fan.cluster_id, 1, {"fan_mode"}), - ( - zigpy.zcl.clusters.lighting.Color.cluster_id, - 1, - { - "current_x", - "current_y", - "color_temperature", - "current_hue", - "enhanced_current_hue", - "current_saturation", - }, - ), - ( - zigpy.zcl.clusters.measurement.IlluminanceMeasurement.cluster_id, - 1, - {"measured_value"}, - ), - ( - zigpy.zcl.clusters.measurement.IlluminanceLevelSensing.cluster_id, - 1, - {"level_status"}, - ), - ( - zigpy.zcl.clusters.measurement.TemperatureMeasurement.cluster_id, - 1, - {"measured_value"}, - ), - ( - 
zigpy.zcl.clusters.measurement.PressureMeasurement.cluster_id, - 1, - {"measured_value"}, - ), - ( - zigpy.zcl.clusters.measurement.FlowMeasurement.cluster_id, - 1, - {"measured_value"}, - ), - ( - zigpy.zcl.clusters.measurement.RelativeHumidity.cluster_id, - 1, - {"measured_value"}, - ), - (zigpy.zcl.clusters.measurement.OccupancySensing.cluster_id, 1, {"occupancy"}), - ( - zigpy.zcl.clusters.smartenergy.Metering.cluster_id, - 1, - { - "instantaneous_demand", - "current_summ_delivered", - "current_tier1_summ_delivered", - "current_tier2_summ_delivered", - "current_tier3_summ_delivered", - "current_tier4_summ_delivered", - "current_tier5_summ_delivered", - "current_tier6_summ_delivered", - "current_summ_received", - "status", - }, - ), - ( - zigpy.zcl.clusters.homeautomation.ElectricalMeasurement.cluster_id, - 1, - { - "active_power", - "active_power_max", - "apparent_power", - "rms_current", - "rms_current_max", - "rms_voltage", - "rms_voltage_max", - }, - ), - ], -) -async def test_in_cluster_handler_config( - cluster_id, bind_count, attrs, endpoint, zigpy_device_mock, zha_gateway -) -> None: - """Test ZHA core cluster handler configuration for input clusters.""" - zigpy_dev = zigpy_device_mock( - {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - cluster = zigpy_dev.endpoints[1].in_clusters[cluster_id] - cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id, {None, cluster_handlers.ClusterHandler} - ).get(None) - cluster_handler = cluster_handler_class(cluster, endpoint) - - await cluster_handler.async_configure() - - assert cluster.bind.call_count == bind_count - assert cluster.configure_reporting.call_count == 0 - assert cluster.configure_reporting_multiple.call_count == math.ceil(len(attrs) / 3) - reported_attrs = { - a - for a in attrs - for attr in cluster.configure_reporting_multiple.call_args_list - for attrs in attr[0][0] - } 
- assert set(attrs) == reported_attrs - - -@pytest.mark.parametrize( - ("cluster_id", "bind_count"), - [ - (0x0000, 0), - (0x0001, 1), - (0x0002, 1), - (0x0003, 0), - (0x0004, 0), - (0x0005, 1), - (0x0006, 1), - (0x0007, 1), - (0x0008, 1), - (0x0009, 1), - (0x0015, 1), - (0x0016, 1), - (0x0019, 0), - (0x001A, 1), - (0x001B, 1), - (0x0020, 1), - (0x0021, 0), - (0x0101, 1), - (0x0202, 1), - (0x0300, 1), - (0x0400, 1), - (0x0402, 1), - (0x0403, 1), - (0x0405, 1), - (0x0406, 1), - (0x0702, 1), - (0x0B04, 1), - ], -) -async def test_out_cluster_handler_config( - cluster_id, bind_count, endpoint, zigpy_device_mock, zha_gateway -) -> None: - """Test ZHA core cluster handler configuration for output clusters.""" - zigpy_dev = zigpy_device_mock( - {1: {SIG_EP_OUTPUT: [cluster_id], SIG_EP_INPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - cluster = zigpy_dev.endpoints[1].out_clusters[cluster_id] - cluster.bind_only = True - cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id, {None: cluster_handlers.ClusterHandler} - ).get(None) - cluster_handler = cluster_handler_class(cluster, endpoint) - - await cluster_handler.async_configure() - - assert cluster.bind.call_count == bind_count - assert cluster.configure_reporting.call_count == 0 - - -def test_cluster_handler_registry() -> None: - """Test ZIGBEE cluster handler Registry.""" - - # get all quirk ID from zigpy quirks registry - all_quirk_ids = {} - for cluster_id in CLUSTERS_BY_ID: - all_quirk_ids[cluster_id] = {None} - # pylint: disable-next=too-many-nested-blocks - for manufacturer in zigpy_quirks._DEVICE_REGISTRY.registry.values(): - for model_quirk_list in manufacturer.values(): - for quirk in model_quirk_list: - quirk_id = getattr(quirk, zha_const.ATTR_QUIRK_ID, None) - device_description = getattr(quirk, "replacement", None) or getattr( - quirk, "signature", None - ) - - for endpoint in device_description["endpoints"].values(): - 
cluster_ids = set() - if "input_clusters" in endpoint: - cluster_ids.update(endpoint["input_clusters"]) - if "output_clusters" in endpoint: - cluster_ids.update(endpoint["output_clusters"]) - for cluster_id in cluster_ids: - if not isinstance(cluster_id, int): - cluster_id = cluster_id.cluster_id - if cluster_id not in all_quirk_ids: - all_quirk_ids[cluster_id] = {None} - all_quirk_ids[cluster_id].add(quirk_id) - - # pylint: disable-next=undefined-loop-variable - del quirk, model_quirk_list, manufacturer - - for ( - cluster_id, - cluster_handler_classes, - ) in registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.items(): - assert isinstance(cluster_id, int) - assert 0 <= cluster_id <= 0xFFFF - assert cluster_id in all_quirk_ids - assert isinstance(cluster_handler_classes, dict) - for quirk_id, cluster_handler in cluster_handler_classes.items(): - assert isinstance(quirk_id, (NoneType, str)) - assert issubclass(cluster_handler, cluster_handlers.ClusterHandler) - assert quirk_id in all_quirk_ids[cluster_id] - - -def test_epch_unclaimed_cluster_handlers(cluster_handler) -> None: - """Test unclaimed cluster handlers.""" - - ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) - ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) - ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) - - ep_cluster_handlers = Endpoint( - mock.MagicMock(spec_set=ZigpyEndpoint), mock.MagicMock(spec_set=ZHADevice) - ) - all_cluster_handlers = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} - with mock.patch.dict( - ep_cluster_handlers.all_cluster_handlers, all_cluster_handlers, clear=True - ): - available = ep_cluster_handlers.unclaimed_cluster_handlers() - assert ch_1 in available - assert ch_2 in available - assert ch_3 in available - - ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] = ch_2 - available = ep_cluster_handlers.unclaimed_cluster_handlers() - assert ch_1 in available - assert ch_2 not in available - assert ch_3 in available - - 
ep_cluster_handlers.claimed_cluster_handlers[ch_1.id] = ch_1 - available = ep_cluster_handlers.unclaimed_cluster_handlers() - assert ch_1 not in available - assert ch_2 not in available - assert ch_3 in available - - ep_cluster_handlers.claimed_cluster_handlers[ch_3.id] = ch_3 - available = ep_cluster_handlers.unclaimed_cluster_handlers() - assert ch_1 not in available - assert ch_2 not in available - assert ch_3 not in available - - -def test_epch_claim_cluster_handlers(cluster_handler) -> None: - """Test cluster handler claiming.""" - - ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) - ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) - ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) - - ep_cluster_handlers = Endpoint( - mock.MagicMock(spec_set=ZigpyEndpoint), mock.MagicMock(spec_set=ZHADevice) - ) - all_cluster_handlers = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} - with mock.patch.dict( - ep_cluster_handlers.all_cluster_handlers, all_cluster_handlers, clear=True - ): - assert ch_1.id not in ep_cluster_handlers.claimed_cluster_handlers - assert ch_2.id not in ep_cluster_handlers.claimed_cluster_handlers - assert ch_3.id not in ep_cluster_handlers.claimed_cluster_handlers - - ep_cluster_handlers.claim_cluster_handlers([ch_2]) - assert ch_1.id not in ep_cluster_handlers.claimed_cluster_handlers - assert ch_2.id in ep_cluster_handlers.claimed_cluster_handlers - assert ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] is ch_2 - assert ch_3.id not in ep_cluster_handlers.claimed_cluster_handlers - - ep_cluster_handlers.claim_cluster_handlers([ch_3, ch_1]) - assert ch_1.id in ep_cluster_handlers.claimed_cluster_handlers - assert ep_cluster_handlers.claimed_cluster_handlers[ch_1.id] is ch_1 - assert ch_2.id in ep_cluster_handlers.claimed_cluster_handlers - assert ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] is ch_2 - assert ch_3.id in ep_cluster_handlers.claimed_cluster_handlers - assert 
ep_cluster_handlers.claimed_cluster_handlers[ch_3.id] is ch_3 - assert "1:0x0300" in ep_cluster_handlers.claimed_cluster_handlers - - -@mock.patch( - "homeassistant.components.zha.core.endpoint.Endpoint.add_client_cluster_handlers" -) -@mock.patch( - "homeassistant.components.zha.core.discovery.PROBE.discover_entities", - mock.MagicMock(), -) -def test_ep_all_cluster_handlers(m1, zha_device_mock: Callable[..., ZHADevice]) -> None: - """Test Endpoint adding all cluster handlers.""" - zha_device = zha_device_mock( - { - 1: { - SIG_EP_INPUT: [0, 1, 6, 8], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - }, - 2: { - SIG_EP_INPUT: [0, 1, 6, 8, 768], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: 0x0000, - }, - } - ) - assert "1:0x0000" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0001" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0006" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0008" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0300" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0000" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0001" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0006" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0008" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0300" not in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0000" not in zha_device._endpoints[2].all_cluster_handlers - assert "1:0x0001" not in zha_device._endpoints[2].all_cluster_handlers - assert "1:0x0006" not in zha_device._endpoints[2].all_cluster_handlers - assert "1:0x0008" not in zha_device._endpoints[2].all_cluster_handlers - assert "1:0x0300" not in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0000" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0006" in 
zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0008" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0300" in zha_device._endpoints[2].all_cluster_handlers - - zha_device.async_cleanup_handles() - - -@mock.patch( - "homeassistant.components.zha.core.endpoint.Endpoint.add_client_cluster_handlers" -) -@mock.patch( - "homeassistant.components.zha.core.discovery.PROBE.discover_entities", - mock.MagicMock(), -) -def test_cluster_handler_power_config( - m1, zha_device_mock: Callable[..., ZHADevice] -) -> None: - """Test that cluster handlers only get a single power cluster handler.""" - in_clusters = [0, 1, 6, 8] - zha_device = zha_device_mock( - { - 1: {SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, - 2: { - SIG_EP_INPUT: [*in_clusters, 768], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: 0x0000, - }, - } - ) - assert "1:0x0000" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0001" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0006" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0008" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0300" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0000" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0006" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0008" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0300" in zha_device._endpoints[2].all_cluster_handlers - - zha_device.async_cleanup_handles() - - zha_device = zha_device_mock( - { - 1: {SIG_EP_INPUT: [], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, - 2: {SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, - } - ) - assert "1:0x0001" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers - - zha_device.async_cleanup_handles() - - zha_device = zha_device_mock( - {2: 
{SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}} - ) - assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers - - zha_device.async_cleanup_handles() - - -async def test_ep_cluster_handlers_configure(cluster_handler) -> None: - """Test unclaimed cluster handlers.""" - - ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) - ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) - ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) - ch_3.async_configure = AsyncMock(side_effect=TimeoutError) - ch_3.async_initialize = AsyncMock(side_effect=TimeoutError) - ch_4 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) - ch_5 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) - ch_5.async_configure = AsyncMock(side_effect=TimeoutError) - ch_5.async_initialize = AsyncMock(side_effect=TimeoutError) - - endpoint_mock = mock.MagicMock(spec_set=ZigpyEndpoint) - type(endpoint_mock).in_clusters = mock.PropertyMock(return_value={}) - type(endpoint_mock).out_clusters = mock.PropertyMock(return_value={}) - endpoint = Endpoint.new(endpoint_mock, mock.MagicMock(spec_set=ZHADevice)) - - claimed = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} - client_handlers = {ch_4.id: ch_4, ch_5.id: ch_5} - - with ( - mock.patch.dict(endpoint.claimed_cluster_handlers, claimed, clear=True), - mock.patch.dict(endpoint.client_cluster_handlers, client_handlers, clear=True), - ): - await endpoint.async_configure() - await endpoint.async_initialize(mock.sentinel.from_cache) - - for ch in (*claimed.values(), *client_handlers.values()): - assert ch.async_initialize.call_count == 1 - assert ch.async_initialize.await_count == 1 - assert ch.async_initialize.call_args[0][0] is mock.sentinel.from_cache - assert ch.async_configure.call_count == 1 - assert ch.async_configure.await_count == 1 - - assert ch_3.debug.call_count == 2 - assert ch_5.debug.call_count == 2 - - -async def test_poll_control_configure(poll_control_ch) -> None: - """Test poll control 
cluster handler configuration.""" - await poll_control_ch.async_configure() - assert poll_control_ch.cluster.write_attributes.call_count == 1 - assert poll_control_ch.cluster.write_attributes.call_args[0][0] == { - "checkin_interval": poll_control_ch.CHECKIN_INTERVAL - } - - -async def test_poll_control_checkin_response(poll_control_ch) -> None: - """Test poll control cluster handler checkin response.""" - rsp_mock = AsyncMock() - set_interval_mock = AsyncMock() - fast_poll_mock = AsyncMock() - cluster = poll_control_ch.cluster - patch_1 = mock.patch.object(cluster, "checkin_response", rsp_mock) - patch_2 = mock.patch.object(cluster, "set_long_poll_interval", set_interval_mock) - patch_3 = mock.patch.object(cluster, "fast_poll_stop", fast_poll_mock) - - with patch_1, patch_2, patch_3: - await poll_control_ch.check_in_response(33) - - assert rsp_mock.call_count == 1 - assert set_interval_mock.call_count == 1 - assert fast_poll_mock.call_count == 1 - - await poll_control_ch.check_in_response(33) - assert cluster.endpoint.request.call_count == 3 - assert cluster.endpoint.request.await_count == 3 - assert cluster.endpoint.request.call_args_list[0][0][1] == 33 - assert cluster.endpoint.request.call_args_list[0][0][0] == 0x0020 - assert cluster.endpoint.request.call_args_list[1][0][0] == 0x0020 - - -async def test_poll_control_cluster_command( - hass: HomeAssistant, poll_control_device -) -> None: - """Test poll control cluster handler response to cluster command.""" - checkin_mock = AsyncMock() - poll_control_ch = poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] - cluster = poll_control_ch.cluster - events = async_capture_events(hass, zha_const.ZHA_EVENT) - - with mock.patch.object(poll_control_ch, "check_in_response", checkin_mock): - tsn = 22 - hdr = make_zcl_header(0, global_command=False, tsn=tsn) - assert not events - cluster.handle_message( - hdr, [mock.sentinel.args, mock.sentinel.args2, mock.sentinel.args3] - ) - await 
hass.async_block_till_done() - - assert checkin_mock.call_count == 1 - assert checkin_mock.await_count == 1 - assert checkin_mock.await_args[0][0] == tsn - assert len(events) == 1 - data = events[0].data - assert data["command"] == "checkin" - assert data["args"][0] is mock.sentinel.args - assert data["args"][1] is mock.sentinel.args2 - assert data["args"][2] is mock.sentinel.args3 - assert data["unique_id"] == "00:11:22:33:44:55:66:77:1:0x0020" - assert data["device_id"] == poll_control_device.device_id - - -async def test_poll_control_ignore_list( - hass: HomeAssistant, poll_control_device -) -> None: - """Test poll control cluster handler ignore list.""" - set_long_poll_mock = AsyncMock() - poll_control_ch = poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] - cluster = poll_control_ch.cluster - - with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): - await poll_control_ch.check_in_response(33) - - assert set_long_poll_mock.call_count == 1 - - set_long_poll_mock.reset_mock() - poll_control_ch.skip_manufacturer_id(4151) - with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): - await poll_control_ch.check_in_response(33) - - assert set_long_poll_mock.call_count == 0 - - -async def test_poll_control_ikea(hass: HomeAssistant, poll_control_device) -> None: - """Test poll control cluster handler ignore list for ikea.""" - set_long_poll_mock = AsyncMock() - poll_control_ch = poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] - cluster = poll_control_ch.cluster - - poll_control_device.device.node_desc.manufacturer_code = 4476 - with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): - await poll_control_ch.check_in_response(33) - - assert set_long_poll_mock.call_count == 0 - - -@pytest.fixture -def zigpy_zll_device(zigpy_device_mock): - """ZLL device fixture.""" - - return zigpy_device_mock( - {1: {SIG_EP_INPUT: [0x1000], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - 
"00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - -async def test_zll_device_groups( - zigpy_zll_device, endpoint, zigpy_coordinator_device -) -> None: - """Test adding coordinator to ZLL groups.""" - - cluster = zigpy_zll_device.endpoints[1].lightlink - cluster_handler = cluster_handlers.lightlink.LightLinkClusterHandler( - cluster, endpoint - ) - - get_group_identifiers_rsp = zigpy.zcl.clusters.lightlink.LightLink.commands_by_name[ - "get_group_identifiers_rsp" - ].schema - - with patch.object( - cluster, - "command", - AsyncMock( - return_value=get_group_identifiers_rsp( - total=0, start_index=0, group_info_records=[] - ) - ), - ) as cmd_mock: - await cluster_handler.async_configure() - assert cmd_mock.await_count == 1 - assert ( - cluster.server_commands[cmd_mock.await_args[0][0]].name - == "get_group_identifiers" - ) - assert cluster.bind.call_count == 0 - assert zigpy_coordinator_device.add_to_group.await_count == 1 - assert zigpy_coordinator_device.add_to_group.await_args[0][0] == 0x0000 - - zigpy_coordinator_device.add_to_group.reset_mock() - group_1 = zigpy.zcl.clusters.lightlink.GroupInfoRecord(0xABCD, 0x00) - group_2 = zigpy.zcl.clusters.lightlink.GroupInfoRecord(0xAABB, 0x00) - with patch.object( - cluster, - "command", - AsyncMock( - return_value=get_group_identifiers_rsp( - total=2, start_index=0, group_info_records=[group_1, group_2] - ) - ), - ) as cmd_mock: - await cluster_handler.async_configure() - assert cmd_mock.await_count == 1 - assert ( - cluster.server_commands[cmd_mock.await_args[0][0]].name - == "get_group_identifiers" - ) - assert cluster.bind.call_count == 0 - assert zigpy_coordinator_device.add_to_group.await_count == 2 - assert ( - zigpy_coordinator_device.add_to_group.await_args_list[0][0][0] - == group_1.group_id - ) - assert ( - zigpy_coordinator_device.add_to_group.await_args_list[1][0][0] - == group_2.group_id - ) - - -@mock.patch( - "homeassistant.components.zha.core.discovery.PROBE.discover_entities", - 
mock.MagicMock(), -) -async def test_cluster_no_ep_attribute( - zha_device_mock: Callable[..., ZHADevice], -) -> None: - """Test cluster handlers for clusters without ep_attribute.""" - - zha_device = zha_device_mock( - {1: {SIG_EP_INPUT: [0x042E], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - ) - - assert "1:0x042e" in zha_device._endpoints[1].all_cluster_handlers - assert zha_device._endpoints[1].all_cluster_handlers["1:0x042e"].name - - zha_device.async_cleanup_handles() - - -async def test_configure_reporting(hass: HomeAssistant, endpoint) -> None: - """Test setting up a cluster handler and configuring attribute reporting in two batches.""" - - class TestZigbeeClusterHandler(cluster_handlers.ClusterHandler): - BIND = True - REPORT_CONFIG = ( - # By name - cluster_handlers.AttrReportConfig(attr="current_x", config=(1, 60, 1)), - cluster_handlers.AttrReportConfig(attr="current_hue", config=(1, 60, 2)), - cluster_handlers.AttrReportConfig( - attr="color_temperature", config=(1, 60, 3) - ), - cluster_handlers.AttrReportConfig(attr="current_y", config=(1, 60, 4)), - ) - - mock_ep = mock.AsyncMock(spec_set=zigpy.endpoint.Endpoint) - mock_ep.device.zdo = AsyncMock() - - cluster = zigpy.zcl.clusters.lighting.Color(mock_ep) - cluster.bind = AsyncMock( - spec_set=cluster.bind, - return_value=[zdo_t.Status.SUCCESS], # ZDOCmd.Bind_rsp - ) - cluster.configure_reporting_multiple = AsyncMock( - spec_set=cluster.configure_reporting_multiple, - return_value=[ - foundation.ConfigureReportingResponseRecord( - status=foundation.Status.SUCCESS - ) - ], - ) - - cluster_handler = TestZigbeeClusterHandler(cluster, endpoint) - await cluster_handler.async_configure() - - # Since we request reporting for five attributes, we need to make two calls (3 + 1) - assert cluster.configure_reporting_multiple.mock_calls == [ - mock.call( - { - "current_x": (1, 60, 1), - "current_hue": (1, 60, 2), - "color_temperature": (1, 60, 3), - } - ), - mock.call( - { - "current_y": (1, 60, 4), - } - ), - ] - - 
-async def test_invalid_cluster_handler( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test setting up a cluster handler that fails to match properly.""" - - class TestZigbeeClusterHandler(cluster_handlers.ClusterHandler): - REPORT_CONFIG = ( - cluster_handlers.AttrReportConfig(attr="missing_attr", config=(1, 60, 1)), - ) - - mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) - zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) - - cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) - cluster.configure_reporting_multiple = AsyncMock( - spec_set=cluster.configure_reporting_multiple, - return_value=[ - foundation.ConfigureReportingResponseRecord( - status=foundation.Status.SUCCESS - ) - ], - ) - - mock_zha_device = mock.AsyncMock(spec=ZHADevice) - mock_zha_device.quirk_id = None - zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) - - # The cluster handler throws an error when matching this cluster - with pytest.raises(KeyError): - TestZigbeeClusterHandler(cluster, zha_endpoint) - - # And one is also logged at runtime - with ( - patch.dict( - registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], - {None: TestZigbeeClusterHandler}, - ), - caplog.at_level(logging.WARNING), - ): - zha_endpoint.add_all_cluster_handlers() - - assert "missing_attr" in caplog.text - - -async def test_standard_cluster_handler( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test setting up a cluster handler that matches a standard cluster.""" - - class TestZigbeeClusterHandler(ColorClusterHandler): - pass - - mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) - zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) - - cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) - cluster.configure_reporting_multiple = AsyncMock( - spec_set=cluster.configure_reporting_multiple, - return_value=[ - foundation.ConfigureReportingResponseRecord( - 
status=foundation.Status.SUCCESS - ) - ], - ) - - mock_zha_device = mock.AsyncMock(spec=ZHADevice) - mock_zha_device.quirk_id = None - zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) - - with patch.dict( - registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], - {"__test_quirk_id": TestZigbeeClusterHandler}, - ): - zha_endpoint.add_all_cluster_handlers() - - assert len(zha_endpoint.all_cluster_handlers) == 1 - assert isinstance( - list(zha_endpoint.all_cluster_handlers.values())[0], ColorClusterHandler - ) - - -async def test_quirk_id_cluster_handler( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test setting up a cluster handler that matches a standard cluster.""" - - class TestZigbeeClusterHandler(ColorClusterHandler): - pass - - mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) - zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) - - cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) - cluster.configure_reporting_multiple = AsyncMock( - spec_set=cluster.configure_reporting_multiple, - return_value=[ - foundation.ConfigureReportingResponseRecord( - status=foundation.Status.SUCCESS - ) - ], - ) - - mock_zha_device = mock.AsyncMock(spec=ZHADevice) - mock_zha_device.quirk_id = "__test_quirk_id" - zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) - - with patch.dict( - registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], - {"__test_quirk_id": TestZigbeeClusterHandler}, - ): - zha_endpoint.add_all_cluster_handlers() - - assert len(zha_endpoint.all_cluster_handlers) == 1 - assert isinstance( - list(zha_endpoint.all_cluster_handlers.values())[0], TestZigbeeClusterHandler - ) - - -# parametrize side effects: -@pytest.mark.parametrize( - ("side_effect", "expected_error"), - [ - (zigpy.exceptions.ZigbeeException(), "Failed to send request"), - ( - zigpy.exceptions.ZigbeeException("Zigbee exception"), - "Failed to send request: Zigbee exception", - ), - (TimeoutError(), 
"Failed to send request: device did not respond"), - ], -) -async def test_retry_request( - side_effect: Exception | None, expected_error: str | None -) -> None: - """Test the `retry_request` decorator's handling of zigpy-internal exceptions.""" - - async def func(arg1: int, arg2: int) -> int: - assert arg1 == 1 - assert arg2 == 2 - - raise side_effect - - func = mock.AsyncMock(wraps=func) - decorated_func = cluster_handlers.retry_request(func) - - with pytest.raises(HomeAssistantError) as exc: - await decorated_func(1, arg2=2) - - assert func.await_count == 3 - assert isinstance(exc.value, HomeAssistantError) - assert str(exc.value) == expected_error - - -async def test_cluster_handler_naming() -> None: - """Test that all cluster handlers are named appropriately.""" - for client_cluster_handler in registries.CLIENT_CLUSTER_HANDLER_REGISTRY.values(): - assert issubclass(client_cluster_handler, cluster_handlers.ClientClusterHandler) - assert client_cluster_handler.__name__.endswith("ClientClusterHandler") - - for cluster_handler_dict in registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.values(): - for cluster_handler in cluster_handler_dict.values(): - assert not issubclass( - cluster_handler, cluster_handlers.ClientClusterHandler - ) - assert cluster_handler.__name__.endswith("ClusterHandler") diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index 0c8414f458f..af6f2d9af0c 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -1,14 +1,17 @@ """Tests for ZHA config flow.""" +from collections.abc import Callable, Coroutine, Generator import copy from datetime import timedelta from ipaddress import ip_address import json +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch import uuid import pytest -import serial.tools.list_ports +from serial.tools.list_ports_common import ListPortInfo +from zha.application.const import 
RadioType from zigpy.backups import BackupManager import zigpy.config from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH, SCHEMA_DEVICE @@ -21,13 +24,12 @@ from homeassistant.components import ssdp, usb, zeroconf from homeassistant.components.hassio import AddonState from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL from homeassistant.components.zha import config_flow, radio_manager -from homeassistant.components.zha.core.const import ( +from homeassistant.components.zha.const import ( CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, DOMAIN, EZSP_OVERWRITE_EUI64, - RadioType, ) from homeassistant.components.zha.radio_manager import ProbeResult from homeassistant.config_entries import ( @@ -36,6 +38,7 @@ from homeassistant.config_entries import ( SOURCE_USER, SOURCE_ZEROCONF, ConfigEntryState, + ConfigFlowResult, ) from homeassistant.const import CONF_SOURCE from homeassistant.core import HomeAssistant @@ -43,6 +46,9 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry +type RadioPicker = Callable[ + [RadioType], Coroutine[Any, Any, tuple[ConfigFlowResult, ListPortInfo]] +] PROBE_FUNCTION_PATH = "zigbee.application.ControllerApplication.probe" @@ -70,7 +76,7 @@ def mock_multipan_platform(): @pytest.fixture(autouse=True) -def mock_app(): +def mock_app() -> Generator[AsyncMock]: """Mock zigpy app interface.""" mock_app = AsyncMock() mock_app.backups = create_autospec(BackupManager, instance=True) @@ -130,9 +136,9 @@ def mock_detect_radio_type( return detect -def com_port(device="/dev/ttyUSB1234"): +def com_port(device="/dev/ttyUSB1234") -> ListPortInfo: """Mock of a serial port.""" - port = serial.tools.list_ports_common.ListPortInfo("/dev/ttyUSB1234") + port = ListPortInfo("/dev/ttyUSB1234") port.serial_number = "1234" port.manufacturer = "Virtual serial port" port.device = device @@ -1038,10 +1044,12 @@ def test_prevent_overwrite_ezsp_ieee() -> None: @pytest.fixture -def 
pick_radio(hass): +def pick_radio( + hass: HomeAssistant, +) -> Generator[RadioPicker]: """Fixture for the first step of the config flow (where a radio is picked).""" - async def wrapper(radio_type): + async def wrapper(radio_type: RadioType) -> tuple[ConfigFlowResult, ListPortInfo]: port = com_port() port_select = f"{port}, s/n: {port.serial_number} - {port.manufacturer}" @@ -1070,7 +1078,7 @@ def pick_radio(hass): async def test_strategy_no_network_settings( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test formation strategy when no network settings are present.""" mock_app.load_network_info = MagicMock(side_effect=NetworkNotFormed()) @@ -1083,7 +1091,7 @@ async def test_strategy_no_network_settings( async def test_formation_strategy_form_new_network( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test forming a new network.""" result, port = await pick_radio(RadioType.ezsp) @@ -1101,7 +1109,7 @@ async def test_formation_strategy_form_new_network( async def test_formation_strategy_form_initial_network( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test forming a new network, with no previous settings on the radio.""" mock_app.load_network_info = AsyncMock(side_effect=NetworkNotFormed()) @@ -1122,7 +1130,7 @@ async def test_formation_strategy_form_initial_network( @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_onboarding_auto_formation_new_hardware( - mock_app, hass: HomeAssistant + mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test auto network formation with new hardware during onboarding.""" mock_app.load_network_info = AsyncMock(side_effect=NetworkNotFormed()) @@ -1157,7 +1165,7 @@ async 
def test_onboarding_auto_formation_new_hardware( async def test_formation_strategy_reuse_settings( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test reusing existing network settings.""" result, port = await pick_radio(RadioType.ezsp) @@ -1190,7 +1198,10 @@ def test_parse_uploaded_backup(process_mock) -> None: @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_non_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on non-EZSP coordinators.""" result, port = await pick_radio(RadioType.znp) @@ -1222,7 +1233,11 @@ async def test_formation_strategy_restore_manual_backup_non_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_overwrite_ieee_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, backup, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + backup, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on EZSP coordinators (overwrite IEEE).""" result, port = await pick_radio(RadioType.ezsp) @@ -1262,7 +1277,10 @@ async def test_formation_strategy_restore_manual_backup_overwrite_ieee_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on EZSP coordinators (don't overwrite IEEE).""" result, port = await pick_radio(RadioType.ezsp) @@ -1303,7 +1321,7 @@ async def 
test_formation_strategy_restore_manual_backup_ezsp( async def test_formation_strategy_restore_manual_backup_invalid_upload( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test restoring a manual backup but an invalid file is uploaded.""" result, port = await pick_radio(RadioType.ezsp) @@ -1355,7 +1373,7 @@ def test_format_backup_choice() -> None: ) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_formation_strategy_restore_automatic_backup_ezsp( - pick_radio, mock_app, make_backup, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, make_backup, hass: HomeAssistant ) -> None: """Test restoring an automatic backup (EZSP radio).""" mock_app.backups.backups = [ @@ -1404,7 +1422,11 @@ async def test_formation_strategy_restore_automatic_backup_ezsp( @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @pytest.mark.parametrize("is_advanced", [True, False]) async def test_formation_strategy_restore_automatic_backup_non_ezsp( - is_advanced, pick_radio, mock_app, make_backup, hass: HomeAssistant + is_advanced, + pick_radio: RadioPicker, + mock_app: AsyncMock, + make_backup, + hass: HomeAssistant, ) -> None: """Test restoring an automatic backup (non-EZSP radio).""" mock_app.backups.backups = [ @@ -1457,7 +1479,11 @@ async def test_formation_strategy_restore_automatic_backup_non_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_ezsp_restore_without_settings_change_ieee( - allow_overwrite_ieee_mock, pick_radio, mock_app, backup, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + backup, + hass: HomeAssistant, ) -> None: """Test a manual backup on EZSP coordinators without settings (no IEEE write).""" # Fail to load settings diff --git a/tests/components/zha/test_cover.py 
b/tests/components/zha/test_cover.py index 5f6dac885f2..afef2aab70f 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -1,12 +1,10 @@ """Test ZHA cover.""" -import asyncio from unittest.mock import patch import pytest -import zigpy.profiles.zha -import zigpy.types -from zigpy.zcl.clusters import closures, general +from zigpy.profiles import zha +from zigpy.zcl.clusters import closures import zigpy.zcl.foundation as zcl_f from homeassistant.components.cover import ( @@ -22,34 +20,27 @@ from homeassistant.components.cover import ( SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - SERVICE_TOGGLE_COVER_TILT, ) -from homeassistant.components.zha.core.const import ZHA_EVENT +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import ( - ATTR_COMMAND, STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING, - STATE_UNAVAILABLE, Platform, ) -from homeassistant.core import CoreState, HomeAssistant, State +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_component import async_update_entity -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - make_zcl_header, - send_attributes_report, - update_attribute_cache, -) +from .common import find_entity_id, send_attributes_report, update_attribute_cache from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import async_capture_events, mock_restore_cache - Default_Response = zcl_f.GENERAL_COMMANDS[zcl_f.GeneralCommand.Default_Response].schema @@ -68,135 +59,31 @@ def cover_platform_only(): yield -@pytest.fixture -def zigpy_cover_device(zigpy_device_mock): - """Zigpy cover device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: 
zigpy.profiles.zha.DeviceType.WINDOW_COVERING_DEVICE, - SIG_EP_INPUT: [closures.WindowCovering.cluster_id], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -def zigpy_cover_remote(zigpy_device_mock): - """Zigpy cover remote device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_CONTROLLER, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [closures.WindowCovering.cluster_id], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -def zigpy_shade_device(zigpy_device_mock): - """Zigpy shade device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SHADE, - SIG_EP_INPUT: [ - closures.Shade.cluster_id, - general.LevelControl.cluster_id, - general.OnOff.cluster_id, - ], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -def zigpy_keen_vent(zigpy_device_mock): - """Zigpy Keen Vent device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.LEVEL_CONTROLLABLE_OUTPUT, - SIG_EP_INPUT: [general.LevelControl.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock( - endpoints, manufacturer="Keen Home Inc", model="SV02-612-MP-1.3" - ) - - WCAttrs = closures.WindowCovering.AttributeDefs WCCmds = closures.WindowCovering.ServerCommandDefs WCT = closures.WindowCovering.WindowCoveringType WCCS = closures.WindowCovering.ConfigStatus -async def test_cover_non_tilt_initial_state( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device -) -> None: +async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: """Test ZHA cover platform.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + 
SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, + SIG_EP_INPUT: [closures.WindowCovering.cluster_id], + SIG_EP_OUTPUT: [], + } + }, + ) # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering - cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 0, - WCAttrs.window_covering_type.name: WCT.Drapery, - WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), - } - update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) - assert ( - not zha_device.endpoints[1] - .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] - .inverted - ) - assert cluster.read_attributes.call_count == 3 - assert ( - WCAttrs.current_position_lift_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - assert ( - WCAttrs.current_position_tilt_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test update - prev_call_count = cluster.read_attributes.call_count - await async_update_entity(hass, entity_id) - assert cluster.read_attributes.call_count == prev_call_count + 1 - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OPEN - assert state.attributes[ATTR_CURRENT_POSITION] == 100 - - -async def test_cover( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device -) -> None: - """Test ZHA cover platform.""" - - # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering + cluster = zigpy_device.endpoints[1].window_covering 
cluster.PLUGGED_ATTR_READS = { WCAttrs.current_position_lift_percentage.name: 0, WCAttrs.current_position_tilt_percentage.name: 42, @@ -204,9 +91,17 @@ async def test_cover( WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), } update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.COVER, zha_device_proxy, hass) + assert entity_id is not None + assert ( - not zha_device.endpoints[1] + not zha_device_proxy.device.endpoints[1] .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] .inverted ) @@ -220,21 +115,7 @@ async def test_cover( in cluster.read_attributes.call_args[0][0] ) - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test update - prev_call_count = cluster.read_attributes.call_count await async_update_entity(hass, entity_id) - assert cluster.read_attributes.call_count == prev_call_count + 1 state = hass.states.get(entity_id) assert state assert state.state == STATE_OPEN @@ -440,61 +321,41 @@ async def test_cover( assert cluster.request.call_args[0][2].command.name == WCCmds.stop.name assert cluster.request.call_args[1]["expect_reply"] is True - # test rejoin - cluster.PLUGGED_ATTR_READS = {WCAttrs.current_position_lift_percentage.name: 0} - await async_test_rejoin(hass, zigpy_cover_device, [cluster], (1,)) - assert 
hass.states.get(entity_id).state == STATE_OPEN - - # test toggle - with patch("zigpy.zcl.Cluster.request", return_value=[0x2, zcl_f.Status.SUCCESS]): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_TOGGLE_COVER_TILT, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster.request.call_count == 1 - assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == 0x08 - assert ( - cluster.request.call_args[0][2].command.name - == WCCmds.go_to_tilt_percentage.name - ) - assert cluster.request.call_args[0][3] == 100 - assert cluster.request.call_args[1]["expect_reply"] is True - async def test_cover_failures( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device + hass: HomeAssistant, setup_zha, zigpy_device_mock ) -> None: """Test ZHA cover platform failure cases.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, + SIG_EP_INPUT: [closures.WindowCovering.cluster_id], + SIG_EP_OUTPUT: [], + } + }, + ) # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering + cluster = zigpy_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { WCAttrs.current_position_tilt_percentage.name: 42, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, } update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) - entity_id = find_entity_id(Platform.COVER, zha_device, hass) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.COVER, zha_device_proxy, hass) assert entity_id is not None - await 
async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # test update returned None - prev_call_count = cluster.read_attributes.call_count - await async_update_entity(hass, entity_id) - assert cluster.read_attributes.call_count == prev_call_count + 1 - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - # test that the state has changed from unavailable to closed await send_attributes_report(hass, cluster, {0: 0, 8: 100, 1: 1}) assert hass.states.get(entity_id).state == STATE_CLOSED @@ -670,319 +531,3 @@ async def test_cover_failures( cluster.request.call_args[0][1] == closures.WindowCovering.ServerCommandDefs.stop.id ) - - -async def test_shade( - hass: HomeAssistant, zha_device_joined_restored, zigpy_shade_device -) -> None: - """Test ZHA cover platform for shade device type.""" - - # load up cover domain - zha_device = await zha_device_joined_restored(zigpy_shade_device) - - cluster_on_off = zigpy_shade_device.endpoints[1].on_off - cluster_level = zigpy_shade_device.endpoints[1].level - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test that the state has changed from unavailable to off - await send_attributes_report(hass, cluster_on_off, {8: 0, 0: False, 1: 1}) - assert hass.states.get(entity_id).state == STATE_CLOSED - - # test to see if it opens - await send_attributes_report(hass, cluster_on_off, 
{8: 0, 0: True, 1: 1}) - assert hass.states.get(entity_id).state == STATE_OPEN - - # close from UI command fails - with patch( - "zigpy.zcl.Cluster.request", - return_value=Default_Response( - command_id=closures.WindowCovering.ServerCommandDefs.down_close.id, - status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, - ), - ): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0000 - assert hass.states.get(entity_id).state == STATE_OPEN - - with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): - await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True - ) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0000 - assert hass.states.get(entity_id).state == STATE_CLOSED - - # open from UI command fails - assert ATTR_CURRENT_POSITION not in hass.states.get(entity_id).attributes - await send_attributes_report(hass, cluster_level, {0: 0}) - with patch( - "zigpy.zcl.Cluster.request", - return_value=Default_Response( - command_id=closures.WindowCovering.ServerCommandDefs.up_open.id, - status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, - ), - ): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0001 - assert hass.states.get(entity_id).state == STATE_CLOSED - - # stop from UI command fails - with patch( - "zigpy.zcl.Cluster.request", - return_value=Default_Response( - 
command_id=general.LevelControl.ServerCommandDefs.stop.id, - status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, - ), - ): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - - assert cluster_level.request.call_count == 1 - assert cluster_level.request.call_args[0][0] is False - assert ( - cluster_level.request.call_args[0][1] - == general.LevelControl.ServerCommandDefs.stop.id - ) - assert hass.states.get(entity_id).state == STATE_CLOSED - - # open from UI succeeds - with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): - await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True - ) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0001 - assert hass.states.get(entity_id).state == STATE_OPEN - - # set position UI command fails - with patch( - "zigpy.zcl.Cluster.request", - return_value=Default_Response( - command_id=closures.WindowCovering.ServerCommandDefs.go_to_lift_percentage.id, - status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, - ), - ): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_SET_COVER_POSITION, - {"entity_id": entity_id, "position": 47}, - blocking=True, - ) - - assert cluster_level.request.call_count == 1 - assert cluster_level.request.call_args[0][0] is False - assert cluster_level.request.call_args[0][1] == 0x0004 - assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47 - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 0 - - # set position UI success - with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_SET_COVER_POSITION, - {"entity_id": entity_id, "position": 47}, - blocking=True, - ) - 
assert cluster_level.request.call_count == 1 - assert cluster_level.request.call_args[0][0] is False - assert cluster_level.request.call_args[0][1] == 0x0004 - assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47 - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 47 - - # report position change - await send_attributes_report(hass, cluster_level, {8: 0, 0: 100, 1: 1}) - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == int( - 100 * 100 / 255 - ) - - # test rejoin - await async_test_rejoin( - hass, zigpy_shade_device, [cluster_level, cluster_on_off], (1,) - ) - assert hass.states.get(entity_id).state == STATE_OPEN - - # test cover stop - with patch("zigpy.zcl.Cluster.request", side_effect=TimeoutError): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster_level.request.call_count == 3 - assert cluster_level.request.call_args[0][0] is False - assert cluster_level.request.call_args[0][1] in (0x0003, 0x0007) - - -async def test_shade_restore_state( - hass: HomeAssistant, zha_device_restored, zigpy_shade_device -) -> None: - """Ensure states are restored on startup.""" - mock_restore_cache( - hass, - ( - State( - "cover.fakemanufacturer_fakemodel_shade", - STATE_OPEN, - {ATTR_CURRENT_POSITION: 50}, - ), - ), - ) - - hass.set_state(CoreState.starting) - - zha_device = await zha_device_restored(zigpy_shade_device) - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - # test that the cover was created and that it is available - assert hass.states.get(entity_id).state == STATE_OPEN - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 50 - - -async def test_cover_restore_state( - hass: HomeAssistant, zha_device_restored, zigpy_cover_device -) -> None: - """Ensure states are restored on startup.""" - cluster = 
zigpy_cover_device.endpoints[1].window_covering - cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 50, - WCAttrs.current_position_tilt_percentage.name: 42, - WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, - } - update_attribute_cache(cluster) - - hass.set_state(CoreState.starting) - - zha_device = await zha_device_restored(zigpy_cover_device) - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - # test that the cover was created and that it is available - assert hass.states.get(entity_id).state == STATE_OPEN - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 100 - 50 - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_TILT_POSITION] == 100 - 42 - - -async def test_keen_vent( - hass: HomeAssistant, zha_device_joined_restored, zigpy_keen_vent -) -> None: - """Test keen vent.""" - - # load up cover domain - zha_device = await zha_device_joined_restored(zigpy_keen_vent) - - cluster_on_off = zigpy_keen_vent.endpoints[1].on_off - cluster_level = zigpy_keen_vent.endpoints[1].level - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test that the state has changed from unavailable to off - await send_attributes_report(hass, cluster_on_off, {8: 0, 0: False, 1: 1}) - assert hass.states.get(entity_id).state == STATE_CLOSED - - # open from UI command fails - p1 = patch.object(cluster_on_off, "request", side_effect=TimeoutError) - p2 = patch.object(cluster_level, "request", return_value=[4, 0]) - - with p1, p2: - with pytest.raises(HomeAssistantError): - await 
hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster_on_off.request.call_count == 3 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0001 - assert cluster_level.request.call_count == 1 - assert hass.states.get(entity_id).state == STATE_CLOSED - - # open from UI command success - p1 = patch.object(cluster_on_off, "request", return_value=[1, 0]) - p2 = patch.object(cluster_level, "request", return_value=[4, 0]) - - with p1, p2: - await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True - ) - await asyncio.sleep(0) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0001 - assert cluster_level.request.call_count == 1 - assert hass.states.get(entity_id).state == STATE_OPEN - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 100 - - -async def test_cover_remote( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_remote -) -> None: - """Test ZHA cover remote.""" - - # load up cover domain - await zha_device_joined_restored(zigpy_cover_remote) - - cluster = zigpy_cover_remote.endpoints[1].out_clusters[ - closures.WindowCovering.cluster_id - ] - zha_events = async_capture_events(hass, ZHA_EVENT) - - # up command - hdr = make_zcl_header(0, global_command=False) - cluster.handle_message(hdr, []) - await hass.async_block_till_done() - - assert len(zha_events) == 1 - assert zha_events[0].data[ATTR_COMMAND] == "up_open" - - # down command - hdr = make_zcl_header(1, global_command=False) - cluster.handle_message(hdr, []) - await hass.async_block_till_done() - - assert len(zha_events) == 2 - assert zha_events[1].data[ATTR_COMMAND] == "down_close" diff --git a/tests/components/zha/test_device.py b/tests/components/zha/test_device.py deleted file mode 100644 
index 87acdc5fd1c..00000000000 --- a/tests/components/zha/test_device.py +++ /dev/null @@ -1,363 +0,0 @@ -"""Test ZHA device switch.""" - -from datetime import timedelta -import logging -import time -from unittest import mock -from unittest.mock import patch - -import pytest -import zigpy.profiles.zha -import zigpy.types -from zigpy.zcl.clusters import general -import zigpy.zdo.types as zdo_t - -from homeassistant.components.zha.core.const import ( - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, -) -from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -import homeassistant.helpers.device_registry as dr -import homeassistant.util.dt as dt_util - -from .common import async_enable_traffic, make_zcl_header -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE - -from tests.common import async_fire_time_changed - - -@pytest.fixture(autouse=True) -def required_platforms_only(): - """Only set up the required platform and required base platforms to speed up tests.""" - with patch( - "homeassistant.components.zha.PLATFORMS", - ( - Platform.DEVICE_TRACKER, - Platform.SENSOR, - Platform.SELECT, - Platform.SWITCH, - Platform.BINARY_SENSOR, - ), - ): - yield - - -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - - def _dev(with_basic_cluster_handler: bool = True, **kwargs): - in_clusters = [general.OnOff.cluster_id] - if with_basic_cluster_handler: - in_clusters.append(general.Basic.cluster_id) - - endpoints = { - 3: { - SIG_EP_INPUT: in_clusters, - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - return zigpy_device_mock(endpoints, **kwargs) - - return _dev - - -@pytest.fixture -def zigpy_device_mains(zigpy_device_mock): - """Device tracker zigpy device.""" - - def _dev(with_basic_cluster_handler: bool = True): - in_clusters = [general.OnOff.cluster_id] - if 
with_basic_cluster_handler: - in_clusters.append(general.Basic.cluster_id) - - endpoints = { - 3: { - SIG_EP_INPUT: in_clusters, - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00" - ) - - return _dev - - -@pytest.fixture -def device_with_basic_cluster_handler(zigpy_device_mains): - """Return a ZHA device with a basic cluster handler present.""" - return zigpy_device_mains(with_basic_cluster_handler=True) - - -@pytest.fixture -def device_without_basic_cluster_handler(zigpy_device): - """Return a ZHA device without a basic cluster handler present.""" - return zigpy_device(with_basic_cluster_handler=False) - - -@pytest.fixture -async def ota_zha_device(zha_device_restored, zigpy_device_mock): - """ZHA device with OTA cluster fixture.""" - zigpy_dev = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - SIG_EP_TYPE: 0x1234, - } - }, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - return await zha_device_restored(zigpy_dev) - - -def _send_time_changed(hass, seconds): - """Send a time changed event.""" - now = dt_util.utcnow() + timedelta(seconds=seconds) - async_fire_time_changed(hass, now) - - -@patch( - "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", - new=mock.AsyncMock(), -) -async def test_check_available_success( - hass: HomeAssistant, device_with_basic_cluster_handler, zha_device_restored -) -> None: - """Check device availability success on 1st try.""" - zha_device = await zha_device_restored(device_with_basic_cluster_handler) - await async_enable_traffic(hass, [zha_device]) - basic_ch = device_with_basic_cluster_handler.endpoints[3].basic - - basic_ch.read_attributes.reset_mock() - device_with_basic_cluster_handler.last_seen = None - assert zha_device.available is True - 
_send_time_changed(hass, zha_device.consider_unavailable_time + 2) - await hass.async_block_till_done() - assert zha_device.available is False - assert basic_ch.read_attributes.await_count == 0 - - device_with_basic_cluster_handler.last_seen = ( - time.time() - zha_device.consider_unavailable_time - 2 - ) - _seens = [time.time(), device_with_basic_cluster_handler.last_seen] - - def _update_last_seen(*args, **kwargs): - device_with_basic_cluster_handler.last_seen = _seens.pop() - - basic_ch.read_attributes.side_effect = _update_last_seen - - # successfully ping zigpy device, but zha_device is not yet available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 1 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is False - - # There was traffic from the device: pings, but not yet available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 2 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is False - - # There was traffic from the device: don't try to ping, marked as available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 2 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is True - - -@patch( - "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", - new=mock.AsyncMock(), -) -async def test_check_available_unsuccessful( - hass: HomeAssistant, device_with_basic_cluster_handler, zha_device_restored -) -> None: - """Check device availability all tries fail.""" - - zha_device = await zha_device_restored(device_with_basic_cluster_handler) - await async_enable_traffic(hass, [zha_device]) - basic_ch = device_with_basic_cluster_handler.endpoints[3].basic - - assert 
zha_device.available is True - assert basic_ch.read_attributes.await_count == 0 - - device_with_basic_cluster_handler.last_seen = ( - time.time() - zha_device.consider_unavailable_time - 2 - ) - - # unsuccessfully ping zigpy device, but zha_device is still available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 1 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is True - - # still no traffic, but zha_device is still available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 2 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is True - - # not even trying to update, device is unavailable - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 2 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is False - - -@patch( - "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", - new=mock.AsyncMock(), -) -async def test_check_available_no_basic_cluster_handler( - hass: HomeAssistant, - device_without_basic_cluster_handler, - zha_device_restored, - caplog: pytest.LogCaptureFixture, -) -> None: - """Check device availability for a device without basic cluster.""" - caplog.set_level(logging.DEBUG, logger="homeassistant.components.zha") - - zha_device = await zha_device_restored(device_without_basic_cluster_handler) - await async_enable_traffic(hass, [zha_device]) - - assert zha_device.available is True - - device_without_basic_cluster_handler.last_seen = ( - time.time() - zha_device.consider_unavailable_time - 2 - ) - - assert "does not have a mandatory basic cluster" not in caplog.text - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert zha_device.available is 
False - assert "does not have a mandatory basic cluster" in caplog.text - - -async def test_ota_sw_version( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, ota_zha_device -) -> None: - """Test device entry gets sw_version updated via OTA cluster handler.""" - - ota_ch = ota_zha_device._endpoints[1].client_cluster_handlers["1:0x0019"] - entry = device_registry.async_get(ota_zha_device.device_id) - assert entry.sw_version is None - - cluster = ota_ch.cluster - hdr = make_zcl_header(1, global_command=False) - sw_version = 0x2345 - cluster.handle_message(hdr, [1, 2, 3, sw_version, None]) - await hass.async_block_till_done() - entry = device_registry.async_get(ota_zha_device.device_id) - assert int(entry.sw_version, base=16) == sw_version - - -@pytest.mark.parametrize( - ("device", "last_seen_delta", "is_available"), - [ - ("zigpy_device", 0, True), - ( - "zigpy_device", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS + 2, - True, - ), - ( - "zigpy_device", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY - 2, - True, - ), - ( - "zigpy_device", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY + 2, - False, - ), - ("zigpy_device_mains", 0, True), - ( - "zigpy_device_mains", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS - 2, - True, - ), - ( - "zigpy_device_mains", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS + 2, - False, - ), - ( - "zigpy_device_mains", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY - 2, - False, - ), - ( - "zigpy_device_mains", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY + 2, - False, - ), - ], -) -async def test_device_restore_availability( - hass: HomeAssistant, - request: pytest.FixtureRequest, - device, - last_seen_delta, - is_available, - zha_device_restored, -) -> None: - """Test initial availability for restored devices.""" - - zigpy_device = request.getfixturevalue(device)() - zha_device = await zha_device_restored( - zigpy_device, last_seen=time.time() - last_seen_delta - ) - entity_id = "switch.fakemanufacturer_fakemodel_switch" - - await 
hass.async_block_till_done() - # ensure the switch entity was created - assert hass.states.get(entity_id).state is not None - assert zha_device.available is is_available - if is_available: - assert hass.states.get(entity_id).state == STATE_OFF - else: - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - -async def test_device_is_active_coordinator( - hass: HomeAssistant, zha_device_joined, zigpy_device -) -> None: - """Test that the current coordinator is uniquely detected.""" - - current_coord_dev = zigpy_device(ieee="aa:bb:cc:dd:ee:ff:00:11", nwk=0x0000) - current_coord_dev.node_desc = current_coord_dev.node_desc.replace( - logical_type=zdo_t.LogicalType.Coordinator - ) - - old_coord_dev = zigpy_device(ieee="aa:bb:cc:dd:ee:ff:00:12", nwk=0x0000) - old_coord_dev.node_desc = old_coord_dev.node_desc.replace( - logical_type=zdo_t.LogicalType.Coordinator - ) - - # The two coordinators have different IEEE addresses - assert current_coord_dev.ieee != old_coord_dev.ieee - - current_coordinator = await zha_device_joined(current_coord_dev) - stale_coordinator = await zha_device_joined(old_coord_dev) - - # Ensure the current ApplicationController's IEEE matches our coordinator's - current_coordinator.gateway.application_controller.state.node_info.ieee = ( - current_coord_dev.ieee - ) - - assert current_coordinator.is_active_coordinator - assert not stale_coordinator.is_active_coordinator diff --git a/tests/components/zha/test_device_action.py b/tests/components/zha/test_device_action.py index 13e9d789191..8bee821654d 100644 --- a/tests/components/zha/test_device_action.py +++ b/tests/components/zha/test_device_action.py @@ -1,23 +1,23 @@ """The test for ZHA device automation actions.""" -from unittest.mock import call, patch +from unittest.mock import patch import pytest from pytest_unordered import unordered -from zhaquirks.inovelli.VZM31SN import InovelliVZM31SNv11 -import zigpy.profiles.zha +from zigpy.profiles import zha from zigpy.zcl.clusters import 
general, security import zigpy.zcl.foundation as zcl_f from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.zha import DOMAIN +from homeassistant.components.zha.helpers import get_zha_gateway from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from tests.common import async_get_device_automations, async_mock_service @@ -52,66 +52,37 @@ def required_platforms_only(): yield -@pytest.fixture -async def device_ias(hass, zigpy_device_mock, zha_device_joined_restored): - """IAS device fixture.""" +async def test_get_actions( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + setup_zha, + zigpy_device_mock, +) -> None: + """Test we get the expected actions from a ZHA device.""" - clusters = [general.Basic, security.IasZone, security.IasWd] - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [c.cluster_id for c in clusters], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - }, - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - zha_device.update_available(True) - await hass.async_block_till_done() - return zigpy_device, zha_device - - -@pytest.fixture -async def device_inovelli(hass, zigpy_device_mock, zha_device_joined): - """Inovelli device fixture.""" + await setup_zha() + gateway = get_zha_gateway(hass) zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [ general.Basic.cluster_id, - general.Identify.cluster_id, - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - 0xFC31, + security.IasZone.cluster_id, + 
security.IasWd.cluster_id, ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.DIMMABLE_LIGHT, + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, + SIG_EP_PROFILE: zha.PROFILE_ID, } - }, - ieee="00:1d:8f:08:0c:90:69:6b", - manufacturer="Inovelli", - model="VZM31-SN", - quirk=InovelliVZM31SNv11, + } ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.update_available(True) - await hass.async_block_till_done() - return zigpy_device, zha_device - - -async def test_get_actions( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - device_ias, -) -> None: - """Test we get the expected actions from a ZHA device.""" - - ieee_address = str(device_ias[0].ieee) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + ieee_address = str(zigpy_device.ieee) reg_device = device_registry.async_get_device(identifiers={(DOMAIN, ieee_address)}) siren_level_select = entity_registry.async_get( @@ -168,112 +139,40 @@ async def test_get_actions( assert actions == unordered(expected_actions) -async def test_get_inovelli_actions( +async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - device_inovelli, -) -> None: - """Test we get the expected actions from a ZHA device.""" - - inovelli_ieee_address = str(device_inovelli[0].ieee) - inovelli_reg_device = device_registry.async_get_device( - identifiers={(DOMAIN, inovelli_ieee_address)} - ) - inovelli_button = entity_registry.async_get("button.inovelli_vzm31_sn_identify") - inovelli_light = entity_registry.async_get("light.inovelli_vzm31_sn_light") - - actions = await async_get_device_automations( - hass, DeviceAutomationType.ACTION, inovelli_reg_device.id - ) - - expected_actions = [ - { - "device_id": inovelli_reg_device.id, - "domain": DOMAIN, - 
"metadata": {}, - "type": "issue_all_led_effect", - }, - { - "device_id": inovelli_reg_device.id, - "domain": DOMAIN, - "metadata": {}, - "type": "issue_individual_led_effect", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.BUTTON, - "entity_id": inovelli_button.id, - "metadata": {"secondary": True}, - "type": "press", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "turn_off", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "turn_on", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "toggle", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "brightness_increase", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "brightness_decrease", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "flash", - }, - ] - - assert actions == unordered(expected_actions) - - -async def test_action( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, device_ias, device_inovelli + setup_zha, + zigpy_device_mock, ) -> None: """Test for executing a ZHA device action.""" - zigpy_device, zha_device = device_ias - inovelli_zigpy_device, inovelli_zha_device = device_inovelli + await setup_zha() + gateway = get_zha_gateway(hass) + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + security.IasZone.cluster_id, + security.IasWd.cluster_id, + ], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + 
SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) zigpy_device.device_automation_triggers = { (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE} } - ieee_address = str(zha_device.ieee) - inovelli_ieee_address = str(inovelli_zha_device.ieee) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + ieee_address = str(zigpy_device.ieee) reg_device = device_registry.async_get_device(identifiers={(DOMAIN, ieee_address)}) - inovelli_reg_device = device_registry.async_get_device( - identifiers={(DOMAIN, inovelli_ieee_address)} - ) - - cluster = inovelli_zigpy_device.endpoints[1].in_clusters[0xFC31] with patch( "zigpy.zcl.Cluster.request", @@ -298,25 +197,6 @@ async def test_action( "device_id": reg_device.id, "type": "warn", }, - { - "domain": DOMAIN, - "device_id": inovelli_reg_device.id, - "type": "issue_all_led_effect", - "effect_type": "Open_Close", - "duration": 5, - "level": 10, - "color": 41, - }, - { - "domain": DOMAIN, - "device_id": inovelli_reg_device.id, - "type": "issue_individual_led_effect", - "effect_type": "Falling", - "led_number": 1, - "duration": 5, - "level": 10, - "color": 41, - }, ], } ] @@ -326,7 +206,11 @@ async def test_action( await hass.async_block_till_done() calls = async_mock_service(hass, DOMAIN, "warning_device_warn") - cluster_handler = zha_device.endpoints[1].client_cluster_handlers["1:0x0006"] + cluster_handler = ( + gateway.get_device(zigpy_device.ieee) + .endpoints[1] + .client_cluster_handlers["1:0x0006"] + ) cluster_handler.zha_send_event(COMMAND_SINGLE, []) await hass.async_block_till_done() @@ -335,44 +219,41 @@ async def test_action( assert calls[0].service == "warning_device_warn" assert calls[0].data["ieee"] == ieee_address - assert len(cluster.request.mock_calls) == 2 - assert ( - call( - False, - cluster.commands_by_name["led_effect"].id, - 
cluster.commands_by_name["led_effect"].schema, - 6, - 41, - 10, - 5, - expect_reply=False, - manufacturer=4151, - tsn=None, - ) - in cluster.request.call_args_list - ) - assert ( - call( - False, - cluster.commands_by_name["individual_led_effect"].id, - cluster.commands_by_name["individual_led_effect"].schema, - 1, - 6, - 41, - 10, - 5, - expect_reply=False, - manufacturer=4151, - tsn=None, - ) - in cluster.request.call_args_list - ) - -async def test_invalid_zha_event_type(hass: HomeAssistant, device_ias) -> None: +async def test_invalid_zha_event_type( + hass: HomeAssistant, setup_zha, zigpy_device_mock +) -> None: """Test that unexpected types are not passed to `zha_send_event`.""" - zigpy_device, zha_device = device_ias - cluster_handler = zha_device._endpoints[1].client_cluster_handlers["1:0x0006"] + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + security.IasZone.cluster_id, + security.IasWd.cluster_id, + ], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + zigpy_device.device_automation_triggers = { + (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE} + } + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + cluster_handler = ( + gateway.get_device(zigpy_device.ieee) + .endpoints[1] + .client_cluster_handlers["1:0x0006"] + ) # `zha_send_event` accepts only zigpy responses, lists, and dicts with pytest.raises(TypeError): diff --git a/tests/components/zha/test_device_tracker.py b/tests/components/zha/test_device_tracker.py index 64360c8b2ff..ae96de44f17 100644 --- a/tests/components/zha/test_device_tracker.py +++ b/tests/components/zha/test_device_tracker.py @@ -5,23 +5,22 @@ import time from unittest.mock import patch import pytest -import zigpy.profiles.zha 
+from zha.application.registries import SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE +from zigpy.profiles import zha from zigpy.zcl.clusters import general from homeassistant.components.device_tracker import SourceType -from homeassistant.components.zha.core.registries import ( - SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) -from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_HOME, STATE_NOT_HOME, Platform from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - send_attributes_report, -) +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from tests.common import async_fire_time_changed @@ -44,49 +43,41 @@ def device_tracker_platforms_only(): yield -@pytest.fixture -def zigpy_device_dt(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.PowerConfiguration.cluster_id, - general.Identify.cluster_id, - general.PollControl.cluster_id, - general.BinaryInput.cluster_id, - ], - SIG_EP_OUTPUT: [general.Identify.cluster_id, general.Ota.cluster_id], - SIG_EP_TYPE: SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - } - return zigpy_device_mock(endpoints) - - async def test_device_tracker( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_dt + hass: HomeAssistant, setup_zha, zigpy_device_mock ) -> None: """Test ZHA device tracker platform.""" - zha_device = await zha_device_joined_restored(zigpy_device_dt) - cluster = zigpy_device_dt.endpoints.get(1).power - entity_id = find_entity_id(Platform.DEVICE_TRACKER, zha_device, hass) + await 
setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.PowerConfiguration.cluster_id, + general.Identify.cluster_id, + general.PollControl.cluster_id, + general.BinaryInput.cluster_id, + ], + SIG_EP_OUTPUT: [general.Identify.cluster_id, general.Ota.cluster_id], + SIG_EP_TYPE: SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.DEVICE_TRACKER, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].power assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_NOT_HOME - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the device tracker was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - zigpy_device_dt.last_seen = time.time() - 120 - next_update = dt_util.utcnow() + timedelta(seconds=30) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - # test that the state has changed from unavailable to not home assert hass.states.get(entity_id).state == STATE_NOT_HOME @@ -95,7 +86,7 @@ async def test_device_tracker( hass, cluster, {0x0000: 0, 0x0020: 23, 0x0021: 200, 0x0001: 2} ) - zigpy_device_dt.last_seen = time.time() + 10 + zigpy_device.last_seen = time.time() + 10 next_update = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() @@ -107,7 +98,3 @@ async def test_device_tracker( 
assert entity.is_connected is True assert entity.source_type == SourceType.ROUTER assert entity.battery_level == 100 - - # test adding device tracker to the network and HA - await async_test_rejoin(hass, zigpy_device_dt, [cluster], (2,)) - assert hass.states.get(entity_id).state == STATE_HOME diff --git a/tests/components/zha/test_device_trigger.py b/tests/components/zha/test_device_trigger.py index b43392af61a..09b2d155547 100644 --- a/tests/components/zha/test_device_trigger.py +++ b/tests/components/zha/test_device_trigger.py @@ -1,35 +1,26 @@ """ZHA device automation trigger tests.""" -from datetime import timedelta -import time from unittest.mock import patch import pytest +from zha.application.const import ATTR_ENDPOINT_ID from zigpy.application import ControllerApplication +from zigpy.device import Device as ZigpyDevice import zigpy.profiles.zha -from zigpy.zcl.clusters import general +import zigpy.types from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) -from homeassistant.components.zha.core.const import ATTR_ENDPOINT_ID +from homeassistant.components.zha.helpers import get_zha_gateway from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -import homeassistant.util.dt as dt_util -from .common import async_enable_traffic -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -51,16 +42,6 @@ LONG_PRESS = "remote_button_long_press" LONG_RELEASE = 
"remote_button_long_release" -SWITCH_SIGNATURE = { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } -} - - @pytest.fixture(autouse=True) def sensor_platforms_only(): """Only set up the sensor platform and required base platforms to speed up tests.""" @@ -75,31 +56,21 @@ def _same_lists(list_a, list_b): return all(item in list_b for item in list_a) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - -@pytest.fixture -async def mock_devices(hass, zigpy_device_mock, zha_device_joined_restored): - """IAS device fixture.""" - - zigpy_device = zigpy_device_mock(SWITCH_SIGNATURE) - - zha_device = await zha_device_joined_restored(zigpy_device) - zha_device.update_available(True) - await hass.async_block_till_done() - return zigpy_device, zha_device - - async def test_triggers( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, mock_devices + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + setup_zha, ) -> None: """Test ZHA device triggers.""" - zigpy_device, zha_device = mock_devices + await setup_zha() + gateway = get_zha_gateway(hass) + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, @@ -108,9 +79,13 @@ async def test_triggers( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.ieee) + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) - reg_device = 
device_registry.async_get_device(identifiers={("zha", ieee_address)}) + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) triggers = await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, reg_device.id @@ -170,14 +145,26 @@ async def test_triggers( async def test_no_triggers( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, mock_devices + hass: HomeAssistant, device_registry: dr.DeviceRegistry, setup_zha ) -> None: """Test ZHA device with no triggers.""" + await setup_zha() + gateway = get_zha_gateway(hass) - _, zha_device = mock_devices - ieee_address = str(zha_device.ieee) + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + zigpy_device.device_automation_triggers = {} - reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) triggers = await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, reg_device.id @@ -197,12 +184,21 @@ async def test_no_triggers( async def test_if_fires_on_event( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - mock_devices, - calls: list[ServiceCall], + service_calls: list[ServiceCall], + setup_zha, ) -> None: """Test for remote triggers firing.""" - zigpy_device, zha_device = mock_devices + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + ep = zigpy_device.add_endpoint(1) + ep.add_output_cluster(0x0006) zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: 
COMMAND_SHAKE}, @@ -212,8 +208,13 @@ async def test_if_fires_on_event( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.ieee) - reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) assert await async_setup_component( hass, @@ -239,35 +240,46 @@ async def test_if_fires_on_event( await hass.async_block_till_done() - cluster_handler = zha_device.endpoints[1].client_cluster_handlers["1:0x0006"] - cluster_handler.zha_send_event(COMMAND_SINGLE, []) + zha_device.emit_zha_event( + { + "unique_id": f"{zha_device.ieee}:1:0x0006", + "endpoint_id": 1, + "cluster_id": 0x0006, + "command": COMMAND_SINGLE, + "args": [], + "params": {}, + }, + ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["message"] == "service called" + assert len(service_calls) == 1 + assert service_calls[0].data["message"] == "service called" async def test_device_offline_fires( hass: HomeAssistant, - zigpy_device_mock, - zha_device_restored, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], + setup_zha, ) -> None: """Test for device offline triggers firing.""" - zigpy_device = zigpy_device_mock( - { - 1: { - "in_clusters": [general.Basic.cluster_id], - "out_clusters": [general.OnOff.cluster_id], - "device_type": 0, - } - } + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, ) - zha_device = await zha_device_restored(zigpy_device, last_seen=time.time()) - await async_enable_traffic(hass, [zha_device]) - await 
hass.async_block_till_done() + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) assert await async_setup_component( hass, @@ -276,7 +288,7 @@ async def test_device_offline_fires( automation.DOMAIN: [ { "trigger": { - "device_id": zha_device.device_id, + "device_id": reg_device.id, "domain": "zha", "platform": "device", "type": "device_offline", @@ -291,44 +303,39 @@ async def test_device_offline_fires( }, ) - await hass.async_block_till_done() assert zha_device.available is True - - zigpy_device.last_seen = time.time() - zha_device.consider_unavailable_time - 2 - - # there are 3 checkins to perform before marking the device unavailable - future = dt_util.utcnow() + timedelta(seconds=90) - async_fire_time_changed(hass, future) + zha_device.available = False + zha_device.emit_zha_event({"device_event_type": "device_offline"}) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=90) - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - future = dt_util.utcnow() + timedelta( - seconds=zha_device.consider_unavailable_time + 100 - ) - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - assert zha_device.available is False - assert len(calls) == 1 - assert calls[0].data["message"] == "service called" + assert len(service_calls) == 1 + assert service_calls[0].data["message"] == "service called" async def test_exception_no_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - mock_devices, - calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, + setup_zha, ) -> None: """Test for exception when validating device triggers.""" - _, zha_device = mock_devices + await setup_zha() + gateway = get_zha_gateway(hass) - ieee_address = 
str(zha_device.ieee) - reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) await async_setup_component( hass, @@ -361,14 +368,19 @@ async def test_exception_no_triggers( async def test_exception_bad_trigger( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - mock_devices, - calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, + setup_zha, ) -> None: """Test for exception when validating device triggers.""" - zigpy_device, zha_device = mock_devices + await setup_zha() + gateway = get_zha_gateway(hass) + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, @@ -377,8 +389,13 @@ async def test_exception_bad_trigger( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.ieee) - reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) await async_setup_component( hass, @@ -412,23 +429,37 @@ async def test_validate_trigger_config_missing_info( hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: 
MockConfigEntry, - zigpy_device_mock, - mock_zigpy_connect: ControllerApplication, - zha_device_joined, caplog: pytest.LogCaptureFixture, + setup_zha, ) -> None: """Test device triggers referring to a missing device.""" - # Join a device - switch = zigpy_device_mock(SWITCH_SIGNATURE) - await zha_device_joined(switch) + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + zigpy_device.device_automation_triggers = { + (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, + (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, + (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE}, + (LONG_PRESS, LONG_PRESS): {COMMAND: COMMAND_HOLD}, + (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, + } + + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) # After we unload the config entry, trigger info was not cached on startup, nor can # it be pulled from the current device, making it impossible to validate triggers await hass.config_entries.async_unload(config_entry.entry_id) reg_device = device_registry.async_get_device( - identifiers={("zha", str(switch.ieee))} + identifiers={("zha", str(zha_device.ieee))} ) assert await async_setup_component( @@ -465,16 +496,32 @@ async def test_validate_trigger_config_unloaded_bad_info( hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, - zigpy_device_mock, - mock_zigpy_connect: ControllerApplication, - zha_device_joined, caplog: pytest.LogCaptureFixture, + zigpy_app_controller: ControllerApplication, + setup_zha, ) -> None: """Test device triggers referring to a missing device.""" - # Join a device - switch = zigpy_device_mock(SWITCH_SIGNATURE) - await zha_device_joined(switch) + await setup_zha() + gateway = 
get_zha_gateway(hass) + + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + zigpy_device.device_automation_triggers = { + (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, + (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, + (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE}, + (LONG_PRESS, LONG_PRESS): {COMMAND: COMMAND_HOLD}, + (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, + } + + zigpy_app_controller.devices[zigpy_device.ieee] = zigpy_device + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) # After we unload the config entry, trigger info was not cached on startup, nor can # it be pulled from the current device, making it impossible to validate triggers @@ -482,11 +529,12 @@ async def test_validate_trigger_config_unloaded_bad_info( # Reload ZHA to persist the device info in the cache await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + await hass.config_entries.async_unload(config_entry.entry_id) reg_device = device_registry.async_get_device( - identifiers={("zha", str(switch.ieee))} + identifiers={("zha", str(zha_device.ieee))} ) assert await async_setup_component( diff --git a/tests/components/zha/test_diagnostics.py b/tests/components/zha/test_diagnostics.py index 4bb30a5fc8c..ed3f83c0c36 100644 --- a/tests/components/zha/test_diagnostics.py +++ b/tests/components/zha/test_diagnostics.py @@ -3,13 +3,17 @@ from unittest.mock import patch import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from zigpy.profiles import zha from zigpy.zcl.clusters import security -from homeassistant.components.diagnostics import REDACTED -from homeassistant.components.zha.core.device 
import ZHADevice -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.components.zha.diagnostics import KEYS_TO_REDACT +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -23,14 +27,6 @@ from tests.components.diagnostics import ( ) from tests.typing import ClientSessionGenerator -CONFIG_ENTRY_DIAGNOSTICS_KEYS = [ - "config", - "config_entry", - "application_state", - "versions", - "devices", -] - @pytest.fixture(autouse=True) def required_platforms_only(): @@ -41,33 +37,36 @@ def required_platforms_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" - ) - - async def test_diagnostics_for_config_entry( hass: HomeAssistant, hass_client: ClientSessionGenerator, config_entry: MockConfigEntry, - zha_device_joined, - zigpy_device, + setup_zha, + zigpy_device_mock, + snapshot: SnapshotAssertion, ) -> None: """Test diagnostics for config entry.""" - await zha_device_joined(zigpy_device) + await setup_zha() gateway = get_zha_gateway(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ) + + gateway.get_or_create_device(zigpy_device) + await 
gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + scan = {c: c for c in range(11, 26 + 1)} with patch.object(gateway.application_controller, "energy_scan", return_value=scan): @@ -75,30 +74,9 @@ async def test_diagnostics_for_config_entry( hass, hass_client, config_entry ) - for key in CONFIG_ENTRY_DIAGNOSTICS_KEYS: - assert key in diagnostics_data - assert diagnostics_data[key] is not None - - # Energy scan results are presented as a percentage. JSON object keys also must be - # strings, not integers. - assert diagnostics_data["energy_scan"] == { - str(k): 100 * v / 255 for k, v in scan.items() - } - - assert isinstance(diagnostics_data["devices"], list) - assert len(diagnostics_data["devices"]) == 2 - assert diagnostics_data["devices"] == [ - { - "manufacturer": "Coordinator Manufacturer", - "model": "Coordinator Model", - "logical_type": "Coordinator", - }, - { - "manufacturer": "FakeManufacturer", - "model": "FakeModel", - "logical_type": "EndDevice", - }, - ] + assert diagnostics_data == snapshot( + exclude=props("created_at", "modified_at", "entry_id", "versions") + ) async def test_diagnostics_for_device( @@ -106,19 +84,41 @@ async def test_diagnostics_for_device( hass_client: ClientSessionGenerator, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, - zha_device_joined, - zigpy_device, + setup_zha, + zigpy_device_mock, + snapshot: SnapshotAssertion, ) -> None: """Test diagnostics for device.""" - zha_device: ZHADevice = await zha_device_joined(zigpy_device) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + 
node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) # add unknown unsupported attribute with id and name - zha_device.device.endpoints[1].in_clusters[ + zha_device_proxy.device.device.endpoints[1].in_clusters[ security.IasAce.cluster_id ].unsupported_attributes.update({0x1000, "unknown_attribute_name"}) # add known unsupported attributes with id and name - zha_device.device.endpoints[1].in_clusters[ + zha_device_proxy.device.device.endpoints[1].in_clusters[ security.IasZone.cluster_id ].unsupported_attributes.update( { @@ -128,17 +128,11 @@ async def test_diagnostics_for_device( ) device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} + identifiers={("zha", str(zha_device_proxy.device.ieee))} ) assert device diagnostics_data = await get_diagnostics_for_device( hass, hass_client, config_entry, device ) - assert diagnostics_data - device_info: dict = zha_device.zha_device_info - for key in device_info: - assert key in diagnostics_data - if key not in KEYS_TO_REDACT: - assert key in diagnostics_data - else: - assert diagnostics_data[key] == REDACTED + + assert diagnostics_data == snapshot(exclude=props("device_reg_id", "last_seen")) diff --git a/tests/components/zha/test_discover.py b/tests/components/zha/test_discover.py deleted file mode 100644 index c59acc3395f..00000000000 --- a/tests/components/zha/test_discover.py +++ /dev/null @@ -1,1100 +0,0 @@ -"""Test ZHA device discovery.""" - -from collections.abc import Callable -import enum -import itertools -import re -from typing import Any -from unittest import mock -from unittest.mock import AsyncMock, Mock, patch - -import pytest -from zhaquirks.ikea import PowerConfig1CRCluster, ScenesCluster -from zhaquirks.xiaomi 
import ( - BasicCluster, - LocalIlluminanceMeasurementCluster, - XiaomiPowerConfigurationPercent, -) -from zhaquirks.xiaomi.aqara.driver_curtain_e1 import ( - WindowCoveringE1, - XiaomiAqaraDriverE1, -) -from zigpy.const import SIG_ENDPOINTS, SIG_MANUFACTURER, SIG_MODEL, SIG_NODE_DESC -import zigpy.profiles.zha -import zigpy.quirks -from zigpy.quirks.v2 import ( - BinarySensorMetadata, - EntityMetadata, - EntityType, - NumberMetadata, - QuirksV2RegistryEntry, - ZCLCommandButtonMetadata, - ZCLSensorMetadata, - add_to_registry_v2, -) -from zigpy.quirks.v2.homeassistant import UnitOfTime -import zigpy.types -from zigpy.zcl import ClusterType -import zigpy.zcl.clusters.closures -import zigpy.zcl.clusters.general -import zigpy.zcl.clusters.security -import zigpy.zcl.foundation as zcl_f - -from homeassistant.components.zha.core import cluster_handlers -import homeassistant.components.zha.core.const as zha_const -from homeassistant.components.zha.core.device import ZHADevice -import homeassistant.components.zha.core.discovery as disc -from homeassistant.components.zha.core.endpoint import Endpoint -from homeassistant.components.zha.core.helpers import get_zha_gateway -import homeassistant.components.zha.core.registries as zha_regs -from homeassistant.const import STATE_OFF, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_platform import EntityPlatform -from homeassistant.util.json import load_json - -from .common import find_entity_id, update_attribute_cache -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from .zha_devices_list import ( - DEV_SIG_ATTRIBUTES, - DEV_SIG_CLUSTER_HANDLERS, - DEV_SIG_ENT_MAP, - DEV_SIG_ENT_MAP_CLASS, - DEV_SIG_ENT_MAP_ID, - DEV_SIG_EVT_CLUSTER_HANDLERS, - DEVICES, -) - -NO_TAIL_ID = re.compile("_\\d$") -UNIQUE_ID_HD = re.compile(r"^(([\da-fA-F]{2}:){7}[\da-fA-F]{2}-\d{1,3})", re.X) - -IGNORE_SUFFIXES = [ - 
zigpy.zcl.clusters.general.OnOff.StartUpOnOff.__name__, - "on_off_transition_time", - "on_level", - "on_transition_time", - "off_transition_time", - "default_move_rate", - "start_up_current_level", - "counter", -] - - -def contains_ignored_suffix(unique_id: str) -> bool: - """Return true if the unique_id ends with an ignored suffix.""" - return any(suffix.lower() in unique_id.lower() for suffix in IGNORE_SUFFIXES) - - -@patch( - "zigpy.zcl.clusters.general.Identify.request", - new=AsyncMock(return_value=[mock.sentinel.data, zcl_f.Status.SUCCESS]), -) -# We do this here because we are testing ZHA discovery logic. Point being we want to ensure that -# all discovered entities are dispatched for creation. In order to test this we need the entities -# added to HA. So we ensure that they are all enabled even though they won't necessarily be in reality -# at runtime -@patch( - "homeassistant.components.zha.entity.ZhaEntity.entity_registry_enabled_default", - new=Mock(return_value=True), -) -@pytest.mark.parametrize("device", DEVICES) -async def test_devices( - device, - hass_disable_services, - zigpy_device_mock, - zha_device_joined_restored, -) -> None: - """Test device discovery.""" - zigpy_device = zigpy_device_mock( - endpoints=device[SIG_ENDPOINTS], - ieee="00:11:22:33:44:55:66:77", - manufacturer=device[SIG_MANUFACTURER], - model=device[SIG_MODEL], - node_descriptor=device[SIG_NODE_DESC], - attributes=device.get(DEV_SIG_ATTRIBUTES), - patch_cluster=False, - ) - - cluster_identify = _get_first_identify_cluster(zigpy_device) - if cluster_identify: - cluster_identify.request.reset_mock() - - with patch( - "homeassistant.helpers.entity_platform.EntityPlatform._async_schedule_add_entities_for_entry", - side_effect=EntityPlatform._async_schedule_add_entities_for_entry, - autospec=True, - ) as mock_add_entities: - zha_dev = await zha_device_joined_restored(zigpy_device) - await hass_disable_services.async_block_till_done() - - if cluster_identify: - # We only identify on 
join - should_identify = ( - zha_device_joined_restored.name == "zha_device_joined" - and not zigpy_device.skip_configuration - ) - - if should_identify: - assert cluster_identify.request.mock_calls == [ - mock.call( - False, - cluster_identify.commands_by_name["trigger_effect"].id, - cluster_identify.commands_by_name["trigger_effect"].schema, - effect_id=zigpy.zcl.clusters.general.Identify.EffectIdentifier.Okay, - effect_variant=( - zigpy.zcl.clusters.general.Identify.EffectVariant.Default - ), - expect_reply=True, - manufacturer=None, - tsn=None, - ) - ] - else: - assert cluster_identify.request.mock_calls == [] - - event_cluster_handlers = { - ch.id - for endpoint in zha_dev._endpoints.values() - for ch in endpoint.client_cluster_handlers.values() - } - assert event_cluster_handlers == set(device[DEV_SIG_EVT_CLUSTER_HANDLERS]) - - # Keep track of unhandled entities: they should always be ones we explicitly ignore - created_entities = { - entity.entity_id: entity - for mock_call in mock_add_entities.mock_calls - for entity in mock_call.args[1] - } - unhandled_entities = set(created_entities.keys()) - entity_registry = er.async_get(hass_disable_services) - - for (platform, unique_id), ent_info in device[DEV_SIG_ENT_MAP].items(): - no_tail_id = NO_TAIL_ID.sub("", ent_info[DEV_SIG_ENT_MAP_ID]) - ha_entity_id = entity_registry.async_get_entity_id(platform, "zha", unique_id) - message1 = f"No entity found for platform[{platform}] unique_id[{unique_id}]" - message2 = f"no_tail_id[{no_tail_id}] with entity_id[{ha_entity_id}]" - assert ha_entity_id is not None, f"{message1} {message2}" - assert ha_entity_id.startswith(no_tail_id) - - entity = created_entities[ha_entity_id] - unhandled_entities.remove(ha_entity_id) - - assert entity.platform.domain == platform - assert type(entity).__name__ == ent_info[DEV_SIG_ENT_MAP_CLASS] - # unique_id used for discover is the same for "multi entities" - assert unique_id == entity.unique_id - assert {ch.name for ch in 
entity.cluster_handlers.values()} == set( - ent_info[DEV_SIG_CLUSTER_HANDLERS] - ) - - # All unhandled entities should be ones we explicitly ignore - for entity_id in unhandled_entities: - domain = entity_id.split(".")[0] - assert domain in zha_const.PLATFORMS - assert contains_ignored_suffix(entity_id) - - -def _get_first_identify_cluster(zigpy_device): - for endpoint in list(zigpy_device.endpoints.values())[1:]: - if hasattr(endpoint, "identify"): - return endpoint.identify - - -@mock.patch( - "homeassistant.components.zha.core.discovery.ProbeEndpoint.discover_by_device_type" -) -@mock.patch( - "homeassistant.components.zha.core.discovery.ProbeEndpoint.discover_by_cluster_id" -) -def test_discover_entities(m1, m2) -> None: - """Test discover endpoint class method.""" - endpoint = mock.MagicMock() - disc.PROBE.discover_entities(endpoint) - assert m1.call_count == 1 - assert m1.call_args[0][0] is endpoint - assert m2.call_count == 1 - assert m2.call_args[0][0] is endpoint - - -@pytest.mark.parametrize( - ("device_type", "platform", "hit"), - [ - (zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT, Platform.LIGHT, True), - (zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST, Platform.SWITCH, True), - (zigpy.profiles.zha.DeviceType.SMART_PLUG, Platform.SWITCH, True), - (0xFFFF, None, False), - ], -) -def test_discover_by_device_type(device_type, platform, hit) -> None: - """Test entity discovery by device type.""" - - endpoint = mock.MagicMock(spec_set=Endpoint) - ep_mock = mock.PropertyMock() - ep_mock.return_value.profile_id = 0x0104 - ep_mock.return_value.device_type = device_type - type(endpoint).zigpy_endpoint = ep_mock - - get_entity_mock = mock.MagicMock( - return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) - ) - with mock.patch( - "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", - get_entity_mock, - ): - disc.PROBE.discover_by_device_type(endpoint) - if hit: - assert get_entity_mock.call_count == 1 - assert 
endpoint.claim_cluster_handlers.call_count == 1 - assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed - assert endpoint.async_new_entity.call_count == 1 - assert endpoint.async_new_entity.call_args[0][0] == platform - assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls - - -def test_discover_by_device_type_override() -> None: - """Test entity discovery by device type overriding.""" - - endpoint = mock.MagicMock(spec_set=Endpoint) - ep_mock = mock.PropertyMock() - ep_mock.return_value.profile_id = 0x0104 - ep_mock.return_value.device_type = 0x0100 - type(endpoint).zigpy_endpoint = ep_mock - - overrides = {endpoint.unique_id: {"type": Platform.SWITCH}} - get_entity_mock = mock.MagicMock( - return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) - ) - with ( - mock.patch( - "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", - get_entity_mock, - ), - mock.patch.dict(disc.PROBE._device_configs, overrides, clear=True), - ): - disc.PROBE.discover_by_device_type(endpoint) - assert get_entity_mock.call_count == 1 - assert endpoint.claim_cluster_handlers.call_count == 1 - assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed - assert endpoint.async_new_entity.call_count == 1 - assert endpoint.async_new_entity.call_args[0][0] == Platform.SWITCH - assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls - - -def test_discover_probe_single_cluster() -> None: - """Test entity discovery by single cluster.""" - - endpoint = mock.MagicMock(spec_set=Endpoint) - ep_mock = mock.PropertyMock() - ep_mock.return_value.profile_id = 0x0104 - ep_mock.return_value.device_type = 0x0100 - type(endpoint).zigpy_endpoint = ep_mock - - get_entity_mock = mock.MagicMock( - return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) - ) - cluster_handler_mock = mock.MagicMock(spec_set=cluster_handlers.ClusterHandler) - with mock.patch( - 
"homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", - get_entity_mock, - ): - disc.PROBE.probe_single_cluster(Platform.SWITCH, cluster_handler_mock, endpoint) - - assert get_entity_mock.call_count == 1 - assert endpoint.claim_cluster_handlers.call_count == 1 - assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed - assert endpoint.async_new_entity.call_count == 1 - assert endpoint.async_new_entity.call_args[0][0] == Platform.SWITCH - assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls - assert endpoint.async_new_entity.call_args[0][3] == mock.sentinel.claimed - - -@pytest.mark.parametrize("device_info", DEVICES) -async def test_discover_endpoint( - device_info: dict[str, Any], - zha_device_mock: Callable[..., ZHADevice], - hass: HomeAssistant, -) -> None: - """Test device discovery.""" - - with mock.patch( - "homeassistant.components.zha.core.endpoint.Endpoint.async_new_entity" - ) as new_ent: - device = zha_device_mock( - device_info[SIG_ENDPOINTS], - manufacturer=device_info[SIG_MANUFACTURER], - model=device_info[SIG_MODEL], - node_desc=device_info[SIG_NODE_DESC], - patch_cluster=True, - ) - - assert device_info[DEV_SIG_EVT_CLUSTER_HANDLERS] == sorted( - ch.id - for endpoint in device._endpoints.values() - for ch in endpoint.client_cluster_handlers.values() - ) - - # build a dict of entity_class -> (platform, unique_id, cluster_handlers) tuple - ha_ent_info = {} - for call in new_ent.call_args_list: - platform, entity_cls, unique_id, cluster_handlers = call[0] - if not contains_ignored_suffix(unique_id): - unique_id_head = UNIQUE_ID_HD.match(unique_id).group( - 0 - ) # ieee + endpoint_id - ha_ent_info[(unique_id_head, entity_cls.__name__)] = ( - platform, - unique_id, - cluster_handlers, - ) - - for platform_id, ent_info in device_info[DEV_SIG_ENT_MAP].items(): - platform, unique_id = platform_id - - test_ent_class = ent_info[DEV_SIG_ENT_MAP_CLASS] - test_unique_id_head = 
UNIQUE_ID_HD.match(unique_id).group(0) - assert (test_unique_id_head, test_ent_class) in ha_ent_info - - entity_platform, entity_unique_id, entity_cluster_handlers = ha_ent_info[ - (test_unique_id_head, test_ent_class) - ] - assert platform is entity_platform.value - # unique_id used for discover is the same for "multi entities" - assert unique_id.startswith(entity_unique_id) - assert {ch.name for ch in entity_cluster_handlers} == set( - ent_info[DEV_SIG_CLUSTER_HANDLERS] - ) - - device.async_cleanup_handles() - - -def _ch_mock(cluster): - """Return mock of a cluster_handler with a cluster.""" - cluster_handler = mock.MagicMock() - type(cluster_handler).cluster = mock.PropertyMock( - return_value=cluster(mock.MagicMock()) - ) - return cluster_handler - - -@mock.patch( - ( - "homeassistant.components.zha.core.discovery.ProbeEndpoint" - ".handle_on_off_output_cluster_exception" - ), - new=mock.MagicMock(), -) -@mock.patch( - "homeassistant.components.zha.core.discovery.ProbeEndpoint.probe_single_cluster" -) -def _test_single_input_cluster_device_class(probe_mock): - """Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class.""" - - door_ch = _ch_mock(zigpy.zcl.clusters.closures.DoorLock) - cover_ch = _ch_mock(zigpy.zcl.clusters.closures.WindowCovering) - multistate_ch = _ch_mock(zigpy.zcl.clusters.general.MultistateInput) - - class QuirkedIAS(zigpy.quirks.CustomCluster, zigpy.zcl.clusters.security.IasZone): - pass - - ias_ch = _ch_mock(QuirkedIAS) - - class _Analog(zigpy.quirks.CustomCluster, zigpy.zcl.clusters.general.AnalogInput): - pass - - analog_ch = _ch_mock(_Analog) - - endpoint = mock.MagicMock(spec_set=Endpoint) - endpoint.unclaimed_cluster_handlers.return_value = [ - door_ch, - cover_ch, - multistate_ch, - ias_ch, - ] - - disc.ProbeEndpoint().discover_by_cluster_id(endpoint) - assert probe_mock.call_count == len(endpoint.unclaimed_cluster_handlers()) - probes = ( - (Platform.LOCK, door_ch), - (Platform.COVER, cover_ch), - (Platform.SENSOR, 
multistate_ch), - (Platform.BINARY_SENSOR, ias_ch), - (Platform.SENSOR, analog_ch), - ) - for call, details in zip(probe_mock.call_args_list, probes, strict=False): - platform, ch = details - assert call[0][0] == platform - assert call[0][1] == ch - - -def test_single_input_cluster_device_class_by_cluster_class() -> None: - """Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class.""" - mock_reg = { - zigpy.zcl.clusters.closures.DoorLock.cluster_id: Platform.LOCK, - zigpy.zcl.clusters.closures.WindowCovering.cluster_id: Platform.COVER, - zigpy.zcl.clusters.general.AnalogInput: Platform.SENSOR, - zigpy.zcl.clusters.general.MultistateInput: Platform.SENSOR, - zigpy.zcl.clusters.security.IasZone: Platform.BINARY_SENSOR, - } - - with mock.patch.dict( - zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS, mock_reg, clear=True - ): - _test_single_input_cluster_device_class() - - -@pytest.mark.parametrize( - ("override", "entity_id"), - [ - (None, "light.manufacturer_model_light"), - ("switch", "switch.manufacturer_model_switch"), - ], -) -async def test_device_override( - hass_disable_services, zigpy_device_mock, setup_zha, override, entity_id -) -> None: - """Test device discovery override.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.COLOR_DIMMABLE_LIGHT, - "endpoint_id": 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - } - }, - "00:11:22:33:44:55:66:77", - "manufacturer", - "model", - patch_cluster=False, - ) - - if override is not None: - override = {"device_config": {"00:11:22:33:44:55:66:77-1": {"type": override}}} - - await setup_zha(override) - assert hass_disable_services.states.get(entity_id) is None - zha_gateway = get_zha_gateway(hass_disable_services) - await zha_gateway.async_device_initialized(zigpy_device) - await hass_disable_services.async_block_till_done() - assert hass_disable_services.states.get(entity_id) is not None - - -async def 
test_group_probe_cleanup_called( - hass_disable_services, setup_zha, config_entry -) -> None: - """Test cleanup happens when ZHA is unloaded.""" - await setup_zha() - disc.GROUP_PROBE.cleanup = mock.Mock(wraps=disc.GROUP_PROBE.cleanup) - await hass_disable_services.config_entries.async_unload(config_entry.entry_id) - await hass_disable_services.async_block_till_done() - disc.GROUP_PROBE.cleanup.assert_called() - - -async def test_quirks_v2_entity_discovery( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, -) -> None: - """Test quirks v2 discovery.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, - zigpy.zcl.clusters.general.Groups.cluster_id, - zigpy.zcl.clusters.general.OnOff.cluster_id, - ], - SIG_EP_OUTPUT: [ - zigpy.zcl.clusters.general.Scenes.cluster_id, - ], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - manufacturer="Ikea of Sweden", - model="TRADFRI remote control", - ) - - ( - add_to_registry_v2( - "Ikea of Sweden", "TRADFRI remote control", zigpy.quirks._DEVICE_REGISTRY - ) - .replaces(PowerConfig1CRCluster) - .replaces(ScenesCluster, cluster_type=ClusterType.Client) - .number( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - min_value=1, - max_value=100, - step=1, - unit=UnitOfTime.SECONDS, - multiplier=1, - translation_key="on_off_transition_time", - ) - ) - - zigpy_device = zigpy.quirks._DEVICE_REGISTRY.get_device(zigpy_device) - zigpy_device.endpoints[1].power.PLUGGED_ATTR_READS = { - "battery_voltage": 3, - "battery_percentage_remaining": 100, - } - update_attribute_cache(zigpy_device.endpoints[1].power) - zigpy_device.endpoints[1].on_off.PLUGGED_ATTR_READS = { - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name: 3, - } - update_attribute_cache(zigpy_device.endpoints[1].on_off) - - zha_device = await 
zha_device_joined(zigpy_device) - - entity_id = find_entity_id( - Platform.NUMBER, - zha_device, - hass, - ) - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state is not None - - -async def test_quirks_v2_entity_discovery_e1_curtain( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, -) -> None: - """Test quirks v2 discovery for e1 curtain motor.""" - aqara_E1_device = zigpy_device_mock( - { - 1: { - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_DEVICE, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, - zigpy.zcl.clusters.general.Identify.cluster_id, - zigpy.zcl.clusters.general.Time.cluster_id, - WindowCoveringE1.cluster_id, - XiaomiAqaraDriverE1.cluster_id, - ], - SIG_EP_OUTPUT: [ - zigpy.zcl.clusters.general.Identify.cluster_id, - zigpy.zcl.clusters.general.Time.cluster_id, - zigpy.zcl.clusters.general.Ota.cluster_id, - XiaomiAqaraDriverE1.cluster_id, - ], - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - manufacturer="LUMI", - model="lumi.curtain.agl006", - ) - - class AqaraE1HookState(zigpy.types.enum8): - """Aqara hook state.""" - - Unlocked = 0x00 - Locked = 0x01 - Locking = 0x02 - Unlocking = 0x03 - - class FakeXiaomiAqaraDriverE1(XiaomiAqaraDriverE1): - """Fake XiaomiAqaraDriverE1 cluster.""" - - attributes = XiaomiAqaraDriverE1.attributes.copy() - attributes.update( - { - 0x9999: ("error_detected", zigpy.types.Bool, True), - } - ) - - ( - add_to_registry_v2("LUMI", "lumi.curtain.agl006") - .adds(LocalIlluminanceMeasurementCluster) - .replaces(BasicCluster) - .replaces(XiaomiPowerConfigurationPercent) - .replaces(WindowCoveringE1) - .replaces(FakeXiaomiAqaraDriverE1) - .removes(FakeXiaomiAqaraDriverE1, cluster_type=ClusterType.Client) - .enum( - BasicCluster.AttributeDefs.power_source.name, - BasicCluster.PowerSource, - BasicCluster.cluster_id, - entity_platform=Platform.SENSOR, - entity_type=EntityType.DIAGNOSTIC, - ) - .enum( - 
"hooks_state", - AqaraE1HookState, - FakeXiaomiAqaraDriverE1.cluster_id, - entity_platform=Platform.SENSOR, - entity_type=EntityType.DIAGNOSTIC, - ) - .binary_sensor( - "error_detected", - FakeXiaomiAqaraDriverE1.cluster_id, - translation_key="valve_alarm", - ) - ) - - aqara_E1_device = zigpy.quirks._DEVICE_REGISTRY.get_device(aqara_E1_device) - - aqara_E1_device.endpoints[1].opple_cluster.PLUGGED_ATTR_READS = { - "hand_open": 0, - "positions_stored": 0, - "hooks_lock": 0, - "hooks_state": AqaraE1HookState.Unlocked, - "light_level": 0, - "error_detected": 0, - } - update_attribute_cache(aqara_E1_device.endpoints[1].opple_cluster) - - aqara_E1_device.endpoints[1].basic.PLUGGED_ATTR_READS = { - BasicCluster.AttributeDefs.power_source.name: BasicCluster.PowerSource.Mains_single_phase, - } - update_attribute_cache(aqara_E1_device.endpoints[1].basic) - - WCAttrs = zigpy.zcl.clusters.closures.WindowCovering.AttributeDefs - WCT = zigpy.zcl.clusters.closures.WindowCovering.WindowCoveringType - WCCS = zigpy.zcl.clusters.closures.WindowCovering.ConfigStatus - aqara_E1_device.endpoints[1].window_covering.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 0, - WCAttrs.window_covering_type.name: WCT.Drapery, - WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), - } - update_attribute_cache(aqara_E1_device.endpoints[1].window_covering) - - zha_device = await zha_device_joined(aqara_E1_device) - - power_source_entity_id = find_entity_id( - Platform.SENSOR, - zha_device, - hass, - qualifier=BasicCluster.AttributeDefs.power_source.name, - ) - assert power_source_entity_id is not None - state = hass.states.get(power_source_entity_id) - assert state is not None - assert state.state == BasicCluster.PowerSource.Mains_single_phase.name - - hook_state_entity_id = find_entity_id( - Platform.SENSOR, - zha_device, - hass, - qualifier="hooks_state", - ) - assert hook_state_entity_id is not None - state = hass.states.get(hook_state_entity_id) - assert state 
is not None - assert state.state == AqaraE1HookState.Unlocked.name - - error_detected_entity_id = find_entity_id( - Platform.BINARY_SENSOR, - zha_device, - hass, - ) - assert error_detected_entity_id is not None - state = hass.states.get(error_detected_entity_id) - assert state is not None - assert state.state == STATE_OFF - - -def _get_test_device( - zigpy_device_mock, - manufacturer: str, - model: str, - augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry] - | None = None, -): - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, - zigpy.zcl.clusters.general.Groups.cluster_id, - zigpy.zcl.clusters.general.OnOff.cluster_id, - ], - SIG_EP_OUTPUT: [ - zigpy.zcl.clusters.general.Scenes.cluster_id, - ], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - manufacturer=manufacturer, - model=model, - ) - - v2_quirk = ( - add_to_registry_v2(manufacturer, model, zigpy.quirks._DEVICE_REGISTRY) - .replaces(PowerConfig1CRCluster) - .replaces(ScenesCluster, cluster_type=ClusterType.Client) - .number( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - endpoint_id=3, - min_value=1, - max_value=100, - step=1, - unit=UnitOfTime.SECONDS, - multiplier=1, - translation_key="on_off_transition_time", - ) - .number( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.Time.cluster_id, - min_value=1, - max_value=100, - step=1, - unit=UnitOfTime.SECONDS, - multiplier=1, - translation_key="on_off_transition_time", - ) - .sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - entity_type=EntityType.CONFIG, - translation_key="analog_input", - ) - ) - - if augment_method: - v2_quirk = augment_method(v2_quirk) - - zigpy_device = 
zigpy.quirks._DEVICE_REGISTRY.get_device(zigpy_device) - zigpy_device.endpoints[1].power.PLUGGED_ATTR_READS = { - "battery_voltage": 3, - "battery_percentage_remaining": 100, - } - update_attribute_cache(zigpy_device.endpoints[1].power) - zigpy_device.endpoints[1].on_off.PLUGGED_ATTR_READS = { - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name: 3, - } - update_attribute_cache(zigpy_device.endpoints[1].on_off) - return zigpy_device - - -async def test_quirks_v2_entity_no_metadata( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test quirks v2 discovery skipped - no metadata.""" - - zigpy_device = _get_test_device( - zigpy_device_mock, "Ikea of Sweden2", "TRADFRI remote control2" - ) - setattr(zigpy_device, "_exposes_metadata", {}) - zha_device = await zha_device_joined(zigpy_device) - assert ( - f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not expose any quirks v2 entities" - in caplog.text - ) - - -async def test_quirks_v2_entity_discovery_errors( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test quirks v2 discovery skipped - errors.""" - - zigpy_device = _get_test_device( - zigpy_device_mock, "Ikea of Sweden3", "TRADFRI remote control3" - ) - zha_device = await zha_device_joined(zigpy_device) - - m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not have an" - m2 = " endpoint with id: 3 - unable to create entity with cluster" - m3 = " details: (3, 6, )" - assert f"{m1}{m2}{m3}" in caplog.text - - time_cluster_id = zigpy.zcl.clusters.general.Time.cluster_id - - m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not have a" - m2 = f" cluster with id: {time_cluster_id} - unable to create entity with " - m3 = f"cluster details: (1, {time_cluster_id}, )" - assert f"{m1}{m2}{m3}" in caplog.text - - # fmt: off - entity_details = ( - "{'cluster_details': (1, 6, ), 'entity_metadata': " - 
"ZCLSensorMetadata(entity_platform=, " - "entity_type=, cluster_id=6, endpoint_id=1, " - "cluster_type=, initially_disabled=False, " - "attribute_initialized_from_cache=True, translation_key='analog_input', " - "attribute_name='off_wait_time', divisor=1, multiplier=1, " - "unit=None, device_class=None, state_class=None)}" - ) - # fmt: on - - m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} has an entity with " - m2 = f"details: {entity_details} that does not have an entity class mapping - " - m3 = "unable to create entity" - assert f"{m1}{m2}{m3}" in caplog.text - - -DEVICE_CLASS_TYPES = [NumberMetadata, BinarySensorMetadata, ZCLSensorMetadata] - - -def validate_device_class_unit( - quirk: QuirksV2RegistryEntry, - entity_metadata: EntityMetadata, - platform: Platform, - translations: dict, -) -> None: - """Ensure device class and unit are used correctly.""" - if ( - hasattr(entity_metadata, "unit") - and entity_metadata.unit is not None - and hasattr(entity_metadata, "device_class") - and entity_metadata.device_class is not None - ): - m1 = "device_class and unit are both set - unit: " - m2 = f"{entity_metadata.unit} device_class: " - m3 = f"{entity_metadata.device_class} for {platform.name} " - raise ValueError(f"{m1}{m2}{m3}{quirk}") - - -def validate_translation_keys( - quirk: QuirksV2RegistryEntry, - entity_metadata: EntityMetadata, - platform: Platform, - translations: dict, -) -> None: - """Ensure translation keys exist for all v2 quirks.""" - if isinstance(entity_metadata, ZCLCommandButtonMetadata): - default_translation_key = entity_metadata.command_name - else: - default_translation_key = entity_metadata.attribute_name - translation_key = entity_metadata.translation_key or default_translation_key - - if ( - translation_key is not None - and translation_key not in translations["entity"][platform] - ): - raise ValueError( - f"Missing translation key: {translation_key} for {platform.name} {quirk}" - ) - - -def validate_translation_keys_device_class( - 
quirk: QuirksV2RegistryEntry, - entity_metadata: EntityMetadata, - platform: Platform, - translations: dict, -) -> None: - """Validate translation keys and device class usage.""" - if isinstance(entity_metadata, ZCLCommandButtonMetadata): - default_translation_key = entity_metadata.command_name - else: - default_translation_key = entity_metadata.attribute_name - translation_key = entity_metadata.translation_key or default_translation_key - - metadata_type = type(entity_metadata) - if metadata_type in DEVICE_CLASS_TYPES: - device_class = entity_metadata.device_class - if device_class is not None and translation_key is not None: - m1 = "translation_key and device_class are both set - translation_key: " - m2 = f"{translation_key} device_class: {device_class} for {platform.name} " - raise ValueError(f"{m1}{m2}{quirk}") - - -def validate_metadata(validator: Callable) -> None: - """Ensure v2 quirks metadata does not violate HA rules.""" - all_v2_quirks = itertools.chain.from_iterable( - zigpy.quirks._DEVICE_REGISTRY._registry_v2.values() - ) - translations = load_json("homeassistant/components/zha/strings.json") - for quirk in all_v2_quirks: - for entity_metadata in quirk.entity_metadata: - platform = Platform(entity_metadata.entity_platform.value) - validator(quirk, entity_metadata, platform, translations) - - -def bad_translation_key(v2_quirk: QuirksV2RegistryEntry) -> QuirksV2RegistryEntry: - """Introduce a bad translation key.""" - return v2_quirk.sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - entity_type=EntityType.CONFIG, - translation_key="missing_translation_key", - ) - - -def bad_device_class_unit_combination( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class and unit combination.""" - return v2_quirk.sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - 
entity_type=EntityType.CONFIG, - unit="invalid", - device_class="invalid", - translation_key="analog_input", - ) - - -def bad_device_class_translation_key_usage( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class and translation key combination.""" - return v2_quirk.sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - entity_type=EntityType.CONFIG, - translation_key="invalid", - device_class="invalid", - ) - - -@pytest.mark.parametrize( - ("augment_method", "validate_method", "expected_exception_string"), - [ - ( - bad_translation_key, - validate_translation_keys, - "Missing translation key: missing_translation_key", - ), - ( - bad_device_class_unit_combination, - validate_device_class_unit, - "cannot have both unit and device_class", - ), - ( - bad_device_class_translation_key_usage, - validate_translation_keys_device_class, - "cannot have both a translation_key and a device_class", - ), - ], -) -async def test_quirks_v2_metadata_errors( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry], - validate_method: Callable, - expected_exception_string: str, -) -> None: - """Ensure all v2 quirks translation keys exist.""" - - # no error yet - validate_metadata(validate_method) - - # ensure the error is caught and raised - try: - # introduce an error - zigpy_device = _get_test_device( - zigpy_device_mock, - "Ikea of Sweden4", - "TRADFRI remote control4", - augment_method=augment_method, - ) - await zha_device_joined(zigpy_device) - - validate_metadata(validate_method) - # if the device was created we remove it - # so we don't pollute the rest of the tests - zigpy.quirks._DEVICE_REGISTRY.remove(zigpy_device) - except ValueError: - # if the device was not created we remove it - # so we don't pollute the rest of the tests - zigpy.quirks._DEVICE_REGISTRY._registry_v2.pop( 
- ( - "Ikea of Sweden4", - "TRADFRI remote control4", - ) - ) - with pytest.raises(ValueError, match=expected_exception_string): - raise - - -class BadDeviceClass(enum.Enum): - """Bad device class.""" - - BAD = "bad" - - -def bad_binary_sensor_device_class( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class on a binary sensor.""" - - return v2_quirk.binary_sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.on_off.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - device_class=BadDeviceClass.BAD, - ) - - -def bad_sensor_device_class( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class on a sensor.""" - - return v2_quirk.sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - device_class=BadDeviceClass.BAD, - ) - - -def bad_number_device_class( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class on a number.""" - - return v2_quirk.number( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.on_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - device_class=BadDeviceClass.BAD, - ) - - -ERROR_ROOT = "Quirks provided an invalid device class" - - -@pytest.mark.parametrize( - ("augment_method", "expected_exception_string"), - [ - ( - bad_binary_sensor_device_class, - f"{ERROR_ROOT}: BadDeviceClass.BAD for platform binary_sensor", - ), - ( - bad_sensor_device_class, - f"{ERROR_ROOT}: BadDeviceClass.BAD for platform sensor", - ), - ( - bad_number_device_class, - f"{ERROR_ROOT}: BadDeviceClass.BAD for platform number", - ), - ], -) -async def test_quirks_v2_metadata_bad_device_classes( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - caplog: pytest.LogCaptureFixture, - augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry], - expected_exception_string: str, -) -> None: - """Test bad quirks v2 device classes.""" - - # 
introduce an error - zigpy_device = _get_test_device( - zigpy_device_mock, - "Ikea of Sweden4", - "TRADFRI remote control4", - augment_method=augment_method, - ) - await zha_device_joined(zigpy_device) - - assert expected_exception_string in caplog.text - - # remove the device so we don't pollute the rest of the tests - zigpy.quirks._DEVICE_REGISTRY.remove(zigpy_device) diff --git a/tests/components/zha/test_fan.py b/tests/components/zha/test_fan.py index 095f505876e..0105c569653 100644 --- a/tests/components/zha/test_fan.py +++ b/tests/components/zha/test_fan.py @@ -1,32 +1,25 @@ """Test ZHA fan.""" -from unittest.mock import AsyncMock, call, patch +from unittest.mock import call, patch import pytest -import zhaquirks.ikea.starkvind -from zigpy.device import Device -from zigpy.exceptions import ZigbeeException +from zha.application.platforms.fan.const import PRESET_MODE_ON from zigpy.profiles import zha from zigpy.zcl.clusters import general, hvac -import zigpy.zcl.foundation as zcl_f from homeassistant.components.fan import ( ATTR_PERCENTAGE, - ATTR_PERCENTAGE_STEP, ATTR_PRESET_MODE, DOMAIN as FAN_DOMAIN, SERVICE_SET_PERCENTAGE, SERVICE_SET_PRESET_MODE, NotValidPresetModeError, ) -from homeassistant.components.zha.core.device import ZHADevice -from homeassistant.components.zha.core.discovery import GROUP_PROBE -from homeassistant.components.zha.core.group import GroupMember -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.components.zha.fan import ( - PRESET_MODE_AUTO, - PRESET_MODE_ON, - PRESET_MODE_SMART, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -34,25 +27,15 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_OFF, STATE_ON, - STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from 
homeassistant.setup import async_setup_component -from .common import ( - async_enable_traffic, - async_find_group_entity_id, - async_test_rejoin, - async_wait_for_updates, - find_entity_id, - send_attributes_report, -) +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" -IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" +ON = 1 +OFF = 0 @pytest.fixture(autouse=True) @@ -75,122 +58,49 @@ def fan_platform_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Fan zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [hvac.Fan.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" - ) - - -@pytest.fixture -async def coordinator(hass, zigpy_device_mock, zha_device_joined): +async def test_fan(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: """Test ZHA fan platform.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock( { 1: { - SIG_EP_INPUT: [general.Groups.cluster_id], + SIG_EP_INPUT: [general.Basic.cluster_id, hvac.Fan.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, SIG_EP_PROFILE: zha.PROFILE_ID, } }, - ieee="00:15:8d:00:02:32:4f:32", - nwk=0x0000, - node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await 
hass.async_block_till_done(wait_background_tasks=True) -@pytest.fixture -async def device_fan_1(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA fan platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Groups.cluster_id, - general.OnOff.cluster_id, - hvac.Fan.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - }, - }, - ieee=IEEE_GROUPABLE_DEVICE, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zha_device - - -@pytest.fixture -async def device_fan_2(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA fan platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Groups.cluster_id, - general.OnOff.cluster_id, - hvac.Fan.cluster_id, - general.LevelControl.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - }, - }, - ieee=IEEE_GROUPABLE_DEVICE2, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zha_device - - -async def test_fan( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device -) -> None: - """Test ZHA fan platform.""" - - zha_device = await zha_device_joined_restored(zigpy_device) - cluster = zigpy_device.endpoints.get(1).fan - entity_id = find_entity_id(Platform.FAN, zha_device, hass) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.FAN, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].fan assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the fan was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow 
through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on at fan - await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3}) + await send_attributes_report( + hass, + cluster, + {hvac.Fan.AttributeDefs.fan_mode.id: hvac.FanMode.Low}, + ) assert hass.states.get(entity_id).state == STATE_ON # turn off at fan - await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) + await send_attributes_report( + hass, cluster, {hvac.Fan.AttributeDefs.fan_mode.id: hvac.FanMode.Off} + ) assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA @@ -230,11 +140,8 @@ async def test_fan( assert exc.value.translation_key == "not_valid_preset_mode" assert len(cluster.write_attributes.mock_calls) == 0 - # test adding new fan to the network and HA - await async_test_rejoin(hass, zigpy_device, [cluster], (1,)) - -async def async_turn_on(hass, entity_id, percentage=None): +async def async_turn_on(hass: HomeAssistant, entity_id, percentage=None): """Turn fan on.""" data = { key: value @@ -245,14 +152,14 @@ async def async_turn_on(hass, entity_id, percentage=None): await hass.services.async_call(Platform.FAN, SERVICE_TURN_ON, data, blocking=True) -async def async_turn_off(hass, entity_id): +async def async_turn_off(hass: HomeAssistant, entity_id): """Turn fan off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(Platform.FAN, SERVICE_TURN_OFF, data, blocking=True) -async def async_set_percentage(hass, entity_id, percentage=None): +async def async_set_percentage(hass: HomeAssistant, entity_id, percentage=None): """Set percentage for specified fan.""" data = { key: value @@ -265,7 +172,7 @@ async def async_set_percentage(hass, entity_id, percentage=None): ) -async def async_set_preset_mode(hass, entity_id, preset_mode=None): +async def async_set_preset_mode(hass: HomeAssistant, 
entity_id, preset_mode=None): """Set preset_mode for specified fan.""" data = { key: value @@ -276,633 +183,3 @@ async def async_set_preset_mode(hass, entity_id, preset_mode=None): await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PRESET_MODE, data, blocking=True ) - - -@patch( - "zigpy.zcl.clusters.hvac.Fan.write_attributes", - new=AsyncMock(return_value=zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]), -) -@patch( - "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_zha_group_fan_entity( - hass: HomeAssistant, device_fan_1, device_fan_2, coordinator -) -> None: - """Test the fan entity for a ZHA group.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_fan_1._zha_gateway = zha_gateway - device_fan_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee] - members = [GroupMember(device_fan_1.ieee, 1), GroupMember(device_fan_2.ieee, 1)] - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - entity_domains = GROUP_PROBE.determine_entity_domains(hass, zha_group) - assert len(entity_domains) == 2 - - assert Platform.LIGHT in entity_domains - assert Platform.FAN in entity_domains - - entity_id = async_find_group_entity_id(hass, Platform.FAN, zha_group) - assert hass.states.get(entity_id) is not None - - group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id] - - dev1_fan_cluster = device_fan_1.device.endpoints[1].fan - dev2_fan_cluster = device_fan_2.device.endpoints[1].fan - - await async_enable_traffic(hass, 
[device_fan_1, device_fan_2], enabled=False) - await async_wait_for_updates(hass) - # test that the fans were created and that they are unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_fan_1, device_fan_2]) - await async_wait_for_updates(hass) - # test that the fan group entity was created and is off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - group_fan_cluster.write_attributes.reset_mock() - await async_turn_on(hass, entity_id) - await hass.async_block_till_done() - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 2} - - # turn off from HA - group_fan_cluster.write_attributes.reset_mock() - await async_turn_off(hass, entity_id) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 0} - - # change speed from HA - group_fan_cluster.write_attributes.reset_mock() - await async_set_percentage(hass, entity_id, percentage=100) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 3} - - # change preset mode from HA - group_fan_cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_ON) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 4} - - # change preset mode from HA - group_fan_cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 5} - - # change preset mode from HA - 
group_fan_cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_SMART) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 6} - - # test some of the group logic to make sure we key off states correctly - await send_attributes_report(hass, dev1_fan_cluster, {0: 0}) - await send_attributes_report(hass, dev2_fan_cluster, {0: 0}) - await hass.async_block_till_done() - - # test that group fan is off - assert hass.states.get(entity_id).state == STATE_OFF - - await send_attributes_report(hass, dev2_fan_cluster, {0: 2}) - await async_wait_for_updates(hass) - - # test that group fan is speed medium - assert hass.states.get(entity_id).state == STATE_ON - - await send_attributes_report(hass, dev2_fan_cluster, {0: 0}) - await async_wait_for_updates(hass) - - # test that group fan is now off - assert hass.states.get(entity_id).state == STATE_OFF - - -@patch( - "zigpy.zcl.clusters.hvac.Fan.write_attributes", - new=AsyncMock(side_effect=ZigbeeException), -) -@patch( - "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_zha_group_fan_entity_failure_state( - hass: HomeAssistant, - device_fan_1, - device_fan_2, - coordinator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the fan entity for a ZHA group when writing attributes generates an exception.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_fan_1._zha_gateway = zha_gateway - device_fan_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee] - members = [GroupMember(device_fan_1.ieee, 1), GroupMember(device_fan_2.ieee, 1)] - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await 
hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - entity_domains = GROUP_PROBE.determine_entity_domains(hass, zha_group) - assert len(entity_domains) == 2 - - assert Platform.LIGHT in entity_domains - assert Platform.FAN in entity_domains - - entity_id = async_find_group_entity_id(hass, Platform.FAN, zha_group) - assert hass.states.get(entity_id) is not None - - group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id] - - await async_enable_traffic(hass, [device_fan_1, device_fan_2], enabled=False) - await async_wait_for_updates(hass) - # test that the fans were created and that they are unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_fan_1, device_fan_2]) - await async_wait_for_updates(hass) - # test that the fan group entity was created and is off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - group_fan_cluster.write_attributes.reset_mock() - - with pytest.raises(HomeAssistantError): - await async_turn_on(hass, entity_id) - - await hass.async_block_till_done() - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 2} - - -@pytest.mark.parametrize( - ("plug_read", "expected_state", "expected_percentage"), - [ - (None, STATE_OFF, None), - ({"fan_mode": 0}, STATE_OFF, 0), - ({"fan_mode": 1}, STATE_ON, 33), - ({"fan_mode": 2}, STATE_ON, 66), - ({"fan_mode": 3}, STATE_ON, 100), - ], -) -async def test_fan_init( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device, - plug_read, - expected_state, - expected_percentage, -) -> None: - """Test ZHA fan platform.""" - - cluster = 
zigpy_device.endpoints.get(1).fan - cluster.PLUGGED_ATTR_READS = plug_read - - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == expected_state - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == expected_percentage - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - - -async def test_fan_update_entity( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device, -) -> None: - """Test ZHA fan platform.""" - - cluster = zigpy_device.endpoints.get(1).fan - cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} - - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 2 - else: - assert cluster.read_attributes.await_count == 4 - - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_OFF - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 3 - else: - assert cluster.read_attributes.await_count == 5 - - cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_ON - assert 
hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 33 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 4 - else: - assert cluster.read_attributes.await_count == 6 - - -@pytest.fixture -def zigpy_device_ikea(zigpy_device_mock): - """Ikea fan zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.Groups.cluster_id, - general.Scenes.cluster_id, - 64637, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COMBINED_INTERFACE, - SIG_EP_PROFILE: zha.PROFILE_ID, - }, - } - return zigpy_device_mock( - endpoints, - manufacturer="IKEA of Sweden", - model="STARKVIND Air purifier", - quirk=zhaquirks.ikea.starkvind.IkeaSTARKVIND, - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - ) - - -async def test_fan_ikea( - hass: HomeAssistant, - zha_device_joined_restored: ZHADevice, - zigpy_device_ikea: Device, -) -> None: - """Test ZHA fan Ikea platform.""" - zha_device = await zha_device_joined_restored(zigpy_device_ikea) - cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the fan was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on at fan - await send_attributes_report(hass, cluster, {6: 1}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn off at 
fan - await send_attributes_report(hass, cluster, {6: 0}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - cluster.write_attributes.reset_mock() - await async_turn_on(hass, entity_id) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 1}, manufacturer=None) - ] - - # turn off from HA - cluster.write_attributes.reset_mock() - await async_turn_off(hass, entity_id) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 0}, manufacturer=None) - ] - - # change speed from HA - cluster.write_attributes.reset_mock() - await async_set_percentage(hass, entity_id, percentage=100) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 10}, manufacturer=None) - ] - - # change preset_mode from HA - cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 1}, manufacturer=None) - ] - - # set invalid preset_mode from HA - cluster.write_attributes.reset_mock() - with pytest.raises(NotValidPresetModeError) as exc: - await async_set_preset_mode( - hass, entity_id, preset_mode="invalid does not exist" - ) - assert exc.value.translation_key == "not_valid_preset_mode" - assert len(cluster.write_attributes.mock_calls) == 0 - - # test adding new fan to the network and HA - await async_test_rejoin(hass, zigpy_device_ikea, [cluster], (9,)) - - -@pytest.mark.parametrize( - ( - "ikea_plug_read", - "ikea_expected_state", - "ikea_expected_percentage", - "ikea_preset_mode", - ), - [ - (None, STATE_OFF, None, None), - ({"fan_mode": 0}, STATE_OFF, 0, None), - ({"fan_mode": 1}, STATE_ON, 10, PRESET_MODE_AUTO), - ({"fan_mode": 10}, STATE_ON, 20, "Speed 1"), - ({"fan_mode": 15}, STATE_ON, 30, "Speed 1.5"), - ({"fan_mode": 20}, STATE_ON, 40, "Speed 2"), - ({"fan_mode": 25}, STATE_ON, 50, "Speed 2.5"), - ({"fan_mode": 30}, STATE_ON, 60, "Speed 3"), - ({"fan_mode": 35}, STATE_ON, 70, "Speed 3.5"), - 
({"fan_mode": 40}, STATE_ON, 80, "Speed 4"), - ({"fan_mode": 45}, STATE_ON, 90, "Speed 4.5"), - ({"fan_mode": 50}, STATE_ON, 100, "Speed 5"), - ], -) -async def test_fan_ikea_init( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device_ikea, - ikea_plug_read, - ikea_expected_state, - ikea_expected_percentage, - ikea_preset_mode, -) -> None: - """Test ZHA fan platform.""" - cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier - cluster.PLUGGED_ATTR_READS = ikea_plug_read - - zha_device = await zha_device_joined_restored(zigpy_device_ikea) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == ikea_expected_state - assert ( - hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] - == ikea_expected_percentage - ) - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] == ikea_preset_mode - - -async def test_fan_ikea_update_entity( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device_ikea, -) -> None: - """Test ZHA fan platform.""" - cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier - cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} - - zha_device = await zha_device_joined_restored(zigpy_device_ikea) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 10 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 3 - else: - assert cluster.read_attributes.await_count == 6 - - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - 
) - assert hass.states.get(entity_id).state == STATE_OFF - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 4 - else: - assert cluster.read_attributes.await_count == 7 - - cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_ON - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 10 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is PRESET_MODE_AUTO - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 10 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 5 - else: - assert cluster.read_attributes.await_count == 8 - - -@pytest.fixture -def zigpy_device_kof(zigpy_device_mock): - """Fan by King of Fans zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.Groups.cluster_id, - general.Scenes.cluster_id, - 64637, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COMBINED_INTERFACE, - SIG_EP_PROFILE: zha.PROFILE_ID, - }, - } - return zigpy_device_mock( - endpoints, - manufacturer="King Of Fans, Inc.", - model="HBUniversalCFRemote", - quirk=zhaquirks.kof.kof_mr101z.CeilingFan, - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - ) - - -async def test_fan_kof( - hass: HomeAssistant, - zha_device_joined_restored: ZHADevice, - zigpy_device_kof: Device, -) -> None: - """Test ZHA fan platform for King of Fans.""" - zha_device = await zha_device_joined_restored(zigpy_device_kof) - cluster = zigpy_device_kof.endpoints.get(1).fan - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the fan was 
created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on at fan - await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn off at fan - await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - cluster.write_attributes.reset_mock() - await async_turn_on(hass, entity_id) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 2}, manufacturer=None) - ] - - # turn off from HA - cluster.write_attributes.reset_mock() - await async_turn_off(hass, entity_id) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 0}, manufacturer=None) - ] - - # change speed from HA - cluster.write_attributes.reset_mock() - await async_set_percentage(hass, entity_id, percentage=100) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 4}, manufacturer=None) - ] - - # change preset_mode from HA - cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_SMART) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 6}, manufacturer=None) - ] - - # set invalid preset_mode from HA - cluster.write_attributes.reset_mock() - with pytest.raises(NotValidPresetModeError) as exc: - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) - assert exc.value.translation_key == "not_valid_preset_mode" - assert len(cluster.write_attributes.mock_calls) == 0 - - # test adding new fan to the network and HA - await async_test_rejoin(hass, zigpy_device_kof, [cluster], (1,)) - - -@pytest.mark.parametrize( - ("plug_read", "expected_state", 
"expected_percentage", "expected_preset"), - [ - (None, STATE_OFF, None, None), - ({"fan_mode": 0}, STATE_OFF, 0, None), - ({"fan_mode": 1}, STATE_ON, 25, None), - ({"fan_mode": 2}, STATE_ON, 50, None), - ({"fan_mode": 3}, STATE_ON, 75, None), - ({"fan_mode": 4}, STATE_ON, 100, None), - ({"fan_mode": 6}, STATE_ON, None, PRESET_MODE_SMART), - ], -) -async def test_fan_kof_init( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device_kof, - plug_read, - expected_state, - expected_percentage, - expected_preset, -) -> None: - """Test ZHA fan platform for King of Fans.""" - - cluster = zigpy_device_kof.endpoints.get(1).fan - cluster.PLUGGED_ATTR_READS = plug_read - - zha_device = await zha_device_joined_restored(zigpy_device_kof) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == expected_state - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == expected_percentage - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] == expected_preset - - -async def test_fan_kof_update_entity( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device_kof, -) -> None: - """Test ZHA fan platform for King of Fans.""" - - cluster = zigpy_device_kof.endpoints.get(1).fan - cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} - - zha_device = await zha_device_joined_restored(zigpy_device_kof) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 4 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 2 - else: - assert cluster.read_attributes.await_count == 4 - - await async_setup_component(hass, 
"homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_OFF - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 3 - else: - assert cluster.read_attributes.await_count == 5 - - cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_ON - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 25 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 4 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 4 - else: - assert cluster.read_attributes.await_count == 6 diff --git a/tests/components/zha/test_gateway.py b/tests/components/zha/test_gateway.py deleted file mode 100644 index 3a576ed6e55..00000000000 --- a/tests/components/zha/test_gateway.py +++ /dev/null @@ -1,404 +0,0 @@ -"""Test ZHA Gateway.""" - -import asyncio -from unittest.mock import MagicMock, PropertyMock, patch - -import pytest -from zigpy.application import ControllerApplication -from zigpy.profiles import zha -import zigpy.types -from zigpy.zcl.clusters import general, lighting -import zigpy.zdo.types - -from homeassistant.components.zha.core.gateway import ZHAGateway -from homeassistant.components.zha.core.group import GroupMember -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from .common import async_find_group_entity_id -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE - -from tests.common import MockConfigEntry - 
-IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" -IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" - - -@pytest.fixture -def zigpy_dev_basic(zigpy_device_mock): - """Zigpy device with just a basic cluster.""" - return zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - ) - - -@pytest.fixture(autouse=True) -def required_platform_only(): - """Only set up the required and required base platforms to speed up tests.""" - with patch( - "homeassistant.components.zha.PLATFORMS", - ( - Platform.SENSOR, - Platform.LIGHT, - Platform.DEVICE_TRACKER, - Platform.NUMBER, - Platform.SELECT, - ), - ): - yield - - -@pytest.fixture -async def zha_dev_basic(hass, zha_device_restored, zigpy_dev_basic): - """ZHA device with just a basic cluster.""" - - return await zha_device_restored(zigpy_dev_basic) - - -@pytest.fixture -async def coordinator(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee="00:15:8d:00:02:32:4f:32", - nwk=0x0000, - node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_1(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE, - ) - zha_device = await zha_device_joined(zigpy_device) - 
zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_2(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE2, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -async def test_device_left(hass: HomeAssistant, zigpy_dev_basic, zha_dev_basic) -> None: - """Device leaving the network should become unavailable.""" - - assert zha_dev_basic.available is True - - get_zha_gateway(hass).device_left(zigpy_dev_basic) - await hass.async_block_till_done() - assert zha_dev_basic.available is False - - -async def test_gateway_group_methods( - hass: HomeAssistant, device_light_1, device_light_2, coordinator -) -> None: - """Test creating a group with 2 members.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - device_light_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] - members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - - entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) - assert hass.states.get(entity_id) is not None - - # test get group by name 
- assert zha_group == zha_gateway.async_get_group_by_name(zha_group.name) - - # test removing a group - await zha_gateway.async_remove_zigpy_group(zha_group.group_id) - await hass.async_block_till_done() - - # we shouldn't have the group anymore - assert zha_gateway.async_get_group_by_name(zha_group.name) is None - - # the group entity should be cleaned up - assert entity_id not in hass.states.async_entity_ids(Platform.LIGHT) - - # test creating a group with 1 member - zha_group = await zha_gateway.async_create_zigpy_group( - "Test Group", [GroupMember(device_light_1.ieee, 1)] - ) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 1 - for member in zha_group.members: - assert member.device.ieee in [device_light_1.ieee] - - # the group entity should not have been cleaned up - assert entity_id not in hass.states.async_entity_ids(Platform.LIGHT) - - with patch("zigpy.zcl.Cluster.request", side_effect=TimeoutError): - await zha_group.members[0].async_remove_from_group() - assert len(zha_group.members) == 1 - for member in zha_group.members: - assert member.device.ieee in [device_light_1.ieee] - - -async def test_gateway_create_group_with_id( - hass: HomeAssistant, device_light_1, coordinator -) -> None: - """Test creating a group with a specific ID.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - - zha_group = await zha_gateway.async_create_zigpy_group( - "Test Group", [GroupMember(device_light_1.ieee, 1)], group_id=0x1234 - ) - await hass.async_block_till_done() - - assert len(zha_group.members) == 1 - assert zha_group.members[0].device is device_light_1 - assert zha_group.group_id == 0x1234 - - -@patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_load_devices", - MagicMock(), -) -@patch( - 
"homeassistant.components.zha.core.gateway.ZHAGateway.async_load_groups", - MagicMock(), -) -@pytest.mark.parametrize( - ("device_path", "thread_state", "config_override"), - [ - ("/dev/ttyUSB0", True, {}), - ("socket://192.168.1.123:9999", False, {}), - ("socket://192.168.1.123:9999", True, {"use_thread": True}), - ], -) -async def test_gateway_initialize_bellows_thread( - device_path: str, - thread_state: bool, - config_override: dict, - hass: HomeAssistant, - zigpy_app_controller: ControllerApplication, - config_entry: MockConfigEntry, -) -> None: - """Test ZHA disabling the UART thread when connecting to a TCP coordinator.""" - data = dict(config_entry.data) - data["device"]["path"] = device_path - config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(config_entry, data=data) - - zha_gateway = ZHAGateway(hass, {"zigpy_config": config_override}, config_entry) - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ) as mock_new: - await zha_gateway.async_initialize() - - assert mock_new.mock_calls[-1].kwargs["config"]["use_thread"] is thread_state - - await zha_gateway.shutdown() - - -@pytest.mark.parametrize( - ("device_path", "config_override", "expected_channel"), - [ - ("/dev/ttyUSB0", {}, None), - ("socket://192.168.1.123:9999", {}, None), - ("socket://192.168.1.123:9999", {"network": {"channel": 20}}, 20), - ("socket://core-silabs-multiprotocol:9999", {}, 15), - ("socket://core-silabs-multiprotocol:9999", {"network": {"channel": 20}}, 20), - ], -) -async def test_gateway_force_multi_pan_channel( - device_path: str, - config_override: dict, - expected_channel: int | None, - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test ZHA disabling the UART thread when connecting to a TCP coordinator.""" - data = dict(config_entry.data) - data["device"]["path"] = device_path - config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(config_entry, 
data=data) - - zha_gateway = ZHAGateway(hass, {"zigpy_config": config_override}, config_entry) - - _, config = zha_gateway.get_application_controller_data() - assert config["network"]["channel"] == expected_channel - - -async def test_single_reload_on_multiple_connection_loss( - hass: HomeAssistant, - zigpy_app_controller: ControllerApplication, - config_entry: MockConfigEntry, -) -> None: - """Test that we only reload once when we lose the connection multiple times.""" - config_entry.add_to_hass(hass) - - zha_gateway = ZHAGateway(hass, {}, config_entry) - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ): - await zha_gateway.async_initialize() - - with patch.object( - hass.config_entries, "async_reload", wraps=hass.config_entries.async_reload - ) as mock_reload: - zha_gateway.connection_lost(RuntimeError()) - zha_gateway.connection_lost(RuntimeError()) - zha_gateway.connection_lost(RuntimeError()) - zha_gateway.connection_lost(RuntimeError()) - zha_gateway.connection_lost(RuntimeError()) - - assert len(mock_reload.mock_calls) == 1 - - await hass.async_block_till_done() - - -@pytest.mark.parametrize("radio_concurrency", [1, 2, 8]) -async def test_startup_concurrency_limit( - radio_concurrency: int, - hass: HomeAssistant, - zigpy_app_controller: ControllerApplication, - config_entry: MockConfigEntry, - zigpy_device_mock, -) -> None: - """Test ZHA gateway limits concurrency on startup.""" - config_entry.add_to_hass(hass) - zha_gateway = ZHAGateway(hass, {}, config_entry) - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ): - await zha_gateway.async_initialize() - - for i in range(50): - zigpy_dev = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - 
SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=f"11:22:33:44:{i:08x}", - nwk=0x1234 + i, - ) - zigpy_dev.node_desc.mac_capability_flags |= ( - zigpy.zdo.types.NodeDescriptor.MACCapabilityFlags.MainsPowered - ) - - zha_gateway._async_get_or_create_device(zigpy_dev) - - # Keep track of request concurrency during initialization - current_concurrency = 0 - concurrencies = [] - - async def mock_send_packet(*args, **kwargs): - nonlocal current_concurrency - - current_concurrency += 1 - concurrencies.append(current_concurrency) - - await asyncio.sleep(0.001) - - current_concurrency -= 1 - concurrencies.append(current_concurrency) - - type(zha_gateway).radio_concurrency = PropertyMock(return_value=radio_concurrency) - assert zha_gateway.radio_concurrency == radio_concurrency - - with patch( - "homeassistant.components.zha.core.device.ZHADevice.async_initialize", - side_effect=mock_send_packet, - ): - await zha_gateway.async_fetch_updated_state_mains() - - await zha_gateway.shutdown() - - # Make sure concurrency was always limited - assert current_concurrency == 0 - assert min(concurrencies) == 0 - - if radio_concurrency > 1: - assert 1 <= max(concurrencies) < zha_gateway.radio_concurrency - else: - assert 1 == max(concurrencies) == zha_gateway.radio_concurrency diff --git a/tests/components/zha/test_helpers.py b/tests/components/zha/test_helpers.py index 0615fefd644..13c03c17cf7 100644 --- a/tests/components/zha/test_helpers.py +++ b/tests/components/zha/test_helpers.py @@ -1,81 +1,27 @@ """Tests for ZHA helpers.""" -import enum import logging -from unittest.mock import patch +from typing import Any import pytest import voluptuous_serialize -from zigpy.profiles import zha -from zigpy.quirks.v2.homeassistant import UnitOfPower as QuirksUnitOfPower from zigpy.types.basic import uint16_t -from zigpy.zcl.clusters import general, lighting +from zigpy.zcl.clusters import lighting -from homeassistant.components.zha.core.helpers import ( +from homeassistant.components.zha.helpers 
import ( cluster_command_schema_to_vol_schema, convert_to_zcl_values, - validate_unit, + exclude_none_values, ) -from homeassistant.const import Platform, UnitOfPower from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from .common import async_enable_traffic -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE - _LOGGER = logging.getLogger(__name__) -@pytest.fixture(autouse=True) -def light_platform_only(): - """Only set up the light and required base platforms to speed up tests.""" - with patch( - "homeassistant.components.zha.PLATFORMS", - ( - Platform.BUTTON, - Platform.LIGHT, - Platform.NUMBER, - Platform.SELECT, - ), - ): - yield - - -@pytest.fixture -async def device_light(hass: HomeAssistant, zigpy_device_mock, zha_device_joined): - """Test light.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - ) - color_cluster = zigpy_device.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | lighting.Color.ColorCapabilities.XY_attributes - } - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return color_cluster, zha_device - - -async def test_zcl_schema_conversions(hass: HomeAssistant, device_light) -> None: +async def test_zcl_schema_conversions(hass: HomeAssistant) -> None: """Test ZHA ZCL schema conversion helpers.""" - color_cluster, zha_device = device_light - await async_enable_traffic(hass, [zha_device]) - command_schema = color_cluster.commands_by_name["color_loop_set"].schema + command_schema = lighting.Color.ServerCommandDefs.color_loop_set.schema expected_schema = [ { 
"type": "multi_select", @@ -215,23 +161,21 @@ async def test_zcl_schema_conversions(hass: HomeAssistant, device_light) -> None assert converted_data["update_flags"] == 0 -def test_unit_validation() -> None: - """Test unit validation.""" +@pytest.mark.parametrize( + ("obj", "expected_output"), + [ + ({"a": 1, "b": 2, "c": None}, {"a": 1, "b": 2}), + ({"a": 1, "b": 2, "c": 0}, {"a": 1, "b": 2, "c": 0}), + ({"a": 1, "b": 2, "c": ""}, {"a": 1, "b": 2, "c": ""}), + ({"a": 1, "b": 2, "c": False}, {"a": 1, "b": 2, "c": False}), + ], +) +def test_exclude_none_values( + obj: dict[str, Any], expected_output: dict[str, Any] +) -> None: + """Test exclude_none_values helper.""" + result = exclude_none_values(obj) + assert result == expected_output - assert validate_unit(QuirksUnitOfPower.WATT) == UnitOfPower.WATT - - class FooUnit(enum.Enum): - """Foo unit.""" - - BAR = "bar" - - class UnitOfMass(enum.Enum): - """UnitOfMass.""" - - BAR = "bar" - - with pytest.raises(KeyError): - validate_unit(FooUnit.BAR) - - with pytest.raises(ValueError): - validate_unit(UnitOfMass.BAR) + for key in expected_output: + assert expected_output[key] == obj[key] diff --git a/tests/components/zha/test_init.py b/tests/components/zha/test_init.py index 4d4956d3978..00fc3afd0ea 100644 --- a/tests/components/zha/test_init.py +++ b/tests/components/zha/test_init.py @@ -3,20 +3,21 @@ import asyncio import typing from unittest.mock import AsyncMock, Mock, patch +import zoneinfo import pytest from zigpy.application import ControllerApplication from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH from zigpy.exceptions import TransientConnectionError -from homeassistant.components.zha.core.const import ( +from homeassistant.components.zha.const import ( CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, CONF_USB_PATH, DOMAIN, ) -from homeassistant.components.zha.core.helpers import get_zha_data +from homeassistant.components.zha.helpers import get_zha_data, get_zha_gateway from homeassistant.const import ( 
EVENT_HOMEASSISTANT_STOP, MAJOR_VERSION, @@ -43,7 +44,7 @@ def disable_platform_only(): @pytest.fixture -def config_entry_v1(hass): +def config_entry_v1(hass: HomeAssistant): """Config entry version 1 fixture.""" return MockConfigEntry( domain=DOMAIN, @@ -139,7 +140,6 @@ async def test_config_depreciation(hass: HomeAssistant, zha_config) -> None: ("socket://[1.2.3.4]:5678 ", "socket://1.2.3.4:5678"), ], ) -@patch("homeassistant.components.zha.setup_quirks", Mock(return_value=True)) @patch( "homeassistant.components.zha.websocket_api.async_load_api", Mock(return_value=True) ) @@ -282,10 +282,30 @@ async def test_shutdown_on_ha_stop( zha_data = get_zha_data(hass) with patch.object( - zha_data.gateway, "shutdown", wraps=zha_data.gateway.shutdown + zha_data.gateway_proxy, "shutdown", wraps=zha_data.gateway_proxy.shutdown ) as mock_shutdown: hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) hass.set_state(CoreState.stopping) await hass.async_block_till_done() assert len(mock_shutdown.mock_calls) == 1 + + +async def test_timezone_update( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_zigpy_connect: ControllerApplication, +) -> None: + """Test that the ZHA gateway timezone is updated when HA timezone changes.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + gateway = get_zha_gateway(hass) + + assert hass.config.time_zone == "US/Pacific" + assert gateway.config.local_timezone == zoneinfo.ZoneInfo("US/Pacific") + + await hass.config.async_update(time_zone="America/New_York") + + assert hass.config.time_zone == "America/New_York" + assert gateway.config.local_timezone == zoneinfo.ZoneInfo("America/New_York") diff --git a/tests/components/zha/test_light.py b/tests/components/zha/test_light.py index a9d32362863..ef2714b3b58 100644 --- a/tests/components/zha/test_light.py +++ b/tests/components/zha/test_light.py @@ -1,12 +1,11 @@ """Test ZHA light.""" -from collections.abc import Callable -from datetime import 
timedelta -from typing import Any from unittest.mock import AsyncMock, call, patch, sentinel import pytest +from zha.application.platforms.light.const import FLASH_EFFECTS from zigpy.profiles import zha +from zigpy.zcl import Cluster from zigpy.zcl.clusters import general, lighting import zigpy.zcl.foundation as zcl_f @@ -16,41 +15,23 @@ from homeassistant.components.light import ( FLASH_SHORT, ColorMode, ) -from homeassistant.components.zha.core.const import ( - CONF_ALWAYS_PREFER_XY_COLOR_MODE, - CONF_GROUP_MEMBERS_ASSUME_STATE, - ZHA_OPTIONS, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) -from homeassistant.components.zha.core.group import GroupMember -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.components.zha.light import FLASH_EFFECTS -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -import homeassistant.util.dt as dt_util from .common import ( - async_enable_traffic, - async_find_group_entity_id, async_shift_time, - async_test_rejoin, - async_wait_for_updates, find_entity_id, - patch_zha_config, send_attributes_report, update_attribute_cache, ) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import ( - async_fire_time_changed, - async_mock_load_restore_state_from_storage, -) - -IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" -IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e9" -IEEE_GROUPABLE_DEVICE3 = "03:2d:6f:00:0a:90:69:e7" - LIGHT_ON_OFF = { 1: { SIG_EP_PROFILE: zha.PROFILE_ID, @@ -111,195 +92,6 @@ def light_platform_only(): yield -@pytest.fixture -async def coordinator(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = 
zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Groups.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee="00:15:8d:00:02:32:4f:32", - nwk=0x0000, - node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_1(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE, - nwk=0xB79D, - ) - color_cluster = zigpy_device.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | lighting.Color.ColorCapabilities.XY_attributes - } - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_2(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE2, - manufacturer="sengled", - nwk=0xC79E, - ) - color_cluster = zigpy_device.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | 
lighting.Color.ColorCapabilities.XY_attributes - } - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_3(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE3, - nwk=0xB89F, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def eWeLink_light(hass, zigpy_device_mock, zha_device_joined): - """Mock eWeLink light.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee="03:2d:6f:00:0a:90:69:e3", - manufacturer="eWeLink", - nwk=0xB79D, - ) - color_cluster = zigpy_device.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | lighting.Color.ColorCapabilities.XY_attributes, - "color_temp_physical_min": 0, - "color_temp_physical_max": 0, - } - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -async def test_light_refresh( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -) -> None: - """Test ZHA light platform refresh.""" - - # create zigpy devices - zigpy_device = zigpy_device_mock(LIGHT_ON_OFF) - on_off_cluster = zigpy_device.endpoints[1].on_off - 
on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0} - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - on_off_cluster.read_attributes.reset_mock() - - # not enough time passed - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=20)) - await hass.async_block_till_done() - assert on_off_cluster.read_attributes.call_count == 0 - assert on_off_cluster.read_attributes.await_count == 0 - assert hass.states.get(entity_id).state == STATE_OFF - - # 1 interval - 1 call - on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 1} - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80)) - await hass.async_block_till_done() - assert on_off_cluster.read_attributes.call_count == 1 - assert on_off_cluster.read_attributes.await_count == 1 - assert hass.states.get(entity_id).state == STATE_ON - - # 2 intervals - 2 calls - on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0} - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80)) - await hass.async_block_till_done() - assert on_off_cluster.read_attributes.call_count == 2 - assert on_off_cluster.read_attributes.await_count == 2 - assert hass.states.get(entity_id).state == STATE_OFF - - @patch( "zigpy.zcl.clusters.lighting.Color.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), @@ -322,34 +114,42 @@ async def test_light_refresh( ) async def test_light( hass: HomeAssistant, + setup_zha, zigpy_device_mock, - zha_device_joined_restored, device, reporting, ) -> None: """Test ZHA light platform.""" - # create zigpy devices - zigpy_device = zigpy_device_mock(device) - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = 
get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock(device) + cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None) + + if cluster_color: + cluster_color.PLUGGED_ATTR_READS = { + "color_temperature": 100, + "color_temp_physical_min": 0, + "color_temp_physical_max": 600, + "color_capabilities": lighting.ColorCapabilities.XY_attributes + | lighting.ColorCapabilities.Color_temperature, + } + update_attribute_cache(cluster_color) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) assert entity_id is not None cluster_on_off = zigpy_device.endpoints[1].on_off cluster_level = getattr(zigpy_device.endpoints[1], "level", None) - cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None) cluster_identify = getattr(zigpy_device.endpoints[1], "identify", None) - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the lights were created and that they are unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the lights were created and are off assert hass.states.get(entity_id).state == STATE_OFF # test turning the lights on and off from the light @@ -379,889 +179,6 @@ async def test_light( hass, cluster_level, entity_id, 150, STATE_ON ) - # test rejoin - await async_test_off_from_hass(hass, cluster_on_off, entity_id) - clusters = [c for c in (cluster_on_off, cluster_level, cluster_color) if c] - await async_test_rejoin(hass, zigpy_device, clusters, reporting) - - -@pytest.mark.parametrize( - ("plugged_attr_reads", "config_override", 
"expected_state"), - [ - # HS light without cached hue or saturation - ( - { - "color_capabilities": ( - lighting.Color.ColorCapabilities.Hue_and_saturation - ), - }, - {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, - {}, - ), - # HS light with cached hue - ( - { - "color_capabilities": ( - lighting.Color.ColorCapabilities.Hue_and_saturation - ), - "current_hue": 100, - }, - {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, - {}, - ), - # HS light with cached saturation - ( - { - "color_capabilities": ( - lighting.Color.ColorCapabilities.Hue_and_saturation - ), - "current_saturation": 100, - }, - {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, - {}, - ), - # HS light with both - ( - { - "color_capabilities": ( - lighting.Color.ColorCapabilities.Hue_and_saturation - ), - "current_hue": 100, - "current_saturation": 100, - }, - {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, - {}, - ), - ], -) -async def test_light_initialization( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined_restored, - plugged_attr_reads, - config_override, - expected_state, -) -> None: - """Test ZHA light initialization with cached attributes and color modes.""" - - # create zigpy devices - zigpy_device = zigpy_device_mock(LIGHT_COLOR) - - # mock attribute reads - zigpy_device.endpoints[1].light_color.PLUGGED_ATTR_READS = plugged_attr_reads - - with patch_zha_config("light", config_override): - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) - - assert entity_id is not None - - # pylint: disable-next=fixme - # TODO ensure hue and saturation are properly set on startup - - -@patch( - "zigpy.zcl.clusters.lighting.Color.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -@patch( - "zigpy.zcl.clusters.general.Identify.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -@patch( - 
"zigpy.zcl.clusters.general.LevelControl.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -@patch( - "zigpy.zcl.clusters.general.OnOff.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -async def test_transitions( - hass: HomeAssistant, device_light_1, device_light_2, eWeLink_light, coordinator -) -> None: - """Test ZHA light transition code.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - device_light_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] - members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] - - assert coordinator.is_coordinator - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) - device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) - eWeLink_light_entity_id = find_entity_id(Platform.LIGHT, eWeLink_light, hass) - assert device_1_entity_id != device_2_entity_id - - group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) - assert hass.states.get(group_entity_id) is not None - - assert device_1_entity_id in zha_group.member_entity_ids - assert device_2_entity_id in zha_group.member_entity_ids - - dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off - dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off - eWeLink_cluster_on_off = eWeLink_light.device.endpoints[1].on_off - - 
dev1_cluster_level = device_light_1.device.endpoints[1].level - dev2_cluster_level = device_light_2.device.endpoints[1].level - eWeLink_cluster_level = eWeLink_light.device.endpoints[1].level - - dev1_cluster_color = device_light_1.device.endpoints[1].light_color - dev2_cluster_color = device_light_2.device.endpoints[1].light_color - eWeLink_cluster_color = eWeLink_light.device.endpoints[1].light_color - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_light_1, device_light_2]) - await async_wait_for_updates(hass) - - # test that the lights were created and are off - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_OFF - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_OFF - - # first test 0 length transition with no color and no brightness provided - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_level.request.reset_mock() - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": device_1_entity_id, "transition": 0}, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 1 - assert dev1_cluster_level.request.await_count == 1 - assert dev1_cluster_level.request.call_args == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=254, # default "full on" brightness - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 254 
- - # test 0 length transition with no color and no brightness provided again, but for "force on" lights - eWeLink_cluster_on_off.request.reset_mock() - eWeLink_cluster_level.request.reset_mock() - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": eWeLink_light_entity_id, "transition": 0}, - blocking=True, - ) - assert eWeLink_cluster_on_off.request.call_count == 1 - assert eWeLink_cluster_on_off.request.await_count == 1 - assert eWeLink_cluster_on_off.request.call_args_list[0] == call( - False, - eWeLink_cluster_on_off.commands_by_name["on"].id, - eWeLink_cluster_on_off.commands_by_name["on"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert eWeLink_cluster_color.request.call_count == 0 - assert eWeLink_cluster_color.request.await_count == 0 - assert eWeLink_cluster_level.request.call_count == 1 - assert eWeLink_cluster_level.request.await_count == 1 - assert eWeLink_cluster_level.request.call_args == call( - False, - eWeLink_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - eWeLink_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=254, # default "full on" brightness - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - eWeLink_state = hass.states.get(eWeLink_light_entity_id) - assert eWeLink_state.state == STATE_ON - assert eWeLink_state.attributes["brightness"] == 254 - - eWeLink_cluster_on_off.request.reset_mock() - eWeLink_cluster_level.request.reset_mock() - - # test 0 length transition with brightness, but no color provided - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_level.request.reset_mock() - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": device_1_entity_id, "transition": 0, "brightness": 50}, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 0 - assert 
dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 1 - assert dev1_cluster_level.request.await_count == 1 - assert dev1_cluster_level.request.call_args == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=50, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 50 - - dev1_cluster_level.request.reset_mock() - - # test non 0 length transition with color provided while light is on - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_1_entity_id, - "transition": 3.5, - "brightness": 18, - "color_temp": 432, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 1 - assert dev1_cluster_color.request.await_count == 1 - assert dev1_cluster_level.request.call_count == 1 - assert dev1_cluster_level.request.await_count == 1 - assert dev1_cluster_level.request.call_args == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=18, - transition_time=35, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=432, - transition_time=35, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 18 - assert 
light1_state.attributes["color_temp"] == 432 - assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev1_cluster_level.request.reset_mock() - dev1_cluster_color.request.reset_mock() - - # test 0 length transition to turn light off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_1_entity_id, - "transition": 0, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 1 - assert dev1_cluster_level.request.await_count == 1 - assert dev1_cluster_level.request.call_args == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=0, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_OFF - - dev1_cluster_level.request.reset_mock() - - # test non 0 length transition and color temp while turning light on (new_color_provided_while_off) - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_1_entity_id, - "transition": 1, - "brightness": 25, - "color_temp": 235, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 1 - assert dev1_cluster_color.request.await_count == 1 - assert dev1_cluster_level.request.call_count == 2 - assert dev1_cluster_level.request.await_count == 2 - - # first it comes on with no transition at 2 brightness - assert dev1_cluster_level.request.call_args_list[0] == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - 
dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=2, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=235, - transition_time=0, # no transition when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_level.request.call_args_list[1] == call( - False, - dev1_cluster_level.commands_by_name["move_to_level"].id, - dev1_cluster_level.commands_by_name["move_to_level"].schema, - level=25, - transition_time=10, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 25 - assert light1_state.attributes["color_temp"] == 235 - assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev1_cluster_level.request.reset_mock() - dev1_cluster_color.request.reset_mock() - - # turn light 1 back off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_1_entity_id, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 1 - assert dev1_cluster_on_off.request.await_count == 1 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 0 - assert dev1_cluster_level.request.await_count == 0 - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_color.request.reset_mock() - dev1_cluster_level.request.reset_mock() - - # test no transition provided and color temp while turning light on (new_color_provided_while_off) - await hass.services.async_call( - 
LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_1_entity_id, - "brightness": 25, - "color_temp": 236, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 1 - assert dev1_cluster_color.request.await_count == 1 - assert dev1_cluster_level.request.call_count == 2 - assert dev1_cluster_level.request.await_count == 2 - - # first it comes on with no transition at 2 brightness - assert dev1_cluster_level.request.call_args_list[0] == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=2, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=236, - transition_time=0, # no transition when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_level.request.call_args_list[1] == call( - False, - dev1_cluster_level.commands_by_name["move_to_level"].id, - dev1_cluster_level.commands_by_name["move_to_level"].schema, - level=25, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 25 - assert light1_state.attributes["color_temp"] == 236 - assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev1_cluster_level.request.reset_mock() - dev1_cluster_color.request.reset_mock() - - # turn light 1 back off to setup group test - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_1_entity_id, - }, - blocking=True, - ) - assert 
dev1_cluster_on_off.request.call_count == 1 - assert dev1_cluster_on_off.request.await_count == 1 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 0 - assert dev1_cluster_level.request.await_count == 0 - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_color.request.reset_mock() - dev1_cluster_level.request.reset_mock() - - # test no transition when the same color temp is provided from off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_1_entity_id, - "color_temp": 236, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 1 - assert dev1_cluster_on_off.request.await_count == 1 - assert dev1_cluster_color.request.call_count == 1 - assert dev1_cluster_color.request.await_count == 1 - assert dev1_cluster_level.request.call_count == 0 - assert dev1_cluster_level.request.await_count == 0 - - assert dev1_cluster_on_off.request.call_args == call( - False, - dev1_cluster_on_off.commands_by_name["on"].id, - dev1_cluster_on_off.commands_by_name["on"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=236, - transition_time=0, # no transition when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 25 - assert light1_state.attributes["color_temp"] == 236 - assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev1_cluster_on_off.request.reset_mock() - 
dev1_cluster_color.request.reset_mock() - - # turn light 1 back off to setup group test - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_1_entity_id, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 1 - assert dev1_cluster_on_off.request.await_count == 1 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 0 - assert dev1_cluster_level.request.await_count == 0 - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_color.request.reset_mock() - dev1_cluster_level.request.reset_mock() - - # test sengled light uses default minimum transition time - dev2_cluster_on_off.request.reset_mock() - dev2_cluster_color.request.reset_mock() - dev2_cluster_level.request.reset_mock() - - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": device_2_entity_id, "transition": 0, "brightness": 100}, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 0 - assert dev2_cluster_on_off.request.await_count == 0 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 1 - assert dev2_cluster_level.request.await_count == 1 - assert dev2_cluster_level.request.call_args == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=100, - transition_time=1, # transition time - sengled light uses default minimum - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_ON - assert light2_state.attributes["brightness"] == 100 - - dev2_cluster_level.request.reset_mock() - - # turn the sengled light 
back off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_2_entity_id, - }, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 1 - assert dev2_cluster_on_off.request.await_count == 1 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 0 - assert dev2_cluster_level.request.await_count == 0 - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_OFF - - dev2_cluster_on_off.request.reset_mock() - - # test non 0 length transition and color temp while turning light on and sengled (new_color_provided_while_off) - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_2_entity_id, - "transition": 1, - "brightness": 25, - "color_temp": 235, - }, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 0 - assert dev2_cluster_on_off.request.await_count == 0 - assert dev2_cluster_color.request.call_count == 1 - assert dev2_cluster_color.request.await_count == 1 - assert dev2_cluster_level.request.call_count == 2 - assert dev2_cluster_level.request.await_count == 2 - - # first it comes on with no transition at 2 brightness - assert dev2_cluster_level.request.call_args_list[0] == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=2, - transition_time=1, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev2_cluster_color.request.call_args == call( - False, - dev2_cluster_color.commands_by_name["move_to_color_temp"].id, - dev2_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=235, - transition_time=1, # sengled transition == 1 when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev2_cluster_level.request.call_args_list[1] == 
call( - False, - dev2_cluster_level.commands_by_name["move_to_level"].id, - dev2_cluster_level.commands_by_name["move_to_level"].schema, - level=25, - transition_time=10, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_ON - assert light2_state.attributes["brightness"] == 25 - assert light2_state.attributes["color_temp"] == 235 - assert light2_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev2_cluster_level.request.reset_mock() - dev2_cluster_color.request.reset_mock() - - # turn the sengled light back off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_2_entity_id, - }, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 1 - assert dev2_cluster_on_off.request.await_count == 1 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 0 - assert dev2_cluster_level.request.await_count == 0 - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_OFF - - dev2_cluster_on_off.request.reset_mock() - - # test non 0 length transition and color temp while turning group light on (new_color_provided_while_off) - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": group_entity_id, - "transition": 1, - "brightness": 25, - "color_temp": 235, - }, - blocking=True, - ) - - group_on_off_cluster_handler = zha_group.endpoint[general.OnOff.cluster_id] - group_level_cluster_handler = zha_group.endpoint[general.LevelControl.cluster_id] - group_color_cluster_handler = zha_group.endpoint[lighting.Color.cluster_id] - assert group_on_off_cluster_handler.request.call_count == 0 - assert group_on_off_cluster_handler.request.await_count == 0 - assert group_color_cluster_handler.request.call_count == 1 - assert group_color_cluster_handler.request.await_count == 1 - 
assert group_level_cluster_handler.request.call_count == 1 - assert group_level_cluster_handler.request.await_count == 1 - - # groups are omitted from the 3 call dance for new_color_provided_while_off - assert group_color_cluster_handler.request.call_args == call( - False, - dev2_cluster_color.commands_by_name["move_to_color_temp"].id, - dev2_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=235, - transition_time=10, # sengled transition == 1 when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert group_level_cluster_handler.request.call_args == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=25, - transition_time=10, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_ON - assert group_state.attributes["brightness"] == 25 - assert group_state.attributes["color_temp"] == 235 - assert group_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - group_on_off_cluster_handler.request.reset_mock() - group_color_cluster_handler.request.reset_mock() - group_level_cluster_handler.request.reset_mock() - - # turn the sengled light back on - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_2_entity_id, - }, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 1 - assert dev2_cluster_on_off.request.await_count == 1 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 0 - assert dev2_cluster_level.request.await_count == 0 - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_ON - - dev2_cluster_on_off.request.reset_mock() - - # turn the light off with a transition - await hass.services.async_call( 
- LIGHT_DOMAIN, - "turn_off", - {"entity_id": device_2_entity_id, "transition": 2}, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 0 - assert dev2_cluster_on_off.request.await_count == 0 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 1 - assert dev2_cluster_level.request.await_count == 1 - assert dev2_cluster_level.request.call_args == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=0, - transition_time=20, # transition time - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_OFF - - dev2_cluster_level.request.reset_mock() - - # turn the light back on with no args should use a transition and last known brightness - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": device_2_entity_id}, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 0 - assert dev2_cluster_on_off.request.await_count == 0 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 1 - assert dev2_cluster_level.request.await_count == 1 - assert dev2_cluster_level.request.call_args == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=25, - transition_time=1, # transition time - sengled light uses default minimum - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_ON - - dev2_cluster_level.request.reset_mock() - - # test eWeLink color temp while turning light on from off (new_color_provided_while_off) 
- await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": eWeLink_light_entity_id, - "color_temp": 235, - }, - blocking=True, - ) - assert eWeLink_cluster_on_off.request.call_count == 1 - assert eWeLink_cluster_on_off.request.await_count == 1 - assert eWeLink_cluster_color.request.call_count == 1 - assert eWeLink_cluster_color.request.await_count == 1 - assert eWeLink_cluster_level.request.call_count == 0 - assert eWeLink_cluster_level.request.await_count == 0 - - # first it comes on - assert eWeLink_cluster_on_off.request.call_args_list[0] == call( - False, - eWeLink_cluster_on_off.commands_by_name["on"].id, - eWeLink_cluster_on_off.commands_by_name["on"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=235, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - eWeLink_state = hass.states.get(eWeLink_light_entity_id) - assert eWeLink_state.state == STATE_ON - assert eWeLink_state.attributes["color_temp"] == 235 - assert eWeLink_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - assert eWeLink_state.attributes["min_mireds"] == 153 - assert eWeLink_state.attributes["max_mireds"] == 500 - @patch( "zigpy.zcl.clusters.lighting.Color.request", @@ -1275,13 +192,51 @@ async def test_transitions( "zigpy.zcl.clusters.general.OnOff.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) -async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None: +async def test_on_with_off_color( + hass: HomeAssistant, setup_zha, zigpy_device_mock +) -> None: """Test turning on the light and sending color commands before on/level commands for supporting lights.""" - device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) - dev1_cluster_on_off = 
device_light_1.device.endpoints[1].on_off - dev1_cluster_level = device_light_1.device.endpoints[1].level - dev1_cluster_color = device_light_1.device.endpoints[1].light_color + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + nwk=0xB79D, + ) + + dev1_cluster_color = zigpy_device.endpoints[1].light_color + + dev1_cluster_color.PLUGGED_ATTR_READS = { + "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature + | lighting.Color.ColorCapabilities.XY_attributes + } + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) + assert entity_id is not None + + device_1_entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) + dev1_cluster_on_off = zigpy_device.endpoints[1].on_off + dev1_cluster_level = zigpy_device.endpoints[1].level # Execute_if_off will override the "enhanced turn on from an off-state" config option that's enabled here dev1_cluster_color.PLUGGED_ATTR_READS = { @@ -1403,28 +358,34 @@ async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None: assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP -async def async_test_on_off_from_light(hass, cluster, entity_id): +async def async_test_on_off_from_light( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test on off functionality from the light.""" # turn on at light await 
send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 3}) - await async_wait_for_updates(hass) + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(entity_id).state == STATE_ON # turn off at light await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 3}) - await async_wait_for_updates(hass) + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(entity_id).state == STATE_OFF -async def async_test_on_from_light(hass, cluster, entity_id): +async def async_test_on_from_light( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test on off functionality from the light.""" # turn on at light await send_attributes_report(hass, cluster, {1: -1, 0: 1, 2: 2}) - await async_wait_for_updates(hass) + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(entity_id).state == STATE_ON -async def async_test_on_off_from_hass(hass, cluster, entity_id): +async def async_test_on_off_from_hass( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test on off functionality from hass.""" # turn on via UI cluster.request.reset_mock() @@ -1445,7 +406,9 @@ async def async_test_on_off_from_hass(hass, cluster, entity_id): await async_test_off_from_hass(hass, cluster, entity_id) -async def async_test_off_from_hass(hass, cluster, entity_id): +async def async_test_off_from_hass( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test turning off the light from Home Assistant.""" # turn off via UI @@ -1467,9 +430,9 @@ async def async_test_off_from_hass(hass, cluster, entity_id): async def async_test_level_on_off_from_hass( hass: HomeAssistant, - on_off_cluster, - level_cluster, - entity_id, + on_off_cluster: Cluster, + level_cluster: Cluster, + entity_id: str, expected_default_transition: int = 0, ): """Test on off functionality from hass.""" @@ -1549,13 +512,19 @@ async def async_test_level_on_off_from_hass( await async_test_off_from_hass(hass, on_off_cluster, 
entity_id) -async def async_test_dimmer_from_light(hass, cluster, entity_id, level, expected_state): +async def async_test_dimmer_from_light( + hass: HomeAssistant, + cluster: Cluster, + entity_id: str, + level: int, + expected_state: str, +): """Test dimmer functionality from the light.""" await send_attributes_report( hass, cluster, {1: level + 10, 0: level, 2: level - 10 or 22} ) - await async_wait_for_updates(hass) + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(entity_id).state == expected_state # hass uses None for brightness of 0 in state attributes if level == 0: @@ -1563,7 +532,9 @@ async def async_test_dimmer_from_light(hass, cluster, entity_id, level, expected assert hass.states.get(entity_id).attributes.get("brightness") == level -async def async_test_flash_from_hass(hass, cluster, entity_id, flash): +async def async_test_flash_from_hass( + hass: HomeAssistant, cluster: Cluster, entity_id: str, flash +): """Test flash functionality from hass.""" # turn on via UI cluster.request.reset_mock() @@ -1603,405 +574,23 @@ async def async_test_flash_from_hass(hass, cluster, entity_id, flash): "zigpy.zcl.clusters.general.OnOff.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) -@patch( - "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_zha_group_light_entity( +async def test_light_exception_on_creation( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_light_1, - device_light_2, - device_light_3, - coordinator, -) -> None: - """Test the light entity for a ZHA group.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - device_light_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] - members = 
[GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] - - assert coordinator.is_coordinator - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) - device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) - device_3_entity_id = find_entity_id(Platform.LIGHT, device_light_3, hass) - - assert device_1_entity_id not in (device_2_entity_id, device_3_entity_id) - assert device_2_entity_id != device_3_entity_id - - group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) - assert hass.states.get(group_entity_id) is not None - - assert device_1_entity_id in zha_group.member_entity_ids - assert device_2_entity_id in zha_group.member_entity_ids - assert device_3_entity_id not in zha_group.member_entity_ids - - group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] - group_cluster_level = zha_group.endpoint[general.LevelControl.cluster_id] - group_cluster_identify = zha_group.endpoint[general.Identify.cluster_id] - - dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off - dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off - dev3_cluster_on_off = device_light_3.device.endpoints[1].on_off - - dev1_cluster_level = device_light_1.device.endpoints[1].level - - await async_enable_traffic( - hass, [device_light_1, device_light_2, device_light_3], enabled=False - ) - await async_wait_for_updates(hass) - # test that the lights were created and that they are unavailable - assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway 
and device - await async_enable_traffic(hass, [device_light_1, device_light_2, device_light_3]) - await async_wait_for_updates(hass) - - # test that the lights were created and are off - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - assert group_state.attributes["supported_color_modes"] == [ - ColorMode.COLOR_TEMP, - ColorMode.XY, - ] - # Light which is off has no color mode - assert group_state.attributes["color_mode"] is None - - # test turning the lights on and off from the HA - await async_test_on_off_from_hass(hass, group_cluster_on_off, group_entity_id) - - await async_shift_time(hass) - - # test short flashing the lights from the HA - await async_test_flash_from_hass( - hass, group_cluster_identify, group_entity_id, FLASH_SHORT - ) - - await async_shift_time(hass) - - # test turning the lights on and off from the light - await async_test_on_off_from_light(hass, dev1_cluster_on_off, group_entity_id) - - # test turning the lights on and off from the HA - await async_test_level_on_off_from_hass( - hass, - group_cluster_on_off, - group_cluster_level, - group_entity_id, - expected_default_transition=1, # a Sengled light is in that group and needs a minimum 0.1s transition - ) - - await async_shift_time(hass) - - # test getting a brightness change from the network - await async_test_on_from_light(hass, dev1_cluster_on_off, group_entity_id) - await async_test_dimmer_from_light( - hass, dev1_cluster_level, group_entity_id, 150, STATE_ON - ) - # Check state - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_ON - assert group_state.attributes["supported_color_modes"] == [ - ColorMode.COLOR_TEMP, - ColorMode.XY, - ] - assert group_state.attributes["color_mode"] == ColorMode.XY - - # test long flashing the lights from the HA - await async_test_flash_from_hass( - hass, group_cluster_identify, group_entity_id, FLASH_LONG - ) - - await async_shift_time(hass) - - assert len(zha_group.members) == 2 
- # test some of the group logic to make sure we key off states correctly - await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) - await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) - await hass.async_block_till_done() - - # test that group light is on - assert hass.states.get(device_1_entity_id).state == STATE_ON - assert hass.states.get(device_2_entity_id).state == STATE_ON - assert hass.states.get(group_entity_id).state == STATE_ON - - await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) - await hass.async_block_till_done() - - # test that group light is still on - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_ON - assert hass.states.get(group_entity_id).state == STATE_ON - - await send_attributes_report(hass, dev2_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - - # test that group light is now off - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(group_entity_id).state == STATE_OFF - - await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - - # test that group light is now back on - assert hass.states.get(device_1_entity_id).state == STATE_ON - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(group_entity_id).state == STATE_ON - - # turn it off to test a new member add being tracked - await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(group_entity_id).state == STATE_OFF - - # add a new member and test that his state is also tracked - await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)]) - await send_attributes_report(hass, 
dev3_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - assert device_3_entity_id in zha_group.member_entity_ids - assert len(zha_group.members) == 3 - - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(device_3_entity_id).state == STATE_ON - assert hass.states.get(group_entity_id).state == STATE_ON - - # make the group have only 1 member and now there should be no entity - await zha_group.async_remove_members( - [GroupMember(device_light_2.ieee, 1), GroupMember(device_light_3.ieee, 1)] - ) - assert len(zha_group.members) == 1 - assert hass.states.get(group_entity_id) is None - assert device_2_entity_id not in zha_group.member_entity_ids - assert device_3_entity_id not in zha_group.member_entity_ids - - # make sure the entity registry entry is still there - assert entity_registry.async_get(group_entity_id) is not None - - # add a member back and ensure that the group entity was created again - await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)]) - await send_attributes_report(hass, dev3_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - assert len(zha_group.members) == 2 - assert hass.states.get(group_entity_id).state == STATE_ON - - # add a 3rd member and ensure we still have an entity and we track the new one - await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) - await send_attributes_report(hass, dev3_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - assert hass.states.get(group_entity_id).state == STATE_OFF - - # this will test that _reprobe_group is used correctly - await zha_group.async_add_members( - [GroupMember(device_light_2.ieee, 1), GroupMember(coordinator.ieee, 1)] - ) - await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - assert len(zha_group.members) == 4 - assert hass.states.get(group_entity_id).state == STATE_ON - - await 
zha_group.async_remove_members([GroupMember(coordinator.ieee, 1)]) - await hass.async_block_till_done() - assert hass.states.get(group_entity_id).state == STATE_ON - assert len(zha_group.members) == 3 - - # remove the group and ensure that there is no entity and that the entity registry is cleaned up - assert entity_registry.async_get(group_entity_id) is not None - await zha_gateway.async_remove_zigpy_group(zha_group.group_id) - assert hass.states.get(group_entity_id) is None - assert entity_registry.async_get(group_entity_id) is None - - -@patch( - "zigpy.zcl.clusters.general.OnOff.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -@patch( - "homeassistant.components.zha.light.ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_group_member_assume_state( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, + setup_zha, zigpy_device_mock, - zha_device_joined, - coordinator, - device_light_1, - device_light_2, + caplog: pytest.LogCaptureFixture, ) -> None: - """Test the group members assume state function.""" - with patch_zha_config( - "light", {(ZHA_OPTIONS, CONF_GROUP_MEMBERS_ASSUME_STATE): True} - ): - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - device_light_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] - members = [ - GroupMember(device_light_1.ieee, 1), - GroupMember(device_light_2.ieee, 1), - ] - - assert coordinator.is_coordinator - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert 
member.endpoint is not None - - device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) - device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) - - assert device_1_entity_id != device_2_entity_id - - group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) - assert hass.states.get(group_entity_id) is not None - - assert device_1_entity_id in zha_group.member_entity_ids - assert device_2_entity_id in zha_group.member_entity_ids - - group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] - - await async_enable_traffic( - hass, [device_light_1, device_light_2], enabled=False - ) - await async_wait_for_updates(hass) - # test that the lights were created and that they are unavailable - assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_light_1, device_light_2]) - await async_wait_for_updates(hass) - - # test that the lights were created and are off - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - - group_cluster_on_off.request.reset_mock() - await async_shift_time(hass) - - # turn on via UI - await hass.services.async_call( - LIGHT_DOMAIN, "turn_on", {"entity_id": group_entity_id}, blocking=True - ) - - # members also instantly assume STATE_ON - assert hass.states.get(device_1_entity_id).state == STATE_ON - assert hass.states.get(device_2_entity_id).state == STATE_ON - assert hass.states.get(group_entity_id).state == STATE_ON - - # turn off via UI - await hass.services.async_call( - LIGHT_DOMAIN, "turn_off", {"entity_id": group_entity_id}, blocking=True - ) - - # members also instantly assume STATE_OFF - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(group_entity_id).state == STATE_OFF - - # remove the group and ensure that there is no 
entity and that the entity registry is cleaned up - assert entity_registry.async_get(group_entity_id) is not None - await zha_gateway.async_remove_zigpy_group(zha_group.group_id) - assert hass.states.get(group_entity_id) is None - assert entity_registry.async_get(group_entity_id) is None - - -@pytest.mark.parametrize( - ("restored_state", "expected_state"), - [ - ( - STATE_ON, - { - "brightness": None, - "off_with_transition": None, - "off_brightness": None, - "color_mode": ColorMode.XY, # color_mode defaults to what the light supports when restored with ON state - "color_temp": None, - "xy_color": None, - "hs_color": None, - "effect": None, - }, - ), - ( - STATE_OFF, - { - "brightness": None, - "off_with_transition": None, - "off_brightness": None, - "color_mode": None, - "color_temp": None, - "xy_color": None, - "hs_color": None, - "effect": None, - }, - ), - ], -) -async def test_restore_light_state( - hass: HomeAssistant, - zigpy_device_mock, - core_rs: Callable[[str, Any, dict[str, Any]], None], - zha_device_restored, - restored_state: str, - expected_state: dict[str, Any], -) -> None: - """Test ZHA light restores without throwing an error when attributes are None.""" - - # restore state with None values - attributes = { - "brightness": None, - "off_with_transition": None, - "off_brightness": None, - "color_mode": None, - "color_temp": None, - "xy_color": None, - "hs_color": None, - "effect": None, - } - - entity_id = "light.fakemanufacturer_fakemodel_light" - core_rs( - entity_id, - state=restored_state, - attributes=attributes, - ) - await async_mock_load_restore_state_from_storage(hass) + """Test ZHA light entity creation exception.""" + await setup_zha() + gateway = get_zha_gateway(hass) zigpy_device = zigpy_device_mock(LIGHT_COLOR) - zha_device = await zha_device_restored(zigpy_device) - entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == restored_state + 
gateway.get_or_create_device(zigpy_device) + with patch( + "homeassistant.components.zha.light.Light.__init__", side_effect=Exception + ): + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - # compare actual restored state to expected state - for attribute, expected_value in expected_state.items(): - assert hass.states.get(entity_id).attributes.get(attribute) == expected_value + assert "Error while adding entity from entity data" in caplog.text diff --git a/tests/components/zha/test_lock.py b/tests/components/zha/test_lock.py index b16d7a31828..4e1d092af9b 100644 --- a/tests/components/zha/test_lock.py +++ b/tests/components/zha/test_lock.py @@ -3,27 +3,23 @@ from unittest.mock import patch import pytest -import zigpy.profiles.zha +from zigpy.profiles import zha +from zigpy.zcl import Cluster from zigpy.zcl.clusters import closures, general import zigpy.zcl.foundation as zcl_f from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN -from homeassistant.const import ( - STATE_LOCKED, - STATE_UNAVAILABLE, - STATE_UNLOCKED, - Platform, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) +from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED, Platform from homeassistant.core import HomeAssistant -from .common import async_enable_traffic, find_entity_id, send_attributes_report -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE - -LOCK_DOOR = 0 -UNLOCK_DOOR = 1 -SET_PIN_CODE = 5 -CLEAR_PIN_CODE = 7 -SET_USER_STATUS = 9 +from .common import find_entity_id, send_attributes_report +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @pytest.fixture(autouse=True) @@ -40,48 +36,51 @@ def lock_platform_only(): yield -@pytest.fixture -async def lock(hass, zigpy_device_mock, zha_device_joined_restored): - """Lock cluster fixture.""" +async def test_lock(hass: HomeAssistant, 
setup_zha, zigpy_device_mock) -> None: + """Test ZHA lock platform.""" + + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [closures.DoorLock.cluster_id, general.Basic.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.DOOR_LOCK, + SIG_EP_TYPE: zha.DeviceType.DOOR_LOCK, + SIG_EP_PROFILE: zha.PROFILE_ID, } }, + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", ) - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].door_lock + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - -async def test_lock(hass: HomeAssistant, lock) -> None: - """Test ZHA lock platform.""" - - zha_device, cluster = lock - entity_id = find_entity_id(Platform.LOCK, zha_device, hass) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.LOCK, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].door_lock assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNLOCKED - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the lock was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to unlocked assert hass.states.get(entity_id).state == STATE_UNLOCKED # set state to locked - await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) + await send_attributes_report( + hass, + cluster, + {closures.DoorLock.AttributeDefs.lock_state.id: closures.LockState.Locked}, + ) assert hass.states.get(entity_id).state 
== STATE_LOCKED # set state to unlocked - await send_attributes_report(hass, cluster, {1: 0, 0: 2, 2: 3}) + await send_attributes_report( + hass, + cluster, + {closures.DoorLock.AttributeDefs.lock_state.id: closures.LockState.Unlocked}, + ) assert hass.states.get(entity_id).state == STATE_UNLOCKED # lock from HA @@ -103,7 +102,7 @@ async def test_lock(hass: HomeAssistant, lock) -> None: await async_disable_user_code(hass, cluster, entity_id) -async def async_lock(hass, cluster, entity_id): +async def async_lock(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test lock functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # lock via UI @@ -112,10 +111,13 @@ async def async_lock(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == LOCK_DOOR + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.lock_door.id + ) -async def async_unlock(hass, cluster, entity_id): +async def async_unlock(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test lock functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # lock via UI @@ -124,10 +126,13 @@ async def async_unlock(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == UNLOCK_DOOR + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.unlock_door.id + ) -async def async_set_user_code(hass, cluster, entity_id): +async def async_set_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test set lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -139,7 +144,10 @@ async def async_set_user_code(hass, cluster, entity_id): ) 
assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == SET_PIN_CODE + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.set_pin_code.id + ) assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Enabled assert ( @@ -148,7 +156,7 @@ async def async_set_user_code(hass, cluster, entity_id): assert cluster.request.call_args[0][6] == "13246579" -async def async_clear_user_code(hass, cluster, entity_id): +async def async_clear_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test clear lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -163,11 +171,14 @@ async def async_clear_user_code(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == CLEAR_PIN_CODE + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.clear_pin_code.id + ) assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 -async def async_enable_user_code(hass, cluster, entity_id): +async def async_enable_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test enable lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -182,12 +193,17 @@ async def async_enable_user_code(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == SET_USER_STATUS + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.set_user_status.id + ) assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal 
slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Enabled -async def async_disable_user_code(hass, cluster, entity_id): +async def async_disable_user_code( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test disable lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -202,6 +218,9 @@ async def async_disable_user_code(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == SET_USER_STATUS + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.set_user_status.id + ) assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Disabled diff --git a/tests/components/zha/test_logbook.py b/tests/components/zha/test_logbook.py index 19a6f9d359f..0b27cd095a9 100644 --- a/tests/components/zha/test_logbook.py +++ b/tests/components/zha/test_logbook.py @@ -3,10 +3,16 @@ from unittest.mock import patch import pytest +from zha.application.const import ZHA_EVENT import zigpy.profiles.zha from zigpy.zcl.clusters import general -from homeassistant.components.zha.core.const import ZHA_EVENT +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import CONF_DEVICE_ID, CONF_UNIQUE_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -40,9 +46,13 @@ def sensor_platform_only(): @pytest.fixture -async def mock_devices(hass, zigpy_device_mock, zha_device_joined): +async def mock_devices(hass: HomeAssistant, setup_zha, zigpy_device_mock): """IAS device fixture.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = 
get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock( { 1: { @@ -54,10 +64,13 @@ async def mock_devices(hass, zigpy_device_mock, zha_device_joined): } ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.update_available(True) - await hass.async_block_till_done() - return zigpy_device, zha_device + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + + return zigpy_device, zha_device_proxy async def test_zha_logbook_event_device_with_triggers( @@ -76,7 +89,7 @@ async def test_zha_logbook_event_device_with_triggers( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.ieee) + ieee_address = str(zha_device.device.ieee) reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) @@ -153,7 +166,7 @@ async def test_zha_logbook_event_device_no_triggers( """Test ZHA logbook events with device and without triggers.""" zigpy_device, zha_device = mock_devices - ieee_address = str(zha_device.ieee) + ieee_address = str(zha_device.device.ieee) reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) hass.config.components.add("recorder") diff --git a/tests/components/zha/test_number.py b/tests/components/zha/test_number.py index 6b302f9cbd9..180f16e9ae2 100644 --- a/tests/components/zha/test_number.py +++ b/tests/components/zha/test_number.py @@ -3,26 +3,22 @@ from unittest.mock import call, patch import pytest -from zigpy.exceptions import ZigbeeException from zigpy.profiles import zha -from zigpy.zcl.clusters import general, lighting +from zigpy.zcl.clusters import general import zigpy.zcl.foundation as zcl_f from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN -from homeassistant.components.zha.core.device import ZHADevice -from homeassistant.const import 
STATE_UNAVAILABLE, EntityCategory, Platform +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - send_attributes_report, - update_attribute_cache, -) +from .common import find_entity_id, send_attributes_report, update_attribute_cache from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @@ -43,49 +39,28 @@ def number_platform_only(): yield -@pytest.fixture -def zigpy_analog_output_device(zigpy_device_mock): - """Zigpy analog_output device.""" - - endpoints = { - 1: { - SIG_EP_TYPE: zha.DeviceType.LEVEL_CONTROL_SWITCH, - SIG_EP_INPUT: [general.AnalogOutput.cluster_id, general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -async def light(zigpy_device_mock): - """Siren fixture.""" - - return zigpy_device_mock( - { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - ], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - } - }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - ) - - -async def test_number( - hass: HomeAssistant, zha_device_joined_restored, zigpy_analog_output_device -) -> None: +async def test_number(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: """Test ZHA number platform.""" - cluster = zigpy_analog_output_device.endpoints.get(1).analog_output + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: 
ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_TYPE: zha.DeviceType.LEVEL_CONTROL_SWITCH, + SIG_EP_INPUT: [ + general.AnalogOutput.cluster_id, + general.Basic.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + + cluster = zigpy_device.endpoints[1].analog_output cluster.PLUGGED_ATTR_READS = { "max_present_value": 100.0, "min_present_value": 1.0, @@ -98,34 +73,14 @@ async def test_number( update_attribute_cache(cluster) cluster.PLUGGED_ATTR_READS["present_value"] = 15.0 - zha_device = await zha_device_joined_restored(zigpy_analog_output_device) - # one for present_value and one for the rest configuration attributes - assert cluster.read_attributes.call_count == 3 - attr_reads = set() - for call_args in cluster.read_attributes.call_args_list: - attr_reads |= set(call_args[0][0]) - assert "max_present_value" in attr_reads - assert "min_present_value" in attr_reads - assert "relinquish_default" in attr_reads - assert "resolution" in attr_reads - assert "description" in attr_reads - assert "engineering_units" in attr_reads - assert "application_type" in attr_reads + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - entity_id = find_entity_id(Platform.NUMBER, zha_device, hass) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.NUMBER, zha_device_proxy, hass) assert entity_id is not None - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the number was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - assert cluster.read_attributes.call_count == 3 - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - assert 
cluster.read_attributes.call_count == 6 - - # test that the state has changed from unavailable to 15.0 assert hass.states.get(entity_id).state == "15.0" # test attributes @@ -134,13 +89,13 @@ async def test_number( assert hass.states.get(entity_id).attributes.get("step") == 1.1 assert hass.states.get(entity_id).attributes.get("icon") == "mdi:percent" assert hass.states.get(entity_id).attributes.get("unit_of_measurement") == "%" + assert ( hass.states.get(entity_id).attributes.get("friendly_name") == "FakeManufacturer FakeModel Number PWM1" ) # change value from device - assert cluster.read_attributes.call_count == 6 await send_attributes_report(hass, cluster, {0x0055: 15}) assert hass.states.get(entity_id).state == "15.0" @@ -165,16 +120,8 @@ async def test_number( ] cluster.PLUGGED_ATTR_READS["present_value"] = 30.0 - # test rejoin - assert cluster.read_attributes.call_count == 6 - await async_test_rejoin(hass, zigpy_analog_output_device, [cluster], (1,)) - assert hass.states.get(entity_id).state == "30.0" - assert cluster.read_attributes.call_count == 9 - # update device value with failed attribute report cluster.PLUGGED_ATTR_READS["present_value"] = 40.0 - # validate the entity still contains old value - assert hass.states.get(entity_id).state == "30.0" await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() @@ -183,251 +130,4 @@ async def test_number( "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == "40.0" - assert cluster.read_attributes.call_count == 10 assert "present_value" in cluster.read_attributes.call_args[0][0] - - -@pytest.mark.parametrize( - ("attr", "initial_value", "new_value"), - [ - ("on_off_transition_time", 20, 5), - ("on_level", 255, 50), - ("on_transition_time", 5, 1), - ("off_transition_time", 5, 1), - ("default_move_rate", 1, 5), - ("start_up_current_level", 254, 125), - ], -) -async def test_level_control_number( - hass: HomeAssistant, - 
entity_registry: er.EntityRegistry, - light: ZHADevice, - zha_device_joined, - attr: str, - initial_value: int, - new_value: int, -) -> None: - """Test ZHA level control number entities - new join.""" - level_control_cluster = light.endpoints[1].level - level_control_cluster.PLUGGED_ATTR_READS = { - attr: initial_value, - } - zha_device = await zha_device_joined(light) - - entity_id = find_entity_id( - Platform.NUMBER, - zha_device, - hass, - qualifier=attr, - ) - assert entity_id is not None - - assert level_control_cluster.read_attributes.mock_calls == [ - call( - [ - "on_off_transition_time", - "on_level", - "on_transition_time", - "off_transition_time", - "default_move_rate", - ], - allow_cache=True, - only_cache=False, - manufacturer=None, - ), - call( - ["start_up_current_level"], - allow_cache=True, - only_cache=False, - manufacturer=None, - ), - call( - [ - "current_level", - ], - allow_cache=False, - only_cache=False, - manufacturer=None, - ), - ] - - state = hass.states.get(entity_id) - assert state - assert state.state == str(initial_value) - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - - # Test number set_value - await hass.services.async_call( - "number", - "set_value", - { - "entity_id": entity_id, - "value": new_value, - }, - blocking=True, - ) - - assert level_control_cluster.write_attributes.mock_calls == [ - call({attr: new_value}, manufacturer=None) - ] - - state = hass.states.get(entity_id) - assert state - assert state.state == str(new_value) - - level_control_cluster.read_attributes.reset_mock() - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - # the mocking doesn't update the attr cache so this flips back to initial value - assert hass.states.get(entity_id).state == str(initial_value) - 
assert level_control_cluster.read_attributes.mock_calls == [ - call( - [attr], - allow_cache=False, - only_cache=False, - manufacturer=None, - ) - ] - - level_control_cluster.write_attributes.reset_mock() - level_control_cluster.write_attributes.side_effect = ZigbeeException - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - "number", - "set_value", - { - "entity_id": entity_id, - "value": new_value, - }, - blocking=True, - ) - - assert level_control_cluster.write_attributes.mock_calls == [ - call({attr: new_value}, manufacturer=None), - call({attr: new_value}, manufacturer=None), - call({attr: new_value}, manufacturer=None), - ] - assert hass.states.get(entity_id).state == str(initial_value) - - -@pytest.mark.parametrize( - ("attr", "initial_value", "new_value"), - [("start_up_color_temperature", 500, 350)], -) -async def test_color_number( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - light: ZHADevice, - zha_device_joined, - attr: str, - initial_value: int, - new_value: int, -) -> None: - """Test ZHA color number entities - new join.""" - color_cluster = light.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - attr: initial_value, - } - zha_device = await zha_device_joined(light) - - entity_id = find_entity_id( - Platform.NUMBER, - zha_device, - hass, - qualifier=attr, - ) - assert entity_id is not None - - assert color_cluster.read_attributes.call_count == 3 - assert ( - call( - [ - "color_temp_physical_min", - "color_temp_physical_max", - "color_capabilities", - "start_up_color_temperature", - "options", - ], - allow_cache=True, - only_cache=False, - manufacturer=None, - ) - in color_cluster.read_attributes.call_args_list - ) - - state = hass.states.get(entity_id) - assert state - assert state.state == str(initial_value) - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - - # Test number set_value - await 
hass.services.async_call( - "number", - "set_value", - { - "entity_id": entity_id, - "value": new_value, - }, - blocking=True, - ) - - assert color_cluster.write_attributes.call_count == 1 - assert color_cluster.write_attributes.call_args[0][0] == { - attr: new_value, - } - - state = hass.states.get(entity_id) - assert state - assert state.state == str(new_value) - - color_cluster.read_attributes.reset_mock() - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - # the mocking doesn't update the attr cache so this flips back to initial value - assert hass.states.get(entity_id).state == str(initial_value) - assert color_cluster.read_attributes.call_count == 1 - assert ( - call( - [attr], - allow_cache=False, - only_cache=False, - manufacturer=None, - ) - in color_cluster.read_attributes.call_args_list - ) - - color_cluster.write_attributes.reset_mock() - color_cluster.write_attributes.side_effect = ZigbeeException - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - "number", - "set_value", - { - "entity_id": entity_id, - "value": new_value, - }, - blocking=True, - ) - - assert color_cluster.write_attributes.mock_calls == [ - call({attr: new_value}, manufacturer=None), - call({attr: new_value}, manufacturer=None), - call({attr: new_value}, manufacturer=None), - ] - assert hass.states.get(entity_id).state == str(initial_value) diff --git a/tests/components/zha/test_radio_manager.py b/tests/components/zha/test_radio_manager.py index 280b3d05daf..0a51aaa6dba 100644 --- a/tests/components/zha/test_radio_manager.py +++ b/tests/components/zha/test_radio_manager.py @@ -1,10 +1,11 @@ """Tests for ZHA config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import pytest import serial.tools.list_ports -from typing_extensions 
import Generator +from zha.application.const import RadioType from zigpy.backups import BackupManager import zigpy.config from zigpy.config import CONF_DEVICE_PATH @@ -12,7 +13,7 @@ import zigpy.types from homeassistant.components.usb import UsbServiceInfo from homeassistant.components.zha import radio_manager -from homeassistant.components.zha.core.const import DOMAIN, RadioType +from homeassistant.components.zha.const import DOMAIN from homeassistant.components.zha.radio_manager import ProbeResult, ZhaRadioManager from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant diff --git a/tests/components/zha/test_registries.py b/tests/components/zha/test_registries.py deleted file mode 100644 index 2b1c0dcc561..00000000000 --- a/tests/components/zha/test_registries.py +++ /dev/null @@ -1,602 +0,0 @@ -"""Test ZHA registries.""" - -from __future__ import annotations - -from unittest import mock - -import pytest -from typing_extensions import Generator -import zigpy.quirks as zigpy_quirks - -from homeassistant.components.zha.binary_sensor import IASZone -from homeassistant.components.zha.core import registries -from homeassistant.components.zha.core.const import ATTR_QUIRK_ID -from homeassistant.components.zha.entity import ZhaEntity -from homeassistant.helpers import entity_registry as er - -MANUFACTURER = "mock manufacturer" -MODEL = "mock model" -QUIRK_CLASS = "mock.test.quirk.class" -QUIRK_ID = "quirk_id" - - -@pytest.fixture -def zha_device(): - """Return a mock of ZHA device.""" - dev = mock.MagicMock() - dev.manufacturer = MANUFACTURER - dev.model = MODEL - dev.quirk_class = QUIRK_CLASS - dev.quirk_id = QUIRK_ID - return dev - - -@pytest.fixture -def cluster_handlers(cluster_handler): - """Return a mock of cluster_handlers.""" - - return [cluster_handler("level", 8), cluster_handler("on_off", 6)] - - -@pytest.mark.parametrize( - ("rule", "matched"), - [ - (registries.MatchRule(), False), - 
(registries.MatchRule(cluster_handler_names={"level"}), True), - (registries.MatchRule(cluster_handler_names={"level", "no match"}), False), - (registries.MatchRule(cluster_handler_names={"on_off"}), True), - (registries.MatchRule(cluster_handler_names={"on_off", "no match"}), False), - (registries.MatchRule(cluster_handler_names={"on_off", "level"}), True), - ( - registries.MatchRule(cluster_handler_names={"on_off", "level", "no match"}), - False, - ), - # test generic_id matching - (registries.MatchRule(generic_ids={"cluster_handler_0x0006"}), True), - (registries.MatchRule(generic_ids={"cluster_handler_0x0008"}), True), - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"} - ), - True, - ), - ( - registries.MatchRule( - generic_ids={ - "cluster_handler_0x0006", - "cluster_handler_0x0008", - "cluster_handler_0x0009", - } - ), - False, - ), - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, - cluster_handler_names={"on_off", "level"}, - ), - True, - ), - # manufacturer matching - (registries.MatchRule(manufacturers="no match"), False), - (registries.MatchRule(manufacturers=MANUFACTURER), True), - ( - registries.MatchRule( - manufacturers="no match", aux_cluster_handlers="aux_cluster_handler" - ), - False, - ), - ( - registries.MatchRule( - manufacturers=MANUFACTURER, aux_cluster_handlers="aux_cluster_handler" - ), - True, - ), - (registries.MatchRule(models=MODEL), True), - (registries.MatchRule(models="no match"), False), - ( - registries.MatchRule( - models=MODEL, aux_cluster_handlers="aux_cluster_handler" - ), - True, - ), - ( - registries.MatchRule( - models="no match", aux_cluster_handlers="aux_cluster_handler" - ), - False, - ), - (registries.MatchRule(quirk_ids=QUIRK_ID), True), - (registries.MatchRule(quirk_ids="no match"), False), - ( - registries.MatchRule( - quirk_ids=QUIRK_ID, aux_cluster_handlers="aux_cluster_handler" - ), - True, - ), - ( - registries.MatchRule( - 
quirk_ids="no match", aux_cluster_handlers="aux_cluster_handler" - ), - False, - ), - # match everything - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, - cluster_handler_names={"on_off", "level"}, - manufacturers=MANUFACTURER, - models=MODEL, - quirk_ids=QUIRK_ID, - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - manufacturers={"random manuf", MANUFACTURER}, - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - manufacturers={"random manuf", "Another manuf"}, - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - manufacturers=lambda x: x == MANUFACTURER, - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - manufacturers=lambda x: x != MANUFACTURER, - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", models={"random model", MODEL} - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", models={"random model", "Another model"} - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", models=lambda x: x == MODEL - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", models=lambda x: x != MODEL - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - quirk_ids={"random quirk", QUIRK_ID}, - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - quirk_ids={"random quirk", "another quirk"}, - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", quirk_ids=lambda x: x == QUIRK_ID - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", quirk_ids=lambda x: x != QUIRK_ID - ), - False, - ), - ( - registries.MatchRule(cluster_handler_names="on_off", quirk_ids=QUIRK_ID), - True, - ), - ], -) -def test_registry_matching(rule, matched, cluster_handlers) -> None: - """Test strict rule matching.""" - assert ( - 
rule.strict_matched(MANUFACTURER, MODEL, cluster_handlers, QUIRK_ID) is matched - ) - - -@pytest.mark.parametrize( - ("rule", "matched"), - [ - (registries.MatchRule(), False), - (registries.MatchRule(cluster_handler_names={"level"}), True), - (registries.MatchRule(cluster_handler_names={"level", "no match"}), False), - (registries.MatchRule(cluster_handler_names={"on_off"}), True), - (registries.MatchRule(cluster_handler_names={"on_off", "no match"}), False), - (registries.MatchRule(cluster_handler_names={"on_off", "level"}), True), - ( - registries.MatchRule(cluster_handler_names={"on_off", "level", "no match"}), - False, - ), - ( - registries.MatchRule( - cluster_handler_names={"on_off", "level"}, models="no match" - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names={"on_off", "level"}, - models="no match", - manufacturers="no match", - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names={"on_off", "level"}, - models="no match", - manufacturers=MANUFACTURER, - ), - True, - ), - # test generic_id matching - (registries.MatchRule(generic_ids={"cluster_handler_0x0006"}), True), - (registries.MatchRule(generic_ids={"cluster_handler_0x0008"}), True), - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"} - ), - True, - ), - ( - registries.MatchRule( - generic_ids={ - "cluster_handler_0x0006", - "cluster_handler_0x0008", - "cluster_handler_0x0009", - } - ), - False, - ), - ( - registries.MatchRule( - generic_ids={ - "cluster_handler_0x0006", - "cluster_handler_0x0008", - "cluster_handler_0x0009", - }, - models="mo match", - ), - False, - ), - ( - registries.MatchRule( - generic_ids={ - "cluster_handler_0x0006", - "cluster_handler_0x0008", - "cluster_handler_0x0009", - }, - models=MODEL, - ), - True, - ), - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, - cluster_handler_names={"on_off", "level"}, - ), - True, - ), - # manufacturer matching - 
(registries.MatchRule(manufacturers="no match"), False), - (registries.MatchRule(manufacturers=MANUFACTURER), True), - (registries.MatchRule(models=MODEL), True), - (registries.MatchRule(models="no match"), False), - (registries.MatchRule(quirk_ids=QUIRK_ID), True), - (registries.MatchRule(quirk_ids="no match"), False), - # match everything - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, - cluster_handler_names={"on_off", "level"}, - manufacturers=MANUFACTURER, - models=MODEL, - quirk_ids=QUIRK_ID, - ), - True, - ), - ], -) -def test_registry_loose_matching(rule, matched, cluster_handlers) -> None: - """Test loose rule matching.""" - assert ( - rule.loose_matched(MANUFACTURER, MODEL, cluster_handlers, QUIRK_ID) is matched - ) - - -def test_match_rule_claim_cluster_handlers_color(cluster_handler) -> None: - """Test cluster handler claiming.""" - ch_color = cluster_handler("color", 0x300) - ch_level = cluster_handler("level", 8) - ch_onoff = cluster_handler("on_off", 6) - - rule = registries.MatchRule( - cluster_handler_names="on_off", aux_cluster_handlers={"color", "level"} - ) - claimed = rule.claim_cluster_handlers([ch_color, ch_level, ch_onoff]) - assert {"color", "level", "on_off"} == {ch.name for ch in claimed} - - -@pytest.mark.parametrize( - ("rule", "match"), - [ - (registries.MatchRule(cluster_handler_names={"level"}), {"level"}), - (registries.MatchRule(cluster_handler_names={"level", "no match"}), {"level"}), - (registries.MatchRule(cluster_handler_names={"on_off"}), {"on_off"}), - (registries.MatchRule(generic_ids="cluster_handler_0x0000"), {"basic"}), - ( - registries.MatchRule( - cluster_handler_names="level", generic_ids="cluster_handler_0x0000" - ), - {"basic", "level"}, - ), - ( - registries.MatchRule(cluster_handler_names={"level", "power"}), - {"level", "power"}, - ), - ( - registries.MatchRule( - cluster_handler_names={"level", "on_off"}, - aux_cluster_handlers={"basic", "power"}, - ), - {"basic", 
"level", "on_off", "power"}, - ), - (registries.MatchRule(cluster_handler_names={"color"}), set()), - ], -) -def test_match_rule_claim_cluster_handlers( - rule, match, cluster_handler, cluster_handlers -) -> None: - """Test cluster handler claiming.""" - ch_basic = cluster_handler("basic", 0) - cluster_handlers.append(ch_basic) - ch_power = cluster_handler("power", 1) - cluster_handlers.append(ch_power) - - claimed = rule.claim_cluster_handlers(cluster_handlers) - assert match == {ch.name for ch in claimed} - - -@pytest.fixture -def entity_registry(): - """Registry fixture.""" - return registries.ZHAEntityRegistry() - - -@pytest.mark.parametrize( - ("manufacturer", "model", "quirk_id", "match_name"), - [ - ("random manufacturer", "random model", "random.class", "OnOff"), - ("random manufacturer", MODEL, "random.class", "OnOffModel"), - (MANUFACTURER, "random model", "random.class", "OnOffManufacturer"), - ("random manufacturer", "random model", QUIRK_ID, "OnOffQuirk"), - (MANUFACTURER, MODEL, "random.class", "OnOffModelManufacturer"), - (MANUFACTURER, "some model", "random.class", "OnOffMultimodel"), - ], -) -def test_weighted_match( - cluster_handler, - entity_registry: er.EntityRegistry, - manufacturer, - model, - quirk_id, - match_name, -) -> None: - """Test weightedd match.""" - - s = mock.sentinel - - @entity_registry.strict_match( - s.component, - cluster_handler_names="on_off", - models={MODEL, "another model", "some model"}, - ) - class OnOffMultimodel: - pass - - @entity_registry.strict_match(s.component, cluster_handler_names="on_off") - class OnOff: - pass - - @entity_registry.strict_match( - s.component, cluster_handler_names="on_off", manufacturers=MANUFACTURER - ) - class OnOffManufacturer: - pass - - @entity_registry.strict_match( - s.component, cluster_handler_names="on_off", models=MODEL - ) - class OnOffModel: - pass - - @entity_registry.strict_match( - s.component, - cluster_handler_names="on_off", - models=MODEL, - manufacturers=MANUFACTURER, - 
) - class OnOffModelManufacturer: - pass - - @entity_registry.strict_match( - s.component, cluster_handler_names="on_off", quirk_ids=QUIRK_ID - ) - class OnOffQuirk: - pass - - ch_on_off = cluster_handler("on_off", 6) - ch_level = cluster_handler("level", 8) - - match, claimed = entity_registry.get_entity( - s.component, manufacturer, model, [ch_on_off, ch_level], quirk_id - ) - - assert match.__name__ == match_name - assert claimed == [ch_on_off] - - -def test_multi_sensor_match( - cluster_handler, entity_registry: er.EntityRegistry -) -> None: - """Test multi-entity match.""" - - s = mock.sentinel - - @entity_registry.multipass_match( - s.binary_sensor, - cluster_handler_names="smartenergy_metering", - ) - class SmartEnergySensor2: - pass - - ch_se = cluster_handler("smartenergy_metering", 0x0702) - ch_illuminati = cluster_handler("illuminance", 0x0401) - - match, claimed = entity_registry.get_multi_entity( - "manufacturer", - "model", - cluster_handlers=[ch_se, ch_illuminati], - quirk_id="quirk_id", - ) - - assert s.binary_sensor in match - assert s.component not in match - assert set(claimed) == {ch_se} - assert {cls.entity_class.__name__ for cls in match[s.binary_sensor]} == { - SmartEnergySensor2.__name__ - } - - @entity_registry.multipass_match( - s.component, - cluster_handler_names="smartenergy_metering", - aux_cluster_handlers="illuminance", - ) - class SmartEnergySensor1: - pass - - @entity_registry.multipass_match( - s.binary_sensor, - cluster_handler_names="smartenergy_metering", - aux_cluster_handlers="illuminance", - ) - class SmartEnergySensor3: - pass - - match, claimed = entity_registry.get_multi_entity( - "manufacturer", - "model", - cluster_handlers={ch_se, ch_illuminati}, - quirk_id="quirk_id", - ) - - assert s.binary_sensor in match - assert s.component in match - assert set(claimed) == {ch_se, ch_illuminati} - assert {cls.entity_class.__name__ for cls in match[s.binary_sensor]} == { - SmartEnergySensor2.__name__, - 
SmartEnergySensor3.__name__, - } - assert {cls.entity_class.__name__ for cls in match[s.component]} == { - SmartEnergySensor1.__name__ - } - - -def iter_all_rules() -> Generator[tuple[registries.MatchRule, list[type[ZhaEntity]]]]: - """Iterate over all match rules and their corresponding entities.""" - - for rules in registries.ZHA_ENTITIES._strict_registry.values(): - for rule, entity in rules.items(): - yield rule, [entity] - - for rules in registries.ZHA_ENTITIES._multi_entity_registry.values(): - for multi in rules.values(): - for rule, entities in multi.items(): - yield rule, entities - - for rules in registries.ZHA_ENTITIES._config_diagnostic_entity_registry.values(): - for multi in rules.values(): - for rule, entities in multi.items(): - yield rule, entities - - -def test_quirk_classes() -> None: - """Make sure that all quirk IDs in components matches exist.""" - - def quirk_class_validator(value): - """Validate quirk IDs during self test.""" - if callable(value): - # Callables cannot be tested - return - - if isinstance(value, (frozenset, set, list)): - for v in value: - # Unpack the value if needed - quirk_class_validator(v) - return - - if value not in all_quirk_ids: - raise ValueError(f"Quirk ID '{value}' does not exist.") - - # get all quirk ID from zigpy quirks registry - all_quirk_ids = [] - for manufacturer in zigpy_quirks._DEVICE_REGISTRY._registry.values(): - for model_quirk_list in manufacturer.values(): - for quirk in model_quirk_list: - quirk_id = getattr(quirk, ATTR_QUIRK_ID, None) - if quirk_id is not None and quirk_id not in all_quirk_ids: - all_quirk_ids.append(quirk_id) - # pylint: disable-next=undefined-loop-variable - del quirk, model_quirk_list, manufacturer - - # validate all quirk IDs used in component match rules - for rule, _ in iter_all_rules(): - quirk_class_validator(rule.quirk_ids) - - -def test_entity_names() -> None: - """Make sure that all handlers expose entities with valid names.""" - - for _, entity_classes in 
iter_all_rules(): - for entity_class in entity_classes: - if hasattr(entity_class, "__attr_name"): - # The entity has a name - assert (name := entity_class.__attr_name) and isinstance(name, str) - elif hasattr(entity_class, "__attr_translation_key"): - assert ( - isinstance(entity_class.__attr_translation_key, str) - and entity_class.__attr_translation_key - ) - elif hasattr(entity_class, "__attr_device_class"): - assert entity_class.__attr_device_class - else: - # The only exception (for now) is IASZone - assert entity_class is IASZone diff --git a/tests/components/zha/test_repairs.py b/tests/components/zha/test_repairs.py index c093fe266bd..c2925161748 100644 --- a/tests/components/zha/test_repairs.py +++ b/tests/components/zha/test_repairs.py @@ -16,7 +16,7 @@ from homeassistant.components.homeassistant_sky_connect.const import ( # pylint DOMAIN as SKYCONNECT_DOMAIN, ) from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from homeassistant.components.zha.core.const import DOMAIN +from homeassistant.components.zha.const import DOMAIN from homeassistant.components.zha.repairs.network_settings_inconsistent import ( ISSUE_INCONSISTENT_NETWORK_SETTINGS, ) @@ -148,7 +148,7 @@ async def test_multipan_firmware_repair( autospec=True, ), patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), patch( @@ -199,7 +199,7 @@ async def test_multipan_firmware_no_repair_on_probe_failure( autospec=True, ), patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -236,7 +236,7 @@ async def test_multipan_firmware_retry_on_probe_ezsp( autospec=True, ), patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -311,7 
+311,7 @@ async def test_inconsistent_settings_keep_new( old_state = network_backup with patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, @@ -390,7 +390,7 @@ async def test_inconsistent_settings_restore_old( old_state = network_backup with patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, diff --git a/tests/components/zha/test_select.py b/tests/components/zha/test_select.py index 70f58ee4e6d..f0f742503e3 100644 --- a/tests/components/zha/test_select.py +++ b/tests/components/zha/test_select.py @@ -1,34 +1,30 @@ """Test ZHA select entities.""" -from typing import Any -from unittest.mock import call, patch +from unittest.mock import patch import pytest -from zhaquirks import ( - DEVICE_TYPE, - ENDPOINTS, - INPUT_CLUSTERS, - OUTPUT_CLUSTERS, - PROFILE_ID, -) -from zigpy.const import SIG_EP_PROFILE +from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from zigpy.profiles import zha -from zigpy.quirks import CustomCluster, CustomDevice -from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 -import zigpy.types as t from zigpy.zcl.clusters import general, security -from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster -from homeassistant.components.zha.select import AqaraMotionSensitivities -from homeassistant.const import STATE_UNKNOWN, EntityCategory, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, restore_state -from homeassistant.util import dt as dt_util +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + 
get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import ( + STATE_UNAVAILABLE, + STATE_UNKNOWN, + EntityCategory, + Platform, +) +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import entity_registry as er -from .common import async_enable_traffic, find_entity_id, send_attributes_report -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .common import find_entity_id -from tests.common import async_mock_load_restore_state_from_storage +from tests.common import mock_restore_cache @pytest.fixture(autouse=True) @@ -50,9 +46,17 @@ def select_select_only(): yield -@pytest.fixture -async def siren(hass, zigpy_device_mock, zha_device_joined_restored): - """Siren fixture.""" +async def test_select( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + setup_zha, + zigpy_device_mock, +) -> None: + """Test ZHA select platform.""" + + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) zigpy_device = zigpy_device_mock( { @@ -62,75 +66,16 @@ async def siren(hass, zigpy_device_mock, zha_device_joined_restored): SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - }, - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].ias_wd - - -@pytest.fixture -async def light(hass, zigpy_device_mock): - """Siren fixture.""" - - return zigpy_device_mock( - { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.OnOff.cluster_id, - ], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - } - }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - ) - - -@pytest.fixture -def core_rs(hass_storage: dict[str, Any]): - """Core.restore_state fixture.""" - - def _storage(entity_id, state): - now = dt_util.utcnow().isoformat() - - 
hass_storage[restore_state.STORAGE_KEY] = { - "version": restore_state.STORAGE_VERSION, - "key": restore_state.STORAGE_KEY, - "data": [ - { - "state": { - "entity_id": entity_id, - "state": str(state), - "last_changed": now, - "last_updated": now, - "context": { - "id": "3c2243ff5f30447eb12e7348cfd5b8ff", - "user_id": None, - }, - }, - "last_seen": now, - } - ], } + ) - return _storage + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - -async def test_select( - hass: HomeAssistant, entity_registry: er.EntityRegistry, siren -) -> None: - """Test ZHA select platform.""" - zha_device, cluster = siren - assert cluster is not None + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier="tone", + Platform.SELECT, zha_device_proxy, hass, qualifier="tone" ) assert entity_id is not None @@ -167,17 +112,32 @@ async def test_select( assert state.state == security.IasWd.Warning.WarningMode.Burglar.name +@pytest.mark.parametrize( + ("restored_state", "expected_state"), + [ + # Unavailable is not restored + (STATE_UNAVAILABLE, STATE_UNKNOWN), + # Normal state is + ( + security.IasWd.Warning.WarningMode.Burglar.name, + security.IasWd.Warning.WarningMode.Burglar.name, + ), + ], +) async def test_select_restore_state( hass: HomeAssistant, + entity_registry: er.EntityRegistry, + setup_zha, zigpy_device_mock, - core_rs, - zha_device_restored, + restored_state: str, + expected_state: str, ) -> None: - """Test ZHA select entity restore state.""" - + """Test ZHA select platform restore state.""" entity_id = "select.fakemanufacturer_fakemodel_default_siren_tone" - core_rs(entity_id, state="Burglar") - await async_mock_load_restore_state_from_storage(hass) + + mock_restore_cache(hass, [State(entity_id, restored_state)]) + + await setup_zha() zigpy_device = 
zigpy_device_mock( { @@ -187,307 +147,14 @@ async def test_select_restore_state( SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - }, - ) - - zha_device = await zha_device_restored(zigpy_device) - cluster = zigpy_device.endpoints[1].ias_wd - assert cluster is not None - entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier="tone", - ) - - assert entity_id is not None - state = hass.states.get(entity_id) - assert state - assert state.state == security.IasWd.Warning.WarningMode.Burglar.name - - -async def test_on_off_select_new_join( - hass: HomeAssistant, entity_registry: er.EntityRegistry, light, zha_device_joined -) -> None: - """Test ZHA on off select - new join.""" - on_off_cluster = light.endpoints[1].on_off - on_off_cluster.PLUGGED_ATTR_READS = { - "start_up_on_off": general.OnOff.StartUpOnOff.On - } - zha_device = await zha_device_joined(light) - select_name = "start_up_behavior" - entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier=select_name, - ) - assert entity_id is not None - - assert on_off_cluster.read_attributes.call_count == 2 - assert ( - call(["start_up_on_off"], allow_cache=True, only_cache=False, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - assert ( - call(["on_off"], allow_cache=False, only_cache=False, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - - state = hass.states.get(entity_id) - assert state - assert state.state == general.OnOff.StartUpOnOff.On.name - - assert state.attributes["options"] == ["Off", "On", "Toggle", "PreviousValue"] - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - - # Test select option with string value - await hass.services.async_call( - "select", - "select_option", - { - "entity_id": entity_id, - "option": general.OnOff.StartUpOnOff.Off.name, - }, - blocking=True, - ) - - assert 
on_off_cluster.write_attributes.call_count == 1 - assert on_off_cluster.write_attributes.call_args[0][0] == { - "start_up_on_off": general.OnOff.StartUpOnOff.Off - } - - state = hass.states.get(entity_id) - assert state - assert state.state == general.OnOff.StartUpOnOff.Off.name - - -async def test_on_off_select_restored( - hass: HomeAssistant, entity_registry: er.EntityRegistry, light, zha_device_restored -) -> None: - """Test ZHA on off select - restored.""" - on_off_cluster = light.endpoints[1].on_off - on_off_cluster.PLUGGED_ATTR_READS = { - "start_up_on_off": general.OnOff.StartUpOnOff.On - } - zha_device = await zha_device_restored(light) - - assert zha_device.is_mains_powered - - assert on_off_cluster.read_attributes.call_count == 4 - # first 2 calls hit cache only - assert ( - call(["start_up_on_off"], allow_cache=True, only_cache=True, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - assert ( - call(["on_off"], allow_cache=True, only_cache=True, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - - # 2nd set of calls can actually read from the device - assert ( - call(["start_up_on_off"], allow_cache=True, only_cache=False, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - assert ( - call(["on_off"], allow_cache=False, only_cache=False, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - - select_name = "start_up_behavior" - entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier=select_name, - ) - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state - assert state.state == general.OnOff.StartUpOnOff.On.name - assert state.attributes["options"] == ["Off", "On", "Toggle", "PreviousValue"] - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - - -async def test_on_off_select_unsupported( - hass: HomeAssistant, light, 
zha_device_joined_restored -) -> None: - """Test ZHA on off select unsupported.""" - - on_off_cluster = light.endpoints[1].on_off - on_off_cluster.add_unsupported_attribute("start_up_on_off") - zha_device = await zha_device_joined_restored(light) - select_name = general.OnOff.StartUpOnOff.__name__ - entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier=select_name.lower(), - ) - assert entity_id is None - - -class MotionSensitivityQuirk(CustomDevice): - """Quirk with motion sensitivity attribute.""" - - class OppleCluster(CustomCluster, ManufacturerSpecificCluster): - """Aqara manufacturer specific cluster.""" - - cluster_id = 0xFCC0 - ep_attribute = "opple_cluster" - attributes = { - 0x010C: ("motion_sensitivity", t.uint8_t, True), - 0x020C: ("motion_sensitivity_disabled", t.uint8_t, True), } - - def __init__(self, *args, **kwargs): - """Initialize.""" - super().__init__(*args, **kwargs) - # populate cache to create config entity - self._attr_cache.update( - { - 0x010C: AqaraMotionSensitivities.Medium, - 0x020C: AqaraMotionSensitivities.Medium, - } - ) - - replacement = { - ENDPOINTS: { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, - INPUT_CLUSTERS: [general.Basic.cluster_id, OppleCluster], - OUTPUT_CLUSTERS: [], - }, - } - } - - -@pytest.fixture -async def zigpy_device_aqara_sensor(hass, zigpy_device_mock, zha_device_joined): - """Device tracker zigpy Aqara motion sensor device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, - } - }, - manufacturer="LUMI", - model="lumi.motion.ac02", - quirk=MotionSensitivityQuirk, ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zigpy_device + gateway = get_zha_gateway(hass) + gateway.get_or_create_device(zigpy_device) + await 
gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - -async def test_on_off_select_attribute_report( - hass: HomeAssistant, light, zha_device_restored, zigpy_device_aqara_sensor -) -> None: - """Test ZHA attribute report parsing for select platform.""" - - zha_device = await zha_device_restored(zigpy_device_aqara_sensor) - cluster = zigpy_device_aqara_sensor.endpoints.get(1).opple_cluster - entity_id = find_entity_id(Platform.SELECT, zha_device, hass) - assert entity_id is not None - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state is in default medium state - assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Medium.name - - # send attribute report from device - await send_attributes_report( - hass, cluster, {"motion_sensitivity": AqaraMotionSensitivities.Low} - ) - assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Low.name - - -( - add_to_registry_v2("Fake_Manufacturer", "Fake_Model") - .replaces(MotionSensitivityQuirk.OppleCluster) - .enum( - "motion_sensitivity", - AqaraMotionSensitivities, - MotionSensitivityQuirk.OppleCluster.cluster_id, - ) - .enum( - "motion_sensitivity_disabled", - AqaraMotionSensitivities, - MotionSensitivityQuirk.OppleCluster.cluster_id, - translation_key="motion_sensitivity", - initially_disabled=True, - ) -) - - -@pytest.fixture -async def zigpy_device_aqara_sensor_v2( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Device tracker zigpy Aqara motion sensor device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - MotionSensitivityQuirk.OppleCluster.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, - } - }, - manufacturer="Fake_Manufacturer", - model="Fake_Model", - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return 
zha_device, zigpy_device.endpoints[1].opple_cluster - - -async def test_on_off_select_attribute_report_v2( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - zigpy_device_aqara_sensor_v2, -) -> None: - """Test ZHA attribute report parsing for select platform.""" - - zha_device, cluster = zigpy_device_aqara_sensor_v2 - assert isinstance(zha_device.device, CustomDeviceV2) - entity_id = find_entity_id( - Platform.SELECT, zha_device, hass, qualifier="motion_sensitivity" - ) - assert entity_id is not None - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state is in default medium state - assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Medium.name - - # send attribute report from device - await send_attributes_report( - hass, cluster, {"motion_sensitivity": AqaraMotionSensitivities.Low} - ) - assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Low.name - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - assert entity_entry.disabled is False - assert entity_entry.translation_key == "motion_sensitivity" + state = hass.states.get(entity_id) + assert state + assert state.state == expected_state diff --git a/tests/components/zha/test_sensor.py b/tests/components/zha/test_sensor.py index 8443c4ced07..2d69cf1ff36 100644 --- a/tests/components/zha/test_sensor.py +++ b/tests/components/zha/test_sensor.py @@ -1,33 +1,20 @@ """Test ZHA sensor.""" -from collections.abc import Callable -from datetime import timedelta -import math -from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest -from zhaquirks.danfoss import thermostat as danfoss_thermostat -import zigpy.profiles.zha -from zigpy.quirks import CustomCluster -from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 -from zigpy.quirks.v2.homeassistant 
import UnitOfMass -import zigpy.types as t +from zigpy.profiles import zha +from zigpy.zcl import Cluster from zigpy.zcl.clusters import general, homeautomation, hvac, measurement, smartenergy from zigpy.zcl.clusters.hvac import Thermostat -from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.components.zha.core import ZHADevice -from homeassistant.components.zha.core.const import ZHA_CLUSTER_HANDLER_READS_PER_REQ -import homeassistant.config as config_util +from homeassistant.components.zha.helpers import get_zha_gateway from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, - CONF_UNIT_SYSTEM, LIGHT_LUX, PERCENTAGE, - STATE_UNAVAILABLE, STATE_UNKNOWN, Platform, UnitOfApparentPower, @@ -37,29 +24,12 @@ from homeassistant.const import ( UnitOfPower, UnitOfPressure, UnitOfTemperature, - UnitOfVolume, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, restore_state -from homeassistant.helpers.entity_component import async_update_entity -from homeassistant.util import dt as dt_util -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - find_entity_ids, - send_attribute_report, - send_attributes_report, -) +from .common import send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - async_mock_load_restore_state_from_storage, -) - ENTITY_ID_PREFIX = "sensor.fakemanufacturer_fakemodel_{}" @@ -76,60 +46,19 @@ def sensor_platform_only(): yield -@pytest.fixture -async def elec_measurement_zigpy_dev(hass: HomeAssistant, zigpy_device_mock): - """Electric Measurement zigpy device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - homeautomation.ElectricalMeasurement.cluster_id, - ], - 
SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SIMPLE_SENSOR, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - }, - ) - zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 - zigpy_device.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS = { - "ac_current_divisor": 10, - "ac_current_multiplier": 1, - "ac_power_divisor": 10, - "ac_power_multiplier": 1, - "ac_voltage_divisor": 10, - "ac_voltage_multiplier": 1, - "measurement_type": 8, - "power_divisor": 10, - "power_multiplier": 1, - } - return zigpy_device - - -@pytest.fixture -async def elec_measurement_zha_dev(elec_measurement_zigpy_dev, zha_device_joined): - """Electric Measurement ZHA device.""" - - zha_dev = await zha_device_joined(elec_measurement_zigpy_dev) - zha_dev.available = True - return zha_dev - - -async def async_test_humidity(hass: HomeAssistant, cluster, entity_id): +async def async_test_humidity(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test humidity sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 1000, 2: 100}) assert_state(hass, entity_id, "10.0", PERCENTAGE) -async def async_test_temperature(hass: HomeAssistant, cluster, entity_id): +async def async_test_temperature(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test temperature sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 2900, 2: 100}) assert_state(hass, entity_id, "29.0", UnitOfTemperature.CELSIUS) -async def async_test_pressure(hass: HomeAssistant, cluster, entity_id): +async def async_test_pressure(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test pressure sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 1000, 2: 10000}) assert_state(hass, entity_id, "1000", UnitOfPressure.HPA) @@ -138,7 +67,7 @@ async def async_test_pressure(hass: HomeAssistant, cluster, entity_id): assert_state(hass, entity_id, "1000", UnitOfPressure.HPA) -async def async_test_illuminance(hass: HomeAssistant, cluster, entity_id): +async def 
async_test_illuminance(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test illuminance sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 10, 2: 20}) assert_state(hass, entity_id, "1", LIGHT_LUX) @@ -150,7 +79,7 @@ async def async_test_illuminance(hass: HomeAssistant, cluster, entity_id): assert_state(hass, entity_id, "unknown", LIGHT_LUX) -async def async_test_metering(hass: HomeAssistant, cluster, entity_id): +async def async_test_metering(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test Smart Energy metering sensor.""" await send_attributes_report(hass, cluster, {1025: 1, 1024: 12345, 1026: 100}) assert_state(hass, entity_id, "12345.0", None) @@ -159,13 +88,14 @@ async def async_test_metering(hass: HomeAssistant, cluster, entity_id): await send_attributes_report(hass, cluster, {1024: 12346, "status": 64 + 8}) assert_state(hass, entity_id, "12346.0", None) + assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|POWER_FAILURE", "POWER_FAILURE|SERVICE_DISCONNECT", ) await send_attributes_report( - hass, cluster, {"status": 64 + 8, "metering_device_type": 1} + hass, cluster, {"metering_device_type": 1, "status": 64 + 8} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|NOT_DEFINED", @@ -173,7 +103,7 @@ async def async_test_metering(hass: HomeAssistant, cluster, entity_id): ) await send_attributes_report( - hass, cluster, {"status": 64 + 8, "metering_device_type": 2} + hass, cluster, {"metering_device_type": 2, "status": 64 + 8} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|PIPE_EMPTY", @@ -181,7 +111,7 @@ async def async_test_metering(hass: HomeAssistant, cluster, entity_id): ) await send_attributes_report( - hass, cluster, {"status": 64 + 8, "metering_device_type": 5} + hass, cluster, {"metering_device_type": 5, "status": 64 + 8} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|TEMPERATURE_SENSOR", 
@@ -190,13 +120,13 @@ async def async_test_metering(hass: HomeAssistant, cluster, entity_id): # Status for other meter types await send_attributes_report( - hass, cluster, {"status": 32, "metering_device_type": 4} + hass, cluster, {"metering_device_type": 4, "status": 32} ) assert hass.states.get(entity_id).attributes["status"] in ("", "32") async def async_test_smart_energy_summation_delivered( - hass: HomeAssistant, cluster, entity_id + hass: HomeAssistant, cluster: Cluster, entity_id: str ): """Test SmartEnergy Summation delivered sensor.""" @@ -213,7 +143,7 @@ async def async_test_smart_energy_summation_delivered( async def async_test_smart_energy_summation_received( - hass: HomeAssistant, cluster, entity_id + hass: HomeAssistant, cluster: Cluster, entity_id: str ): """Test SmartEnergy Summation received sensor.""" @@ -229,7 +159,9 @@ async def async_test_smart_energy_summation_received( ) -async def async_test_electrical_measurement(hass: HomeAssistant, cluster, entity_id): +async def async_test_electrical_measurement( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -248,10 +180,12 @@ async def async_test_electrical_measurement(hass: HomeAssistant, cluster, entity assert "active_power_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x050D: 88, 10: 5000}) - assert hass.states.get(entity_id).attributes["active_power_max"] == "8.8" + assert hass.states.get(entity_id).attributes["active_power_max"] == 8.8 -async def async_test_em_apparent_power(hass: HomeAssistant, cluster, entity_id): +async def async_test_em_apparent_power( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement Apparent Power sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -269,7 +203,9 @@ 
async def async_test_em_apparent_power(hass: HomeAssistant, cluster, entity_id): assert_state(hass, entity_id, "9.9", UnitOfApparentPower.VOLT_AMPERE) -async def async_test_em_power_factor(hass: HomeAssistant, cluster, entity_id): +async def async_test_em_power_factor( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement Power Factor sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -287,7 +223,9 @@ async def async_test_em_power_factor(hass: HomeAssistant, cluster, entity_id): assert_state(hass, entity_id, "99", PERCENTAGE) -async def async_test_em_rms_current(hass: HomeAssistant, cluster, entity_id): +async def async_test_em_rms_current( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement RMS Current sensor.""" await send_attributes_report(hass, cluster, {0: 1, 0x0508: 1234, 10: 1000}) @@ -302,10 +240,12 @@ async def async_test_em_rms_current(hass: HomeAssistant, cluster, entity_id): assert "rms_current_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x050A: 88, 10: 5000}) - assert hass.states.get(entity_id).attributes["rms_current_max"] == "8.8" + assert hass.states.get(entity_id).attributes["rms_current_max"] == 8.8 -async def async_test_em_rms_voltage(hass: HomeAssistant, cluster, entity_id): +async def async_test_em_rms_voltage( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement RMS Voltage sensor.""" await send_attributes_report(hass, cluster, {0: 1, 0x0505: 1234, 10: 1000}) @@ -320,10 +260,12 @@ async def async_test_em_rms_voltage(hass: HomeAssistant, cluster, entity_id): assert "rms_voltage_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x0507: 888, 10: 5000}) - assert hass.states.get(entity_id).attributes["rms_voltage_max"] == "8.9" + assert 
hass.states.get(entity_id).attributes["rms_voltage_max"] == 8.9 -async def async_test_powerconfiguration(hass: HomeAssistant, cluster, entity_id): +async def async_test_powerconfiguration( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test powerconfiguration/battery sensor.""" await send_attributes_report(hass, cluster, {33: 98}) assert_state(hass, entity_id, "49", "%") @@ -334,7 +276,9 @@ async def async_test_powerconfiguration(hass: HomeAssistant, cluster, entity_id) assert hass.states.get(entity_id).attributes["battery_voltage"] == 2.0 -async def async_test_powerconfiguration2(hass: HomeAssistant, cluster, entity_id): +async def async_test_powerconfiguration2( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test powerconfiguration/battery sensor.""" await send_attributes_report(hass, cluster, {33: -1}) assert_state(hass, entity_id, STATE_UNKNOWN, "%") @@ -346,13 +290,17 @@ async def async_test_powerconfiguration2(hass: HomeAssistant, cluster, entity_id assert_state(hass, entity_id, "49", "%") -async def async_test_device_temperature(hass: HomeAssistant, cluster, entity_id): +async def async_test_device_temperature( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test temperature sensor.""" await send_attributes_report(hass, cluster, {0: 2900}) assert_state(hass, entity_id, "29.0", UnitOfTemperature.CELSIUS) -async def async_test_setpoint_change_source(hass, cluster, entity_id): +async def async_test_setpoint_change_source( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test the translation of numerical state into enum text.""" await send_attributes_report( hass, cluster, {Thermostat.AttributeDefs.setpoint_change_source.id: 0x01} @@ -361,7 +309,9 @@ async def async_test_setpoint_change_source(hass, cluster, entity_id): assert hass_state.state == "Schedule" -async def async_test_pi_heating_demand(hass, cluster, entity_id): +async def async_test_pi_heating_demand( + hass: HomeAssistant, cluster: 
Cluster, entity_id: str +): """Test pi heating demand is correctly returned.""" await send_attributes_report( hass, cluster, {Thermostat.AttributeDefs.pi_heating_demand.id: 1} @@ -568,8 +518,8 @@ async def async_test_pi_heating_demand(hass, cluster, entity_id): ) async def test_sensor( hass: HomeAssistant, + setup_zha, zigpy_device_mock, - zha_device_joined_restored, cluster_id, entity_suffix, test_func, @@ -580,14 +530,18 @@ async def test_sensor( ) -> None: """Test ZHA sensor platform.""" + await setup_zha() + gateway = get_zha_gateway(hass) + zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, } - } + }, ) cluster = zigpy_device.endpoints[1].in_clusters[cluster_id] if unsupported_attrs: @@ -600,26 +554,27 @@ async def test_sensor( # this one is mains powered zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 cluster.PLUGGED_ATTR_READS = read_plug - zha_device = await zha_device_joined_restored(zigpy_device) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) entity_id = ENTITY_ID_PREFIX.format(entity_suffix) - await async_enable_traffic(hass, [zha_device], enabled=False) - await hass.async_block_till_done() - # ensure the sensor entity was created - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + } + ) - # allow traffic to flow through the gateway and devices - await async_enable_traffic(hass, [zha_device]) - - # test that the sensor now have their correct initial state (mostly unknown) assert hass.states.get(entity_id).state == initial_sensor_state # test 
sensor associated logic await test_func(hass, cluster, entity_id) - # test rejoin - await async_test_rejoin(hass, zigpy_device, [cluster], (report_count,)) - def assert_state(hass: HomeAssistant, entity_id, state, unit_of_measurement): """Check that the state is what is expected. @@ -630,748 +585,3 @@ def assert_state(hass: HomeAssistant, entity_id, state, unit_of_measurement): hass_state = hass.states.get(entity_id) assert hass_state.state == state assert hass_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == unit_of_measurement - - -@pytest.fixture -def hass_ms(hass: HomeAssistant) -> Callable[[str], HomeAssistant]: - """Hass instance with measurement system.""" - - async def _hass_ms(meas_sys: str) -> HomeAssistant: - await config_util.async_process_ha_core_config( - hass, {CONF_UNIT_SYSTEM: meas_sys} - ) - await hass.async_block_till_done() - return hass - - return _hass_ms - - -@pytest.fixture -def core_rs(hass_storage: dict[str, Any]): - """Core.restore_state fixture.""" - - def _storage(entity_id, uom, state): - now = dt_util.utcnow().isoformat() - - hass_storage[restore_state.STORAGE_KEY] = { - "version": restore_state.STORAGE_VERSION, - "key": restore_state.STORAGE_KEY, - "data": [ - { - "state": { - "entity_id": entity_id, - "state": str(state), - "attributes": {ATTR_UNIT_OF_MEASUREMENT: uom}, - "last_changed": now, - "last_updated": now, - "context": { - "id": "3c2243ff5f30447eb12e7348cfd5b8ff", - "user_id": None, - }, - }, - "last_seen": now, - } - ], - } - - return _storage - - -@pytest.mark.parametrize( - ("uom", "raw_temp", "expected", "restore"), - [ - (UnitOfTemperature.CELSIUS, 2900, 29, False), - (UnitOfTemperature.CELSIUS, 2900, 29, True), - (UnitOfTemperature.FAHRENHEIT, 2900, 84, False), - (UnitOfTemperature.FAHRENHEIT, 2900, 84, True), - ], -) -async def test_temp_uom( - hass: HomeAssistant, - uom: UnitOfTemperature, - raw_temp: int, - expected: int, - restore: bool, - hass_ms: Callable[[str], HomeAssistant], - core_rs, - zigpy_device_mock, - 
zha_device_restored, -) -> None: - """Test ZHA temperature sensor unit of measurement.""" - - entity_id = "sensor.fake1026_fakemodel1026_004f3202_temperature" - if restore: - core_rs(entity_id, uom, state=(expected - 2)) - await async_mock_load_restore_state_from_storage(hass) - - hass = await hass_ms("metric" if uom == UnitOfTemperature.CELSIUS else "imperial") - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - measurement.TemperatureMeasurement.cluster_id, - general.Basic.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - ) - cluster = zigpy_device.endpoints[1].temperature - zha_device = await zha_device_restored(zigpy_device) - entity_id = find_entity_id(Platform.SENSOR, zha_device, hass) - - if not restore: - await async_enable_traffic(hass, [zha_device], enabled=False) - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and devices - await async_enable_traffic(hass, [zha_device]) - - # test that the sensors now have a state of unknown - if not restore: - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - await send_attribute_report(hass, cluster, 0, raw_temp) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state is not None - assert round(float(state.state)) == expected - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == uom - - -@patch( - "zigpy.zcl.ClusterPersistingListener", - MagicMock(), -) -async def test_electrical_measurement_init( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, -) -> None: - """Test proper initialization of the electrical measurement cluster.""" - - cluster_id = homeautomation.ElectricalMeasurement.cluster_id - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - ) - cluster = 
zigpy_device.endpoints[1].in_clusters[cluster_id] - zha_device = await zha_device_joined(zigpy_device) - entity_id = "sensor.fakemanufacturer_fakemodel_power" - - # allow traffic to flow through the gateway and devices - await async_enable_traffic(hass, [zha_device]) - - # test that the sensor now have a state of unknown - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - await send_attributes_report(hass, cluster, {0: 1, 1291: 100, 10: 1000}) - assert int(hass.states.get(entity_id).state) == 100 - - cluster_handler = zha_device._endpoints[1].all_cluster_handlers["1:0x0b04"] - assert cluster_handler.ac_power_divisor == 1 - assert cluster_handler.ac_power_multiplier == 1 - - # update power divisor - await send_attributes_report(hass, cluster, {0: 1, 1291: 20, 0x0403: 5, 10: 1000}) - assert cluster_handler.ac_power_divisor == 5 - assert cluster_handler.ac_power_multiplier == 1 - assert hass.states.get(entity_id).state == "4.0" - - await send_attributes_report(hass, cluster, {0: 1, 1291: 30, 0x0605: 10, 10: 1000}) - assert cluster_handler.ac_power_divisor == 10 - assert cluster_handler.ac_power_multiplier == 1 - assert hass.states.get(entity_id).state == "3.0" - - # update power multiplier - await send_attributes_report(hass, cluster, {0: 1, 1291: 20, 0x0402: 6, 10: 1000}) - assert cluster_handler.ac_power_divisor == 10 - assert cluster_handler.ac_power_multiplier == 6 - assert hass.states.get(entity_id).state == "12.0" - - await send_attributes_report(hass, cluster, {0: 1, 1291: 30, 0x0604: 20, 10: 1000}) - assert cluster_handler.ac_power_divisor == 10 - assert cluster_handler.ac_power_multiplier == 20 - assert hass.states.get(entity_id).state == "60.0" - - -@pytest.mark.parametrize( - ("cluster_id", "unsupported_attributes", "entity_ids", "missing_entity_ids"), - [ - ( - homeautomation.ElectricalMeasurement.cluster_id, - {"apparent_power", "rms_voltage", "rms_current"}, - { - "power", - "ac_frequency", - "power_factor", - }, - { - "apparent_power", - 
"voltage", - "current", - }, - ), - ( - homeautomation.ElectricalMeasurement.cluster_id, - {"apparent_power", "rms_current", "ac_frequency", "power_factor"}, - {"voltage", "power"}, - { - "apparent_power", - "current", - "ac_frequency", - "power_factor", - }, - ), - ( - homeautomation.ElectricalMeasurement.cluster_id, - set(), - { - "voltage", - "power", - "apparent_power", - "current", - "ac_frequency", - "power_factor", - }, - set(), - ), - ( - smartenergy.Metering.cluster_id, - { - "instantaneous_demand", - }, - { - "summation_delivered", - }, - { - "instantaneous_demand", - }, - ), - ( - smartenergy.Metering.cluster_id, - {"instantaneous_demand", "current_summ_delivered"}, - {}, - { - "instantaneous_demand", - "summation_delivered", - }, - ), - ( - smartenergy.Metering.cluster_id, - {}, - { - "instantaneous_demand", - "summation_delivered", - }, - {}, - ), - ], -) -async def test_unsupported_attributes_sensor( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined_restored, - cluster_id, - unsupported_attributes, - entity_ids, - missing_entity_ids, -) -> None: - """Test ZHA sensor platform.""" - - entity_ids = {ENTITY_ID_PREFIX.format(e) for e in entity_ids} - missing_entity_ids = {ENTITY_ID_PREFIX.format(e) for e in missing_entity_ids} - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - ) - cluster = zigpy_device.endpoints[1].in_clusters[cluster_id] - if cluster_id == smartenergy.Metering.cluster_id: - # this one is mains powered - zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 - for attr in unsupported_attributes: - cluster.add_unsupported_attribute(attr) - zha_device = await zha_device_joined_restored(zigpy_device) - - await async_enable_traffic(hass, [zha_device], enabled=False) - await hass.async_block_till_done() - present_entity_ids = set(find_entity_ids(Platform.SENSOR, zha_device, hass)) - 
assert present_entity_ids == entity_ids - assert missing_entity_ids not in present_entity_ids - - -@pytest.mark.parametrize( - ("raw_uom", "raw_value", "expected_state", "expected_uom"), - [ - ( - 1, - 12320, - "1.23", - UnitOfVolume.CUBIC_METERS, - ), - ( - 1, - 1232000, - "123.2", - UnitOfVolume.CUBIC_METERS, - ), - ( - 3, - 2340, - "0.65", - UnitOfVolume.CUBIC_METERS, - ), - ( - 3, - 2360, - "0.68", - UnitOfVolume.CUBIC_METERS, - ), - ( - 8, - 23660, - "2.37", - UnitOfPressure.KPA, - ), - ( - 0, - 9366, - "0.937", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 999, - "0.1", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 10091, - "1.009", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 10099, - "1.01", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 100999, - "10.1", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 100023, - "10.002", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 102456, - "10.246", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 5, - 102456, - "10.25", - "IMP gal", - ), - ( - 7, - 50124, - "5.01", - UnitOfVolume.LITERS, - ), - ], -) -async def test_se_summation_uom( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - raw_uom, - raw_value, - expected_state, - expected_uom, -) -> None: - """Test ZHA smart energy summation.""" - - entity_id = ENTITY_ID_PREFIX.format("summation_delivered") - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - smartenergy.Metering.cluster_id, - general.Basic.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SIMPLE_SENSOR, - } - } - ) - zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 - - cluster = zigpy_device.endpoints[1].in_clusters[smartenergy.Metering.cluster_id] - for attr in ("instanteneous_demand",): - cluster.add_unsupported_attribute(attr) - cluster.PLUGGED_ATTR_READS = { - "current_summ_delivered": raw_value, - "demand_formatting": 0xF9, - "divisor": 10000, - "metering_device_type": 0x00, - "multiplier": 1, - "status": 0x00, - 
"summation_formatting": 0b1_0111_010, - "unit_of_measure": raw_uom, - } - await zha_device_joined(zigpy_device) - - assert_state(hass, entity_id, expected_state, expected_uom) - - -@pytest.mark.parametrize( - ("raw_measurement_type", "expected_type"), - [ - (1, "ACTIVE_MEASUREMENT"), - (8, "PHASE_A_MEASUREMENT"), - (9, "ACTIVE_MEASUREMENT, PHASE_A_MEASUREMENT"), - ( - 15, - ( - "ACTIVE_MEASUREMENT, REACTIVE_MEASUREMENT, APPARENT_MEASUREMENT," - " PHASE_A_MEASUREMENT" - ), - ), - ], -) -async def test_elec_measurement_sensor_type( - hass: HomeAssistant, - elec_measurement_zigpy_dev, - raw_measurement_type, - expected_type, - zha_device_joined, -) -> None: - """Test ZHA electrical measurement sensor type.""" - - entity_id = ENTITY_ID_PREFIX.format("power") - zigpy_dev = elec_measurement_zigpy_dev - zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS[ - "measurement_type" - ] = raw_measurement_type - - await zha_device_joined(zigpy_dev) - - state = hass.states.get(entity_id) - assert state is not None - assert state.attributes["measurement_type"] == expected_type - - -async def test_elec_measurement_sensor_polling( - hass: HomeAssistant, - elec_measurement_zigpy_dev, - zha_device_joined_restored, -) -> None: - """Test ZHA electrical measurement sensor polling.""" - - entity_id = ENTITY_ID_PREFIX.format("power") - zigpy_dev = elec_measurement_zigpy_dev - zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS["active_power"] = ( - 20 - ) - - await zha_device_joined_restored(zigpy_dev) - - # test that the sensor has an initial state of 2.0 - state = hass.states.get(entity_id) - assert state.state == "2.0" - - # update the value for the power reading - zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS["active_power"] = ( - 60 - ) - - # ensure the state is still 2.0 - state = hass.states.get(entity_id) - assert state.state == "2.0" - - # let the polling happen - future = dt_util.utcnow() + timedelta(seconds=90) - 
async_fire_time_changed(hass, future) - await hass.async_block_till_done(wait_background_tasks=True) - - # ensure the state has been updated to 6.0 - state = hass.states.get(entity_id) - assert state.state == "6.0" - - -@pytest.mark.parametrize( - "supported_attributes", - [ - set(), - { - "active_power", - "active_power_max", - "rms_current", - "rms_current_max", - "rms_voltage", - "rms_voltage_max", - }, - { - "active_power", - }, - { - "active_power", - "active_power_max", - }, - { - "rms_current", - "rms_current_max", - }, - { - "rms_voltage", - "rms_voltage_max", - }, - ], -) -async def test_elec_measurement_skip_unsupported_attribute( - hass: HomeAssistant, - elec_measurement_zha_dev, - supported_attributes, -) -> None: - """Test ZHA electrical measurement skipping update of unsupported attributes.""" - - entity_id = ENTITY_ID_PREFIX.format("power") - zha_dev = elec_measurement_zha_dev - - cluster = zha_dev.device.endpoints[1].electrical_measurement - - all_attrs = { - "active_power", - "active_power_max", - "apparent_power", - "rms_current", - "rms_current_max", - "rms_voltage", - "rms_voltage_max", - "power_factor", - "ac_frequency", - "ac_frequency_max", - } - for attr in all_attrs - supported_attributes: - cluster.add_unsupported_attribute(attr) - cluster.read_attributes.reset_mock() - - await async_update_entity(hass, entity_id) - await hass.async_block_till_done() - assert cluster.read_attributes.call_count == math.ceil( - len(supported_attributes) / ZHA_CLUSTER_HANDLER_READS_PER_REQ - ) - read_attrs = { - a for call in cluster.read_attributes.call_args_list for a in call[0][0] - } - assert read_attrs == supported_attributes - - -class OppleCluster(CustomCluster, ManufacturerSpecificCluster): - """Aqara manufacturer specific cluster.""" - - cluster_id = 0xFCC0 - ep_attribute = "opple_cluster" - attributes = { - 0x010C: ("last_feeding_size", t.uint16_t, True), - } - - def __init__(self, *args, **kwargs) -> None: - """Initialize.""" - 
super().__init__(*args, **kwargs) - # populate cache to create config entity - self._attr_cache.update({0x010C: 10}) - - -( - add_to_registry_v2("Fake_Manufacturer_sensor", "Fake_Model_sensor") - .replaces(OppleCluster) - .sensor( - "last_feeding_size", - OppleCluster.cluster_id, - divisor=1, - multiplier=1, - unit=UnitOfMass.GRAMS, - ) -) - - -@pytest.fixture -async def zigpy_device_aqara_sensor_v2( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Device tracker zigpy Aqara motion sensor device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - OppleCluster.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.OCCUPANCY_SENSOR, - } - }, - manufacturer="Fake_Manufacturer_sensor", - model="Fake_Model_sensor", - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].opple_cluster - - -async def test_last_feeding_size_sensor_v2( - hass: HomeAssistant, zigpy_device_aqara_sensor_v2 -) -> None: - """Test quirks defined sensor.""" - - zha_device, cluster = zigpy_device_aqara_sensor_v2 - assert isinstance(zha_device.device, CustomDeviceV2) - entity_id = find_entity_id( - Platform.SENSOR, zha_device, hass, qualifier="last_feeding_size" - ) - assert entity_id is not None - - await send_attributes_report(hass, cluster, {0x010C: 1}) - assert_state(hass, entity_id, "1.0", UnitOfMass.GRAMS.value) - - await send_attributes_report(hass, cluster, {0x010C: 5}) - assert_state(hass, entity_id, "5.0", UnitOfMass.GRAMS.value) - - -@pytest.fixture -async def coordinator(hass: HomeAssistant, zigpy_device_mock, zha_device_joined): - """Test ZHA fan platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Groups.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.CONTROL_BRIDGE, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - }, - ieee="00:15:8d:00:02:32:4f:32", - 
nwk=0x0000, - node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -async def test_device_counter_sensors( - hass: HomeAssistant, - coordinator: ZHADevice, - entity_registry: er.EntityRegistry, - config_entry: MockConfigEntry, -) -> None: - """Test quirks defined sensor.""" - - entity_id = "sensor.coordinator_manufacturer_coordinator_model_counter_1" - state = hass.states.get(entity_id) - assert state is None - - # Enable the entity. - entity_registry.async_update_entity(entity_id, disabled_by=None) - await hass.config_entries.async_reload(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "1" - - # simulate counter increment on application - coordinator.device.application.state.counters["ezsp_counters"][ - "counter_1" - ].increment() - - next_update = dt_util.utcnow() + timedelta(seconds=60) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "2" - - -@pytest.fixture -async def zigpy_device_danfoss_thermostat( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Device tracker zigpy danfoss thermostat device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.PowerConfiguration.cluster_id, - general.Identify.cluster_id, - general.Time.cluster_id, - general.PollControl.cluster_id, - Thermostat.cluster_id, - hvac.UserInterface.cluster_id, - homeautomation.Diagnostic.cluster_id, - ], - SIG_EP_OUTPUT: [general.Basic.cluster_id, general.Ota.cluster_id], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, - } - }, - manufacturer="Danfoss", - model="eTRV0100", - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return 
zha_device, zigpy_device - - -async def test_danfoss_thermostat_sw_error( - hass: HomeAssistant, zigpy_device_danfoss_thermostat -) -> None: - """Test quirks defined thermostat.""" - - zha_device, zigpy_device = zigpy_device_danfoss_thermostat - - entity_id = find_entity_id( - Platform.SENSOR, zha_device, hass, qualifier="software_error" - ) - assert entity_id is not None - - cluster = zigpy_device.endpoints[1].diagnostic - - await send_attributes_report( - hass, - cluster, - { - danfoss_thermostat.DanfossDiagnosticCluster.AttributeDefs.sw_error_code.id: 0x0001 - }, - ) - - hass_state = hass.states.get(entity_id) - assert hass_state.state == "something" - assert hass_state.attributes["Top_pcb_sensor_error"] diff --git a/tests/components/zha/test_silabs_multiprotocol.py b/tests/components/zha/test_silabs_multiprotocol.py index 03c845269e0..a5f2db22ce5 100644 --- a/tests/components/zha/test_silabs_multiprotocol.py +++ b/tests/components/zha/test_silabs_multiprotocol.py @@ -11,7 +11,7 @@ import zigpy.state from homeassistant.components import zha from homeassistant.components.zha import silabs_multiprotocol -from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.components.zha.helpers import get_zha_data from homeassistant.core import HomeAssistant if TYPE_CHECKING: @@ -38,8 +38,7 @@ async def test_async_get_channel_missing( """Test reading channel with an inactive ZHA installation, no valid channel.""" await setup_zha() - gateway = get_zha_gateway(hass) - await zha.async_unload_entry(hass, gateway.config_entry) + await zha.async_unload_entry(hass, get_zha_data(hass).config_entry) # Network settings were never loaded for whatever reason zigpy_app_controller.state.network_info = zigpy.state.NetworkInfo() diff --git a/tests/components/zha/test_siren.py b/tests/components/zha/test_siren.py index 652955ef98d..f9837a7d016 100644 --- a/tests/components/zha/test_siren.py +++ b/tests/components/zha/test_siren.py @@ -4,7 +4,11 @@ from 
datetime import timedelta from unittest.mock import ANY, call, patch import pytest -from zigpy.const import SIG_EP_PROFILE +from zha.application.const import ( + WARNING_DEVICE_MODE_EMERGENCY_PANIC, + WARNING_DEVICE_SOUND_MEDIUM, +) +from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from zigpy.profiles import zha import zigpy.zcl from zigpy.zcl.clusters import general, security @@ -16,16 +20,17 @@ from homeassistant.components.siren import ( ATTR_VOLUME_LEVEL, DOMAIN as SIREN_DOMAIN, ) -from homeassistant.components.zha.core.const import ( - WARNING_DEVICE_MODE_EMERGENCY_PANIC, - WARNING_DEVICE_SOUND_MEDIUM, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from .common import async_enable_traffic, find_entity_id -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .common import find_entity_id from tests.common import async_fire_time_changed @@ -46,9 +51,12 @@ def siren_platform_only(): yield -@pytest.fixture -async def siren(hass, zigpy_device_mock, zha_device_joined_restored): - """Siren fixture.""" +async def test_siren(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: + """Test zha siren platform.""" + + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) zigpy_device = zigpy_device_mock( { @@ -58,30 +66,18 @@ async def siren(hass, zigpy_device_mock, zha_device_joined_restored): SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - }, + } ) - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].ias_wd + gateway.get_or_create_device(zigpy_device) + 
await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - -async def test_siren(hass: HomeAssistant, siren) -> None: - """Test zha siren platform.""" - - zha_device, cluster = siren - assert cluster is not None - entity_id = find_entity_id(Platform.SIREN, zha_device, hass) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.SIREN, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].ias_wd assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA diff --git a/tests/components/zha/test_switch.py b/tests/components/zha/test_switch.py index c8c2842c400..cc4e41485f9 100644 --- a/tests/components/zha/test_switch.py +++ b/tests/components/zha/test_switch.py @@ -1,51 +1,28 @@ """Test ZHA switch.""" -from unittest.mock import AsyncMock, call, patch +from unittest.mock import call, patch import pytest -from zhaquirks.const import ( - DEVICE_TYPE, - ENDPOINTS, - INPUT_CLUSTERS, - OUTPUT_CLUSTERS, - PROFILE_ID, -) -from zigpy.exceptions import ZigbeeException from zigpy.profiles import zha -from zigpy.quirks import _DEVICE_REGISTRY, CustomCluster, CustomDevice -from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 -import zigpy.types as t -from zigpy.zcl.clusters import closures, general -from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster +from zigpy.zcl.clusters import general import zigpy.zcl.foundation as zcl_f from 
homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.components.zha.core.group import GroupMember -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_component import async_update_entity from homeassistant.setup import async_setup_component -from .common import ( - async_enable_traffic, - async_find_group_entity_id, - async_test_rejoin, - async_wait_for_updates, - find_entity_id, - send_attributes_report, - update_attribute_cache, -) +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import MockConfigEntry - ON = 1 OFF = 0 -IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" -IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" @pytest.fixture(autouse=True) @@ -63,104 +40,51 @@ def switch_platform_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -def zigpy_cover_device(zigpy_device_mock): - """Zigpy cover device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, - SIG_EP_INPUT: [ - general.Basic.cluster_id, - closures.WindowCovering.cluster_id, - ], - SIG_EP_OUTPUT: [], - } - } - return 
zigpy_device_mock(endpoints) - - -@pytest.fixture -async def device_switch_1(hass, zigpy_device_mock, zha_device_joined): +async def test_switch(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: """Test ZHA switch platform.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock( { 1: { - SIG_EP_INPUT: [general.OnOff.cluster_id, general.Groups.cluster_id], + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.OnOff.cluster_id, + general.Groups.cluster_id, + ], SIG_EP_OUTPUT: [], SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, } }, - ieee=IEEE_GROUPABLE_DEVICE, + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zha_device + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) -@pytest.fixture -async def device_switch_2(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA switch platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.OnOff.cluster_id, general.Groups.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - ieee=IEEE_GROUPABLE_DEVICE2, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zha_device - - -async def test_switch( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device -) -> None: - """Test ZHA switch platform.""" - - zha_device = await zha_device_joined_restored(zigpy_device) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.SWITCH, zha_device_proxy, hass) cluster = 
zigpy_device.endpoints[1].on_off - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on at switch - await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) + await send_attributes_report( + hass, cluster, {general.OnOff.AttributeDefs.on_off.id: ON} + ) assert hass.states.get(entity_id).state == STATE_ON # turn off at switch - await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) + await send_attributes_report( + hass, cluster, {general.OnOff.AttributeDefs.on_off.id: OFF} + ) assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA @@ -217,765 +141,3 @@ async def test_switch( assert cluster.read_attributes.call_args == call( ["on_off"], allow_cache=False, only_cache=False, manufacturer=None ) - - # test joining a new switch to the network and HA - await async_test_rejoin(hass, zigpy_device, [cluster], (1,)) - - -class WindowDetectionFunctionQuirk(CustomDevice): - """Quirk with window detection function attribute.""" - - class TuyaManufCluster(CustomCluster, ManufacturerSpecificCluster): - """Tuya manufacturer specific cluster.""" - - cluster_id = 0xEF00 - ep_attribute = "tuya_manufacturer" - - attributes = { - 0xEF01: ("window_detection_function", t.Bool), - 0xEF02: ("window_detection_function_inverter", t.Bool), - } - - def __init__(self, *args, **kwargs): - """Initialize with task.""" - super().__init__(*args, **kwargs) - self._attr_cache.update( - {0xEF01: False} - ) # entity won't be created without this - - 
replacement = { - ENDPOINTS: { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, - INPUT_CLUSTERS: [general.Basic.cluster_id, TuyaManufCluster], - OUTPUT_CLUSTERS: [], - }, - } - } - - -@pytest.fixture -async def zigpy_device_tuya(hass, zigpy_device_mock, zha_device_joined): - """Device tracker zigpy tuya device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - manufacturer="_TZE200_b6wax7g0", - quirk=WindowDetectionFunctionQuirk, - ) - - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zigpy_device - - -@patch( - "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_zha_group_switch_entity( - hass: HomeAssistant, - device_switch_1, - device_switch_2, - entity_registry: er.EntityRegistry, - config_entry: MockConfigEntry, -) -> None: - """Test the switch entity for a ZHA group.""" - - # make sure we can still get groups when counter entities exist - entity_id = "sensor.coordinator_manufacturer_coordinator_model_counter_1" - state = hass.states.get(entity_id) - assert state is None - - # Enable the entity. 
- entity_registry.async_update_entity(entity_id, disabled_by=None) - await hass.config_entries.async_reload(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "1" - - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - device_switch_1._zha_gateway = zha_gateway - device_switch_2._zha_gateway = zha_gateway - member_ieee_addresses = [ - device_switch_1.ieee, - device_switch_2.ieee, - zha_gateway.coordinator_zha_device.ieee, - ] - members = [ - GroupMember(device_switch_1.ieee, 1), - GroupMember(device_switch_2.ieee, 1), - GroupMember(zha_gateway.coordinator_zha_device.ieee, 1), - ] - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 3 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - entity_id = async_find_group_entity_id(hass, Platform.SWITCH, zha_group) - assert hass.states.get(entity_id) is not None - - group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] - dev1_cluster_on_off = device_switch_1.device.endpoints[1].on_off - dev2_cluster_on_off = device_switch_2.device.endpoints[1].on_off - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_switch_1, device_switch_2]) - await async_wait_for_updates(hass) - - # test that the switches were created and are off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x00, zcl_f.Status.SUCCESS], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert len(group_cluster_on_off.request.mock_calls) 
== 1 - assert group_cluster_on_off.request.call_args == call( - False, - ON, - group_cluster_on_off.commands_by_name["on"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert hass.states.get(entity_id).state == STATE_ON - - # test turn off failure case - hold_off = group_cluster_on_off.off - group_cluster_on_off.off = AsyncMock(return_value=[0x01, zcl_f.Status.FAILURE]) - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert len(group_cluster_on_off.off.mock_calls) == 1 - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_ON - group_cluster_on_off.off = hold_off - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x01, zcl_f.Status.SUCCESS], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert len(group_cluster_on_off.request.mock_calls) == 1 - assert group_cluster_on_off.request.call_args == call( - False, - OFF, - group_cluster_on_off.commands_by_name["off"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert hass.states.get(entity_id).state == STATE_OFF - - # test turn on failure case - hold_on = group_cluster_on_off.on - group_cluster_on_off.on = AsyncMock(return_value=[0x01, zcl_f.Status.FAILURE]) - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert len(group_cluster_on_off.on.mock_calls) == 1 - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - group_cluster_on_off.on = hold_on - - # test some of the group logic to make sure we key off states correctly - await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) - await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - - # test that group switch is on - assert 
hass.states.get(entity_id).state == STATE_ON - - await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - - # test that group switch is still on - assert hass.states.get(entity_id).state == STATE_ON - - await send_attributes_report(hass, dev2_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - - # test that group switch is now off - assert hass.states.get(entity_id).state == STATE_OFF - - await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - - # test that group switch is now back on - assert hass.states.get(entity_id).state == STATE_ON - - -async def test_switch_configurable( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_tuya -) -> None: - """Test ZHA configurable switch platform.""" - - zha_device = await zha_device_joined_restored(zigpy_device_tuya) - cluster = zigpy_device_tuya.endpoints[1].tuya_manufacturer - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on at switch - await send_attributes_report(hass, cluster, {"window_detection_function": True}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn off at switch - await send_attributes_report(hass, cluster, {"window_detection_function": False}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS], - ): - # turn 
on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": True}, manufacturer=None) - ] - - cluster.write_attributes.reset_mock() - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": False}, manufacturer=None) - ] - - cluster.read_attributes.reset_mock() - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - # the mocking doesn't update the attr cache so this flips back to initial value - assert cluster.read_attributes.call_count == 2 - assert [ - call( - [ - "window_detection_function", - ], - allow_cache=False, - only_cache=False, - manufacturer=None, - ), - call( - [ - "window_detection_function_inverter", - ], - allow_cache=False, - only_cache=False, - manufacturer=None, - ), - ] == cluster.read_attributes.call_args_list - - cluster.write_attributes.reset_mock() - cluster.write_attributes.side_effect = ZigbeeException - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": False}, manufacturer=None), - call({"window_detection_function": False}, manufacturer=None), - call({"window_detection_function": False}, manufacturer=None), - ] - - cluster.write_attributes.side_effect = None - - # test inverter - cluster.write_attributes.reset_mock() - cluster._attr_cache.update({0xEF02: True}) - - 
await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": True}, manufacturer=None) - ] - - cluster.write_attributes.reset_mock() - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": False}, manufacturer=None) - ] - - # test joining a new switch to the network and HA - await async_test_rejoin(hass, zigpy_device_tuya, [cluster], (0,)) - - -async def test_switch_configurable_custom_on_off_values( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock -) -> None: - """Test ZHA configurable switch platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - manufacturer="manufacturer", - model="model", - ) - - ( - add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) - .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) - .switch( - "window_detection_function", - WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, - on_value=3, - off_value=5, - ) - ) - - zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) - - assert isinstance(zigpy_device, CustomDeviceV2) - cluster = zigpy_device.endpoints[1].tuya_manufacturer - cluster.PLUGGED_ATTR_READS = {"window_detection_function": 5} - update_attribute_cache(cluster) - - zha_device = await zha_device_joined_restored(zigpy_device) - - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow 
through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 3}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn off at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 5}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 3}, manufacturer=None) - ] - cluster.write_attributes.reset_mock() - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 5}, manufacturer=None) - ] - - -async def test_switch_configurable_custom_on_off_values_force_inverted( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock -) -> None: - """Test ZHA configurable switch platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - manufacturer="manufacturer2", - model="model2", - ) - - ( - add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) - .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) - .switch( - "window_detection_function", - WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, 
- on_value=3, - off_value=5, - force_inverted=True, - ) - ) - - zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) - - assert isinstance(zigpy_device, CustomDeviceV2) - cluster = zigpy_device.endpoints[1].tuya_manufacturer - cluster.PLUGGED_ATTR_READS = {"window_detection_function": 5} - update_attribute_cache(cluster) - - zha_device = await zha_device_joined_restored(zigpy_device) - - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_ON - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_ON - - # turn on at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 3}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn off at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 5}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 5}, manufacturer=None) - ] - cluster.write_attributes.reset_mock() - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - 
assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 3}, manufacturer=None) - ] - - -async def test_switch_configurable_custom_on_off_values_inverter_attribute( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock -) -> None: - """Test ZHA configurable switch platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - manufacturer="manufacturer3", - model="model3", - ) - - ( - add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) - .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) - .switch( - "window_detection_function", - WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, - on_value=3, - off_value=5, - invert_attribute_name="window_detection_function_inverter", - ) - ) - - zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) - - assert isinstance(zigpy_device, CustomDeviceV2) - cluster = zigpy_device.endpoints[1].tuya_manufacturer - cluster.PLUGGED_ATTR_READS = { - "window_detection_function": 5, - "window_detection_function_inverter": t.Bool(True), - } - update_attribute_cache(cluster) - - zha_device = await zha_device_joined_restored(zigpy_device) - - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_ON - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_ON - - # turn on at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 3}) - assert hass.states.get(entity_id).state == STATE_OFF 
- - # turn off at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 5}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 5}, manufacturer=None) - ] - cluster.write_attributes.reset_mock() - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 3}, manufacturer=None) - ] - - -WCAttrs = closures.WindowCovering.AttributeDefs -WCT = closures.WindowCovering.WindowCoveringType -WCCS = closures.WindowCovering.ConfigStatus -WCM = closures.WindowCovering.WindowCoveringMode - - -async def test_cover_inversion_switch( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device -) -> None: - """Test ZHA cover platform.""" - - # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering - cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 65, - WCAttrs.current_position_tilt_percentage.name: 42, - WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, - WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), - WCAttrs.window_covering_mode.name: WCM(WCM.LEDs_display_feedback), - } - update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) - assert ( - not zha_device.endpoints[1] - .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] - .inverted - ) - 
assert cluster.read_attributes.call_count == 3 - assert ( - WCAttrs.current_position_lift_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - assert ( - WCAttrs.current_position_tilt_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test update - prev_call_count = cluster.read_attributes.call_count - await async_update_entity(hass, entity_id) - assert cluster.read_attributes.call_count == prev_call_count + 1 - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - # test to see the state remains after tilting to 0% - await send_attributes_report( - hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} - ) - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - with patch( - "zigpy.zcl.Cluster.write_attributes", return_value=[0x1, zcl_f.Status.SUCCESS] - ): - cluster.PLUGGED_ATTR_READS = { - WCAttrs.config_status.name: WCCS.Operational - | WCCS.Open_up_commands_reversed, - } - # turn on from UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.call_count == 1 - assert cluster.write_attributes.call_args_list[0] == call( - { - WCAttrs.window_covering_mode.name: WCM.Motor_direction_reversed - | WCM.LEDs_display_feedback - }, - manufacturer=None, - ) - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_ON - - cluster.write_attributes.reset_mock() - - # turn off from UI - 
cluster.PLUGGED_ATTR_READS = { - WCAttrs.config_status.name: WCCS.Operational, - } - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.call_count == 1 - assert cluster.write_attributes.call_args_list[0] == call( - {WCAttrs.window_covering_mode.name: WCM.LEDs_display_feedback}, - manufacturer=None, - ) - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - cluster.write_attributes.reset_mock() - - # test that sending the command again does not result in a write - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.call_count == 0 - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - -async def test_cover_inversion_switch_not_created( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device -) -> None: - """Test ZHA cover platform.""" - - # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering - cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 65, - WCAttrs.current_position_tilt_percentage.name: 42, - WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), - } - update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) - - assert cluster.read_attributes.call_count == 3 - assert ( - WCAttrs.current_position_lift_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - assert ( - WCAttrs.current_position_tilt_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - - # entity should not be created when mode or config status aren't present - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is None diff --git a/tests/components/zha/test_update.py b/tests/components/zha/test_update.py index 32be013e673..6a1a19b407f 100644 --- 
a/tests/components/zha/test_update.py +++ b/tests/components/zha/test_update.py @@ -23,13 +23,25 @@ from homeassistant.components.update import ( DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .common import async_enable_traffic, find_entity_id, update_attribute_cache -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .common import find_entity_id, update_attribute_cache +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @pytest.fixture(autouse=True) @@ -47,28 +59,32 @@ def update_platform_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00" - ) - - async def setup_test_data( - zha_device_joined_restored, - zigpy_device, + hass: HomeAssistant, + zigpy_device_mock, skip_attribute_plugs=False, file_not_found=False, ): """Set up test data for the tests.""" + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], + SIG_EP_OUTPUT: [general.Ota.cluster_id], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + 
node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + ) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + fw_version = 0x12345678 installed_fw_version = fw_version - 10 cluster = zigpy_device.endpoints[1].out_clusters[general.Ota.cluster_id] @@ -106,31 +122,28 @@ async def setup_test_data( cluster.endpoint.device.application.ota.get_ota_image = AsyncMock( return_value=None if file_not_found else fw_image ) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + zha_device_proxy.device.async_update_sw_build_id(installed_fw_version) - zha_device = await zha_device_joined_restored(zigpy_device) - zha_device.async_update_sw_build_id(installed_fw_version) - - return zha_device, cluster, fw_image, installed_fw_version + return zha_device_proxy, cluster, fw_image, installed_fw_version async def test_firmware_update_notification_from_zigpy( hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update notification.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, - zigpy_device, + hass, + zigpy_device_mock, ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification await cluster._handle_query_next_image( @@ -139,7 +152,7 @@ async def test_firmware_update_notification_from_zigpy( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.manufacturer_code, + zha_device.device.manufacturer_code, 
fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -158,20 +171,20 @@ async def test_firmware_update_notification_from_zigpy( async def test_firmware_update_notification_from_service_call( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update manual check.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, zigpy_device + hass, + zigpy_device_mock, ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN async def _async_image_notify_side_effect(*args, **kwargs): await cluster._handle_query_next_image( @@ -180,7 +193,7 @@ async def test_firmware_update_notification_from_service_call( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.manufacturer_code, + zha_device.device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -245,11 +258,14 @@ def make_packet(zigpy_device, cluster, cmd_name: str, **kwargs): @patch("zigpy.device.AFTER_OTA_ATTR_READ_DELAY", 0.01) async def test_firmware_update_success( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update success.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, zigpy_device + hass, zigpy_device_mock ) assert installed_fw_version < fw_image.firmware.header.file_version @@ -257,10 +273,7 @@ async def 
test_firmware_update_success( entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification await cluster._handle_query_next_image( @@ -269,7 +282,7 @@ async def test_firmware_update_success( ), general.QueryNextImageCommand( field_control=fw_image.firmware.header.field_control, - manufacturer_code=zha_device.manufacturer_code, + manufacturer_code=zha_device.device.manufacturer_code, image_type=fw_image.firmware.header.image_type, current_file_version=installed_fw_version, ), @@ -289,9 +302,9 @@ async def test_firmware_update_success( if cluster_id == general.Ota.cluster_id: hdr, cmd = cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, @@ -309,9 +322,9 @@ async def test_firmware_update_success( assert cmd.image_type == fw_image.firmware.header.image_type assert cmd.file_version == fw_image.firmware.header.file_version assert cmd.image_size == fw_image.firmware.header.image_size - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, @@ -320,7 +333,7 @@ async def test_firmware_update_success( file_version=fw_image.firmware.header.file_version, file_offset=0, maximum_data_size=40, - request_node_addr=zigpy_device.ieee, + request_node_addr=zha_device.device.device.ieee, ) ) elif 
isinstance( @@ -336,9 +349,9 @@ async def test_firmware_update_success( assert cmd.file_version == fw_image.firmware.header.file_version assert cmd.file_offset == 0 assert cmd.image_data == fw_image.firmware.serialize()[0:40] - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, @@ -347,7 +360,7 @@ async def test_firmware_update_success( file_version=fw_image.firmware.header.file_version, file_offset=40, maximum_data_size=40, - request_node_addr=zigpy_device.ieee, + request_node_addr=zha_device.device.device.ieee, ) ) elif cmd.file_offset == 40: @@ -374,9 +387,9 @@ async def test_firmware_update_success( == f"0x{fw_image.firmware.header.file_version:08x}" ) - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.upgrade_end.name, status=foundation.Status.SUCCESS, @@ -430,7 +443,7 @@ async def test_firmware_update_success( # If we send a progress notification incorrectly, it won't be handled entity = hass.data[UPDATE_DOMAIN].get_entity(entity_id) - entity._update_progress(50, 100, 0.50) + entity.entity_data.entity._update_progress(50, 100, 0.50) state = hass.states.get(entity_id) assert not attrs[ATTR_IN_PROGRESS] @@ -438,20 +451,20 @@ async def test_firmware_update_success( async def test_firmware_update_raises( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update raises.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, zigpy_device + hass, zigpy_device_mock ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is 
not None - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification await cluster._handle_query_next_image( @@ -460,7 +473,7 @@ async def test_firmware_update_raises( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.manufacturer_code, + zha_device.device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -481,9 +494,9 @@ async def test_firmware_update_raises( if cluster_id == general.Ota.cluster_id: hdr, cmd = cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, @@ -532,20 +545,20 @@ async def test_firmware_update_raises( async def test_firmware_update_no_longer_compatible( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update is no longer valid.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, zigpy_device + hass, zigpy_device_mock ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification await cluster._handle_query_next_image( @@ -554,7 +567,7 @@ async def 
test_firmware_update_no_longer_compatible( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.manufacturer_code, + zha_device.device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -577,9 +590,9 @@ async def test_firmware_update_no_longer_compatible( if cluster_id == general.Ota.cluster_id: hdr, cmd = cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, diff --git a/tests/components/zha/test_websocket_api.py b/tests/components/zha/test_websocket_api.py index 80b9f6accd0..f6afee9eb83 100644 --- a/tests/components/zha/test_websocket_api.py +++ b/tests/components/zha/test_websocket_api.py @@ -10,12 +10,27 @@ from unittest.mock import ANY, AsyncMock, MagicMock, call, patch from freezegun import freeze_time import pytest import voluptuous as vol +from zha.application.const import ( + ATTR_CLUSTER_ID, + ATTR_CLUSTER_TYPE, + ATTR_ENDPOINT_ID, + ATTR_ENDPOINT_NAMES, + ATTR_IEEE, + ATTR_MANUFACTURER, + ATTR_NEIGHBORS, + ATTR_QUIRK_APPLIED, + ATTR_TYPE, + CLUSTER_TYPE_IN, +) +from zha.zigbee.cluster_handlers import ClusterBindEvent, ClusterConfigureReportingEvent +from zha.zigbee.device import ClusterHandlerConfigurationComplete import zigpy.backups +from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE import zigpy.profiles.zha import zigpy.types from zigpy.types.named import EUI64 import zigpy.util -from zigpy.zcl.clusters import general, security +from zigpy.zcl.clusters import closures, general, security from zigpy.zcl.clusters.general import Groups import zigpy.zdo.types as zdo_types @@ -25,23 +40,12 @@ from 
homeassistant.components.websocket_api import ( TYPE_RESULT, ) from homeassistant.components.zha import DOMAIN -from homeassistant.components.zha.core.const import ( - ATTR_CLUSTER_ID, - ATTR_CLUSTER_TYPE, - ATTR_ENDPOINT_ID, - ATTR_ENDPOINT_NAMES, - ATTR_IEEE, - ATTR_MANUFACTURER, - ATTR_MODEL, - ATTR_NEIGHBORS, - ATTR_QUIRK_APPLIED, - ATTR_TYPE, - BINDINGS, - CLUSTER_TYPE_IN, - EZSP_OVERWRITE_EUI64, - GROUP_ID, - GROUP_IDS, - GROUP_NAME, +from homeassistant.components.zha.const import EZSP_OVERWRITE_EUI64 +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) from homeassistant.components.zha.websocket_api import ( ATTR_DURATION, @@ -49,22 +53,19 @@ from homeassistant.components.zha.websocket_api import ( ATTR_QR_CODE, ATTR_SOURCE_IEEE, ATTR_TARGET_IEEE, + BINDINGS, + GROUP_ID, + GROUP_IDS, + GROUP_NAME, ID, SERVICE_PERMIT, TYPE, async_load_api, ) -from homeassistant.const import ATTR_NAME, Platform +from homeassistant.const import ATTR_MODEL, ATTR_NAME, Platform from homeassistant.core import Context, HomeAssistant -from .conftest import ( - FIXTURE_GRP_ID, - FIXTURE_GRP_NAME, - SIG_EP_INPUT, - SIG_EP_OUTPUT, - SIG_EP_PROFILE, - SIG_EP_TYPE, -) +from .conftest import FIXTURE_GRP_ID, FIXTURE_GRP_NAME from .data import BASE_CUSTOM_CONFIGURATION, CONFIG_WITH_ALARM_OPTIONS from tests.common import MockConfigEntry, MockUser @@ -93,10 +94,18 @@ def required_platform_only(): @pytest.fixture -async def device_switch(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA switch platform.""" +async def zha_client( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_zha, + zigpy_device_mock, +) -> MockHAClientWebSocket: + """Get ZHA WebSocket client.""" - zigpy_device = zigpy_device_mock( + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device_switch = zigpy_device_mock( { 1: { SIG_EP_INPUT: [general.OnOff.cluster_id, general.Basic.cluster_id], @@ -107,35 
+116,8 @@ async def device_switch(hass, zigpy_device_mock, zha_device_joined): }, ieee=IEEE_SWITCH_DEVICE, ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - -@pytest.fixture -async def device_ias_ace(hass, zigpy_device_mock, zha_device_joined): - """Test alarm control panel device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - }, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_groupable(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( + zigpy_device_groupable = zigpy_device_mock( { 1: { SIG_EP_INPUT: [ @@ -150,19 +132,14 @@ async def device_groupable(hass, zigpy_device_mock, zha_device_joined): }, ieee=IEEE_GROUPABLE_DEVICE, ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device + gateway.get_or_create_device(zigpy_device_switch) + await gateway.async_device_initialized(zigpy_device_switch) + await hass.async_block_till_done(wait_background_tasks=True) -@pytest.fixture -async def zha_client( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - device_switch, - device_groupable, -) -> MockHAClientWebSocket: - """Get ZHA WebSocket client.""" + gateway.get_or_create_device(zigpy_device_groupable) + await gateway.async_device_initialized(zigpy_device_groupable) + await hass.async_block_till_done(wait_background_tasks=True) # load the ZHA API async_load_api(hass) @@ -247,7 +224,7 @@ async def test_list_devices(zha_client) -> None: msg = await zha_client.receive_json() devices = msg["result"] - assert len(devices) == 2 + 1 # the coordinator is included as well + assert len(devices) == 3 # the 
coordinator is included as well msg_id = 100 for device in devices: @@ -284,9 +261,31 @@ async def test_get_zha_config(zha_client) -> None: async def test_get_zha_config_with_alarm( - hass: HomeAssistant, zha_client, device_ias_ace + hass: HomeAssistant, zha_client, zigpy_device_mock ) -> None: """Test getting ZHA custom configuration.""" + + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device_ias = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + } + }, + ) + + gateway.get_or_create_device(zigpy_device_ias) + await gateway.async_device_initialized(zigpy_device_ias) + await hass.async_block_till_done(wait_background_tasks=True) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy( + zigpy_device_ias.ieee + ) + await zha_client.send_json({ID: 5, TYPE: "zha/configuration"}) msg = await zha_client.receive_json() @@ -295,7 +294,7 @@ async def test_get_zha_config_with_alarm( assert configuration == CONFIG_WITH_ALARM_OPTIONS # test that the alarm options are not in the config when we remove the device - device_ias_ace.gateway.device_removed(device_ias_ace.device) + zha_device_proxy.gateway_proxy.gateway.device_removed(zha_device_proxy.device) await hass.async_block_till_done() await zha_client.send_json({ID: 6, TYPE: "zha/configuration"}) @@ -390,11 +389,12 @@ async def test_get_group_not_found(zha_client) -> None: async def test_list_groupable_devices( - zha_client, device_groupable, zigpy_app_controller + hass: HomeAssistant, zha_client, zigpy_app_controller ) -> None: """Test getting ZHA devices that have a group cluster.""" # Ensure the coordinator doesn't have a group cluster coordinator = zigpy_app_controller.get_device(nwk=0x0000) + del coordinator.endpoints[1].in_clusters[Groups.cluster_id] await zha_client.send_json({ID: 10, 
TYPE: "zha/devices/groupable"}) @@ -425,7 +425,10 @@ async def test_list_groupable_devices( # Make sure there are no groupable devices when the device is unavailable # Make device unavailable - device_groupable.available = False + get_zha_gateway_proxy(hass).device_proxies[ + EUI64.convert(IEEE_GROUPABLE_DEVICE) + ].device.available = False + await hass.async_block_till_done(wait_background_tasks=True) await zha_client.send_json({ID: 11, TYPE: "zha/devices/groupable"}) @@ -437,9 +440,16 @@ async def test_list_groupable_devices( assert len(device_endpoints) == 0 -async def test_add_group(zha_client) -> None: +async def test_add_group(hass: HomeAssistant, zha_client) -> None: """Test adding and getting a new ZHA zigbee group.""" - await zha_client.send_json({ID: 12, TYPE: "zha/group/add", GROUP_NAME: "new_group"}) + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) msg = await zha_client.receive_json() assert msg["id"] == 12 @@ -447,8 +457,17 @@ async def test_add_group(zha_client) -> None: added_group = msg["result"] + groupable_device = get_zha_gateway_proxy(hass).device_proxies[ + EUI64.convert(IEEE_GROUPABLE_DEVICE) + ] + assert added_group["name"] == "new_group" - assert added_group["members"] == [] + assert len(added_group["members"]) == 1 + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + assert ( + added_group["members"][0]["device"]["device_reg_id"] + == groupable_device.device_id + ) await zha_client.send_json({ID: 13, TYPE: "zha/groups"}) @@ -496,6 +515,82 @@ async def test_remove_group(zha_client) -> None: assert len(groups) == 0 +async def test_add_group_member(hass: HomeAssistant, zha_client) -> None: + """Test adding a ZHA zigbee group member.""" + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] 
== 12 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert len(added_group["members"]) == 0 + + await zha_client.send_json( + { + ID: 13, + TYPE: "zha/group/members/add", + GROUP_ID: added_group["group_id"], + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 13 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert len(added_group["members"]) == 1 + assert added_group["name"] == "new_group" + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + + +async def test_remove_group_member(hass: HomeAssistant, zha_client) -> None: + """Test removing a ZHA zigbee group member.""" + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 12 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert added_group["name"] == "new_group" + assert len(added_group["members"]) == 1 + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + + await zha_client.send_json( + { + ID: 13, + TYPE: "zha/group/members/remove", + GROUP_ID: added_group["group_id"], + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 13 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + assert len(added_group["members"]) == 0 + + @pytest.fixture async def app_controller( hass: HomeAssistant, setup_zha, zigpy_app_controller: ControllerApplication @@ -1037,3 +1132,101 @@ async def test_websocket_bind_unbind_group( assert bind_mock.mock_calls == [call(test_group_id, ANY)] elif command_type == "unbind": assert unbind_mock.mock_calls == [call(test_group_id, ANY)] + + +async def test_websocket_reconfigure( + hass: HomeAssistant, zha_client: 
MockHAClientWebSocket, zigpy_device_mock +) -> None: + """Test websocket API to reconfigure a device.""" + gateway = get_zha_gateway(hass) + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [closures.WindowCovering.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SHADE, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + } + }, + ) + + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy = get_zha_gateway_proxy(hass).get_device_proxy(zha_device.ieee) + + def mock_reconfigure() -> None: + zha_device_proxy.handle_zha_channel_configure_reporting( + ClusterConfigureReportingEvent( + cluster_name="Window Covering", + cluster_id=258, + attributes={ + "current_position_lift_percentage": { + "min": 0, + "max": 900, + "id": "current_position_lift_percentage", + "name": "current_position_lift_percentage", + "change": 1, + "status": "SUCCESS", + }, + "current_position_tilt_percentage": { + "min": 0, + "max": 900, + "id": "current_position_tilt_percentage", + "name": "current_position_tilt_percentage", + "change": 1, + "status": "SUCCESS", + }, + }, + cluster_handler_unique_id="28:2c:02:bf:ff:ea:05:68:1:0x0102", + event_type="zha_channel_message", + event="zha_channel_configure_reporting", + ) + ) + + zha_device_proxy.handle_zha_channel_bind( + ClusterBindEvent( + cluster_name="Window Covering", + cluster_id=1, + success=True, + cluster_handler_unique_id="28:2c:02:bf:ff:ea:05:68:1:0x0012", + event_type="zha_channel_message", + event="zha_channel_bind", + ) + ) + + zha_device_proxy.handle_zha_channel_cfg_done( + ClusterHandlerConfigurationComplete( + device_ieee="28:2c:02:bf:ff:ea:05:68", + unique_id="28:2c:02:bf:ff:ea:05:68", + event_type="zha_channel_message", + event="zha_channel_cfg_done", + ) + ) + + with patch.object( + zha_device_proxy.device, "async_configure", side_effect=mock_reconfigure 
+ ): + await zha_client.send_json( + { + ID: 6, + TYPE: "zha/devices/reconfigure", + ATTR_IEEE: str(zha_device_proxy.device.ieee), + } + ) + + messages = [] + + while len(messages) != 3: + msg = await zha_client.receive_json() + + if msg[ID] == 6: + messages.append(msg) + + # Ensure the frontend receives progress events + assert {m["event"]["type"] for m in messages} == { + "zha_channel_configure_reporting", + "zha_channel_bind", + "zha_channel_cfg_done", + } diff --git a/tests/components/zha/zha_devices_list.py b/tests/components/zha/zha_devices_list.py deleted file mode 100644 index 4c23244c5e0..00000000000 --- a/tests/components/zha/zha_devices_list.py +++ /dev/null @@ -1,5922 +0,0 @@ -"""Example Zigbee Devices.""" - -from zigpy.const import ( - SIG_ENDPOINTS, - SIG_EP_INPUT, - SIG_EP_OUTPUT, - SIG_EP_PROFILE, - SIG_EP_TYPE, - SIG_MANUFACTURER, - SIG_MODEL, - SIG_NODE_DESC, -) -from zigpy.profiles import zha, zll -from zigpy.types import Bool, uint8_t -from zigpy.zcl.clusters.closures import DoorLock -from zigpy.zcl.clusters.general import ( - Basic, - Groups, - Identify, - LevelControl, - MultistateInput, - OnOff, - Ota, - PowerConfiguration, - Scenes, -) -from zigpy.zcl.clusters.lighting import Color -from zigpy.zcl.clusters.measurement import ( - IlluminanceMeasurement, - OccupancySensing, - TemperatureMeasurement, -) - -DEV_SIG_CLUSTER_HANDLERS = "cluster_handlers" -DEV_SIG_DEV_NO = "device_no" -DEV_SIG_ENT_MAP = "entity_map" -DEV_SIG_ENT_MAP_CLASS = "entity_class" -DEV_SIG_ENT_MAP_ID = "entity_id" -DEV_SIG_EP_ID = "endpoint_id" -DEV_SIG_EVT_CLUSTER_HANDLERS = "event_cluster_handlers" -DEV_SIG_ZHA_QUIRK = "zha_quirk" -DEV_SIG_ATTRIBUTES = "attributes" - - -PROFILE_ID = SIG_EP_PROFILE -DEVICE_TYPE = SIG_EP_TYPE -INPUT_CLUSTERS = SIG_EP_INPUT -OUTPUT_CLUSTERS = SIG_EP_OUTPUT - -DEVICES = [ - { - DEV_SIG_DEV_NO: 0, - SIG_MANUFACTURER: "ADUROLIGHT", - SIG_MODEL: "Adurolight_NCC", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", - SIG_ENDPOINTS: { - 1: 
{ - SIG_EP_TYPE: 2080, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4096, 64716], - SIG_EP_OUTPUT: [3, 4, 6, 8, 4096, 64716], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.adurolight_adurolight_ncc_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.adurolight_adurolight_ncc_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.adurolight_adurolight_ncc_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 1, - SIG_MANUFACTURER: "Bosch", - SIG_MODEL: "ISW-ZPR1-WP13", - SIG_NODE_DESC: b"\x02@\x08\x00\x00l\x00\x00\x00\x00\x00\x00\x00", - SIG_ENDPOINTS: { - 5: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["5:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-5-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.bosch_isw_zpr1_wp13_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-5-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.bosch_isw_zpr1_wp13_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-5-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-5-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: 
"sensor.bosch_isw_zpr1_wp13_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-5-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-5-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-5-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.bosch_isw_zpr1_wp13_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 2, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3130", - SIG_NODE_DESC: b"\x02@\x80N\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 2821], - SIG_EP_OUTPUT: [3, 6, 8, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3130_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: 
"update.centralite_3130_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 3, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3210-L", - SIG_NODE_DESC: b"\x01@\x8eN\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 81, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794, 2820, 2821, 64515], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.centralite_3210_l_switch", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3210_l_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: 
"sensor.centralite_3210_l_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3210_l_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 4, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3310-S", - SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 770, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 2821, 64581], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3310_s_identify", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-64581"): { - DEV_SIG_CLUSTER_HANDLERS: ["humidity"], - DEV_SIG_ENT_MAP_CLASS: "Humidity", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_humidity", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3310_s_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 5, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3315-S", - SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 12, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821, 64527], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 49887, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3315_s_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - 
DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3315_s_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3315_s_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 6, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3320-L", - SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 12, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821, 64527], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 49887, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3320_l_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: 
"button.centralite_3320_l_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3320_l_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 7, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3326-L", - SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 263, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821, 64582], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 49887, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3326_l_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3326_l_identify", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3326_l_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 8, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "Motion Sensor-A", - SIG_NODE_DESC: b"\x02@\x80N\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 263, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 1030, 2821], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_motion_sensor_a_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_motion_sensor_a_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: 
["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-2-1030"): { - DEV_SIG_CLUSTER_HANDLERS: ["occupancy"], - DEV_SIG_ENT_MAP_CLASS: "Occupancy", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.centralite_motion_sensor_a_occupancy" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_motion_sensor_a_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 9, - SIG_MANUFACTURER: "ClimaxTechnology", - SIG_MODEL: "PSMP5_00.00.02.02TC", - SIG_NODE_DESC: b"\x01@\x8e\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 81, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794], - SIG_EP_OUTPUT: [0], - SIG_EP_PROFILE: 260, - }, - 4: { - SIG_EP_TYPE: 9, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["4:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: ( - "switch.climaxtechnology_psmp5_00_00_02_02tc_switch" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - 
DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.climaxtechnology_psmp5_00_00_02_02tc_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: ( - "sensor.climaxtechnology_psmp5_00_00_02_02tc_instantaneous_demand" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: ( - "sensor.climaxtechnology_psmp5_00_00_02_02tc_summation_delivered" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_psmp5_00_00_02_02tc_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_psmp5_00_00_02_02tc_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-4-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.climaxtechnology_psmp5_00_00_02_02tc_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 10, - SIG_MANUFACTURER: "ClimaxTechnology", - SIG_MODEL: "SD8SC_00.00.03.12TC", - SIG_NODE_DESC: b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 1280, 1282], - SIG_EP_OUTPUT: [0], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.climaxtechnology_sd8sc_00_00_03_12tc_ias_zone" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.climaxtechnology_sd8sc_00_00_03_12tc_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_sd8sc_00_00_03_12tc_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_sd8sc_00_00_03_12tc_lqi", - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-WarningMode"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultToneSelectEntity", - DEV_SIG_ENT_MAP_ID: ( - "select.climaxtechnology_sd8sc_00_00_03_12tc_default_siren_tone" - ), - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-SirenLevel"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultSirenLevelSelectEntity", - DEV_SIG_ENT_MAP_ID: ( - "select.climaxtechnology_sd8sc_00_00_03_12tc_default_siren_level" - ), - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-StrobeLevel"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeLevelSelectEntity", - DEV_SIG_ENT_MAP_ID: ( - "select.climaxtechnology_sd8sc_00_00_03_12tc_default_strobe_level" - ), - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-Strobe"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeSelectEntity", - DEV_SIG_ENT_MAP_ID: ( - "select.climaxtechnology_sd8sc_00_00_03_12tc_default_strobe" - ), - }, - ("siren", "00:11:22:33:44:55:66:77-1-1282"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHASiren", - DEV_SIG_ENT_MAP_ID: "siren.climaxtechnology_sd8sc_00_00_03_12tc_siren", - }, - }, - }, - { - DEV_SIG_DEV_NO: 11, - SIG_MANUFACTURER: "ClimaxTechnology", - SIG_MODEL: "WS15_00.00.03.03TC", - SIG_NODE_DESC: 
b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 1280], - SIG_EP_OUTPUT: [0], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.climaxtechnology_ws15_00_00_03_03tc_ias_zone" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.climaxtechnology_ws15_00_00_03_03tc_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_ws15_00_00_03_03tc_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_ws15_00_00_03_03tc_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 12, - SIG_MANUFACTURER: "Feibit Inc co.", - SIG_MODEL: "FB56-ZCW08KU1.1", - SIG_NODE_DESC: b"\x01@\x8e\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 11: { - SIG_EP_TYPE: 528, - DEV_SIG_EP_ID: 11, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49246, - }, - 13: { - SIG_EP_TYPE: 57694, - DEV_SIG_EP_ID: 13, - SIG_EP_INPUT: [4096], - SIG_EP_OUTPUT: [4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-11"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.feibit_inc_co_fb56_zcw08ku1_1_light", - }, - ("button", "00:11:22:33:44:55:66:77-11-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: 
"ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.feibit_inc_co_fb56_zcw08ku1_1_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-11-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.feibit_inc_co_fb56_zcw08ku1_1_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-11-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.feibit_inc_co_fb56_zcw08ku1_1_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 13, - SIG_MANUFACTURER: "HEIMAN", - SIG_MODEL: "SmokeSensor-EM", - SIG_NODE_DESC: b"\x02@\x80\x0b\x12RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 1280, 1282], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_smokesensor_em_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.heiman_smokesensor_em_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - 
DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.heiman_smokesensor_em_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 14, - SIG_MANUFACTURER: "Heiman", - SIG_MODEL: "CO_V16", - SIG_NODE_DESC: b"\x02@\x84\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 1280], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_co_v16_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.heiman_co_v16_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_co_v16_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_co_v16_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.heiman_co_v16_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 15, - SIG_MANUFACTURER: "Heiman", - SIG_MODEL: "WarningDevice", - SIG_NODE_DESC: b"\x01@\x8e\x0b\x12RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1027, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 9, 1280, 1282], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("select", "00:11:22:33:44:55:66:77-1-1282-WarningMode"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - 
DEV_SIG_ENT_MAP_CLASS: "ZHADefaultToneSelectEntity", - DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_siren_tone", - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-SirenLevel"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultSirenLevelSelectEntity", - DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_siren_level", - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-StrobeLevel"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeLevelSelectEntity", - DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_strobe_level", - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-Strobe"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeSelectEntity", - DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_strobe", - }, - ("siren", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHASiren", - DEV_SIG_ENT_MAP_ID: "siren.heiman_warningdevice_siren", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_warningdevice_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.heiman_warningdevice_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_warningdevice_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_warningdevice_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: 
"update.heiman_warningdevice_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 16, - SIG_MANUFACTURER: "HiveHome.com", - SIG_MODEL: "MOT003", - SIG_NODE_DESC: b"\x02@\x809\x10PP\x00\x00\x00P\x00\x00", - SIG_ENDPOINTS: { - 6: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [0, 1, 3, 32, 1024, 1026, 1280], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["6:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-6-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.hivehome_com_mot003_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-6-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.hivehome_com_mot003_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-1024"): { - DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_illuminance", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-6-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: 
"update.hivehome_com_mot003_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 17, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E12 WS opal 600lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 268, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 4096, 64636], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 260, - }, - 242: { - SIG_EP_TYPE: 97, - DEV_SIG_EP_ID: 242, - SIG_EP_INPUT: [33], - SIG_EP_OUTPUT: [33], - SIG_EP_PROFILE: 41440, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 18, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E26 CWS opal 600lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - 
SIG_EP_TYPE: 512, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 4096], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 19, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E26 W opal 1000lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 2821, 4096], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - 
DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 20, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E26 WS opal 980lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 544, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 4096], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 21, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E26 opal 1000lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 2821, 4096], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_lqi" - ), - }, - ("update", 
"00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 22, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI control outlet", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 266, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 64636], - SIG_EP_OUTPUT: [5, 25, 32], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: ( - "switch.ikea_of_sweden_tradfri_control_outlet_switch" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_control_outlet_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_control_outlet_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_control_outlet_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_control_outlet_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 23, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI motion sensor", - SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2128, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 
3, 9, 2821, 4096], - SIG_EP_OUTPUT: [3, 4, 6, 25, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_motion_sensor_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_motion_sensor_battery" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_motion_sensor_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_motion_sensor_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Motion", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.ikea_of_sweden_tradfri_motion_sensor_motion" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_motion_sensor_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 24, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI on/off switch", - SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2080, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 32, 4096, 64636], - SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 258, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019", "1:0x0102"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_on_off_switch_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_on_off_switch_battery" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_on_off_switch_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_on_off_switch_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_on_off_switch_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 25, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI remote control", - SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2096, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 2821, 4096], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 25, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_remote_control_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_remote_control_battery" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_remote_control_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_remote_control_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_remote_control_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 26, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI signal repeater", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 8, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 9, 2821, 4096, 64636], - SIG_EP_OUTPUT: [25, 32, 4096], - SIG_EP_PROFILE: 260, - }, - 242: { - SIG_EP_TYPE: 97, - DEV_SIG_EP_ID: 242, - SIG_EP_INPUT: [33], - SIG_EP_OUTPUT: [33], - SIG_EP_PROFILE: 41440, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_signal_repeater_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_signal_repeater_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_signal_repeater_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: 
"update.ikea_of_sweden_tradfri_signal_repeater_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 27, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI wireless dimmer", - SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 2821, 4096], - SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_wireless_dimmer_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_wireless_dimmer_battery" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_wireless_dimmer_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_wireless_dimmer_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_wireless_dimmer_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 28, - SIG_MANUFACTURER: "Jasco Products", - SIG_MODEL: "45852", - SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 260, - DEV_SIG_EP_ID: 2, - 
SIG_EP_INPUT: [0, 3, 2821], - SIG_EP_OUTPUT: [3, 6, 8], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006", "2:0x0008"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", - DEV_SIG_ENT_MAP_ID: "light.jasco_products_45852_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.jasco_products_45852_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.jasco_products_45852_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 29, - SIG_MANUFACTURER: "Jasco Products", - SIG_MODEL: "45856", - SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794, 2821], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - 
SIG_EP_TYPE: 259, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821], - SIG_EP_OUTPUT: [3, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", - DEV_SIG_ENT_MAP_ID: "light.jasco_products_45856_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.jasco_products_45856_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.jasco_products_45856_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 30, - SIG_MANUFACTURER: "Jasco Products", - SIG_MODEL: "45857", - SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 
260, - }, - 2: { - SIG_EP_TYPE: 260, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821], - SIG_EP_OUTPUT: [3, 6, 8], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006", "2:0x0008"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", - DEV_SIG_ENT_MAP_ID: "light.jasco_products_45857_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.jasco_products_45857_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.jasco_products_45857_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 31, - SIG_MANUFACTURER: "Keen Home Inc", - SIG_MODEL: "SV02-610-MP-1.3", - SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 3, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 
1027, 2821, 64513, 64514], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.keen_home_inc_sv02_610_mp_1_3_identify", - }, - ("cover", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], - DEV_SIG_ENT_MAP_CLASS: "KeenVent", - DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_610_mp_1_3_keen_vent", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { - DEV_SIG_CLUSTER_HANDLERS: ["pressure"], - DEV_SIG_ENT_MAP_CLASS: "Pressure", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_pressure", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.keen_home_inc_sv02_610_mp_1_3_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 32, - SIG_MANUFACTURER: "Keen Home Inc", - SIG_MODEL: "SV02-612-MP-1.2", - SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", - SIG_ENDPOINTS: { - 1: { 
- SIG_EP_TYPE: 3, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 1027, 2821, 64513, 64514], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.keen_home_inc_sv02_612_mp_1_2_identify", - }, - ("cover", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], - DEV_SIG_ENT_MAP_CLASS: "KeenVent", - DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_612_mp_1_2_keen_vent", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { - DEV_SIG_CLUSTER_HANDLERS: ["pressure"], - DEV_SIG_ENT_MAP_CLASS: "Pressure", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_pressure", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.keen_home_inc_sv02_612_mp_1_2_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 33, - SIG_MANUFACTURER: "Keen Home Inc", - SIG_MODEL: 
"SV02-612-MP-1.3", - SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 3, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 1027, 2821, 64513, 64514], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.keen_home_inc_sv02_612_mp_1_3_identify", - }, - ("cover", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], - DEV_SIG_ENT_MAP_CLASS: "KeenVent", - DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_612_mp_1_3_keen_vent", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { - DEV_SIG_CLUSTER_HANDLERS: ["pressure"], - DEV_SIG_ENT_MAP_CLASS: "Pressure", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_pressure", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.keen_home_inc_sv02_612_mp_1_3_firmware", - }, 
- }, - }, - { - DEV_SIG_DEV_NO: 34, - SIG_MANUFACTURER: "King Of Fans, Inc.", - SIG_MODEL: "HBUniversalCFRemote", - SIG_NODE_DESC: b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 514], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.king_of_fans_inc_hbuniversalcfremote_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.king_of_fans_inc_hbuniversalcfremote_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.king_of_fans_inc_hbuniversalcfremote_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.king_of_fans_inc_hbuniversalcfremote_lqi", - }, - ("fan", "00:11:22:33:44:55:66:77-1-514"): { - DEV_SIG_CLUSTER_HANDLERS: ["fan"], - DEV_SIG_ENT_MAP_CLASS: "KofFan", - DEV_SIG_ENT_MAP_ID: "fan.king_of_fans_inc_hbuniversalcfremote_fan", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.king_of_fans_inc_hbuniversalcfremote_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 35, - SIG_MANUFACTURER: "LDS", - SIG_MODEL: "ZBT-CCTSwitch-D0001", - SIG_NODE_DESC: b"\x02@\x80h\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2048, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4096, 64769], - SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 768, 4096], - 
SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019", "1:0x0300"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lds_zbt_cctswitch_d0001_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lds_zbt_cctswitch_d0001_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 36, - SIG_MANUFACTURER: "LEDVANCE", - SIG_MODEL: "A19 RGBW", - SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.ledvance_a19_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.ledvance_a19_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_a19_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_a19_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ledvance_a19_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 37, - SIG_MANUFACTURER: "LEDVANCE", - SIG_MODEL: "FLEX RGBW", - SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.ledvance_flex_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.ledvance_flex_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_flex_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_flex_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ledvance_flex_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 38, - SIG_MANUFACTURER: 
"LEDVANCE", - SIG_MODEL: "PLUG", - SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 81, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 2821, 64513, 64520], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.ledvance_plug_switch", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.ledvance_plug_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_plug_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_plug_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ledvance_plug_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 39, - SIG_MANUFACTURER: "LEDVANCE", - SIG_MODEL: "RT RGBW", - SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.ledvance_rt_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - 
DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.ledvance_rt_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_rt_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_rt_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ledvance_rt_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 40, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.plug.maus01", - SIG_NODE_DESC: b"\x01@\x8e_\x11\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 81, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 2, 3, 4, 5, 6, 10, 16, 2820], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 9, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [12], - SIG_EP_OUTPUT: [4, 12], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 83, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [12], - SIG_EP_OUTPUT: [12], - SIG_EP_PROFILE: 260, - }, - 100: { - SIG_EP_TYPE: 263, - DEV_SIG_EP_ID: 100, - SIG_EP_INPUT: [15], - SIG_EP_OUTPUT: [4, 15], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.lumi_lumi_plug_maus01_switch", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_device_temperature", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: 
"ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_plug_maus01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-100-15"): { - DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], - DEV_SIG_ENT_MAP_CLASS: "BinaryInput", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_plug_maus01_binary_input", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_summation_delivered", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_plug_maus01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 41, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.relay.c2acn01", - SIG_NODE_DESC: b"\x01@\x8e7\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 2, 3, 4, 5, 6, 10, 12, 16, 2820], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - 
SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [4, 5, 6, 16], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_relay_c2acn01_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_device_temperature", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_relay_c2acn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: 
"sensor.lumi_lumi_relay_c2acn01_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_lqi", - }, - ("light", "00:11:22:33:44:55:66:77-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_relay_c2acn01_light_2", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_relay_c2acn01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 42, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b186acn01", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 2, - 
SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 24323, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 12, 18], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b186acn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_remote_b186acn01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 43, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b286acn01", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 24323, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 12, 18], - SIG_EP_PROFILE: 260, - }, - }, - 
DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b286acn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_remote_b286acn01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 44, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b286opcn01", - SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 261, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6, 8, 768], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 3: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 4: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 5: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 6: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - 
SIG_EP_PROFILE: -1, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b286opcn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 45, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b486opcn01", - SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 261, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6, 8, 768], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 259, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3], - SIG_EP_OUTPUT: [3, 6], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 4: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 5: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 6: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b486opcn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 46, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b686opcn01", - SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 261, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6, 8, 768], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b686opcn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_lqi", - }, - }, - }, - { - 
DEV_SIG_DEV_NO: 47, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b686opcn01", - SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 261, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6, 8, 768], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 259, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3], - SIG_EP_OUTPUT: [3, 6], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: None, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: None, - }, - 4: { - SIG_EP_TYPE: None, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: None, - }, - 5: { - SIG_EP_TYPE: None, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: None, - }, - 6: { - SIG_EP_TYPE: None, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: None, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b686opcn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 48, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.router", - SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 8: { - SIG_EP_TYPE: 
256, - DEV_SIG_EP_ID: 8, - SIG_EP_INPUT: [0, 6], - SIG_EP_OUTPUT: [0, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-8"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", - }, - }, - }, - { - DEV_SIG_DEV_NO: 49, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.router", - SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 8: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 8, - SIG_EP_INPUT: [0, 6, 11, 17], - SIG_EP_OUTPUT: [0, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-8"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - 
DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", - }, - }, - }, - { - DEV_SIG_DEV_NO: 50, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.router", - SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 8: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 8, - SIG_EP_INPUT: [0, 6, 17], - SIG_EP_OUTPUT: [0, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-8"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", - }, - }, - }, - { - DEV_SIG_DEV_NO: 51, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sen_ill.mgl01", - SIG_NODE_DESC: b"\x02@\x84n\x12\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 262, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 1024], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_battery", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sen_ill_mgl01_identify", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-1024"): { - DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_illuminance", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 52, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_86sw1", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 24323, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 12, 18], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_86sw1_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - 
DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_86sw1_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 53, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_cube.aqgl01", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 28417, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 25], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 28418, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 28419, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3, 12], - SIG_EP_OUTPUT: [3, 4, 5, 12], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_cube_aqgl01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - 
DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_cube_aqgl01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 54, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_ht", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 25, 1026, 1029, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 24323, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3], - SIG_EP_OUTPUT: [3, 4, 5, 12], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_ht_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1029"): { - DEV_SIG_CLUSTER_HANDLERS: ["humidity"], - DEV_SIG_ENT_MAP_CLASS: "Humidity", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_humidity", - }, - ("update", 
"00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_ht_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 55, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_magnet", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2128, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 25, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 6, 8, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_magnet_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_magnet_opening", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_magnet_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 56, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_magnet.aq2", - SIG_NODE_DESC: 
b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 65535], - SIG_EP_OUTPUT: [0, 4, 6, 65535], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_magnet_aq2_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_magnet_aq2_opening", - }, - }, - }, - { - DEV_SIG_DEV_NO: 57, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_motion.aq2", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 263, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 1024, 1030, 1280, 65535], - SIG_EP_OUTPUT: [0, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1030"): { - DEV_SIG_CLUSTER_HANDLERS: ["occupancy"], - DEV_SIG_ENT_MAP_CLASS: "Occupancy", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy" - ), - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_motion_aq2_motion", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_motion_aq2_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1024"): { - DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_illuminance", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: ( - "sensor.lumi_lumi_sensor_motion_aq2_device_temperature" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_motion_aq2_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 58, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_smoke", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 12, 18, 1280], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: 
["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_smoke_smoke", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_smoke_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: ( - "sensor.lumi_lumi_sensor_smoke_device_temperature" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_smoke_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 59, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_switch", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 6, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [0, 4, 5, 6, 8, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - 
DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_switch_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_switch_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 60, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_switch.aq2", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 6, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 65535], - SIG_EP_OUTPUT: [0, 4, 6, 65535], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 61, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: 
"lumi.sensor_switch.aq3", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 6, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 18], - SIG_EP_OUTPUT: [0, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 62, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_wleak.aq1", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 2, 3, 1280], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_wleak_aq1_ias_zone", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: ( - "sensor.lumi_lumi_sensor_wleak_aq1_device_temperature" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_wleak_aq1_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_wleak_aq1_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 63, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.vibration.aq1", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.DOOR_LOCK, - INPUT_CLUSTERS: [ - Basic.cluster_id, - Identify.cluster_id, - Ota.cluster_id, - DoorLock.cluster_id, - ], - OUTPUT_CLUSTERS: [ - Basic.cluster_id, - Identify.cluster_id, - Groups.cluster_id, - Scenes.cluster_id, - Ota.cluster_id, - DoorLock.cluster_id, - ], - }, - 2: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: 0x5F02, - INPUT_CLUSTERS: [Identify.cluster_id, MultistateInput.cluster_id], - OUTPUT_CLUSTERS: [ - Identify.cluster_id, - Groups.cluster_id, - Scenes.cluster_id, - MultistateInput.cluster_id, - ], - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_vibration_aq1_vibration", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - 
DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_vibration_aq1_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_device_temperature", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_vibration_aq1_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 64, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.weather", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 1026, 1027, 1029, 65535], - SIG_EP_OUTPUT: [0, 4, 65535], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_weather_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { - DEV_SIG_CLUSTER_HANDLERS: ["pressure"], - DEV_SIG_ENT_MAP_CLASS: 
"Pressure", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_pressure", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1029"): { - DEV_SIG_CLUSTER_HANDLERS: ["humidity"], - DEV_SIG_ENT_MAP_CLASS: "Humidity", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_humidity", - }, - }, - }, - { - DEV_SIG_DEV_NO: 65, - SIG_MANUFACTURER: "NYCE", - SIG_MODEL: "3010", - SIG_NODE_DESC: b"\x02@\x80\xb9\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1280], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.nyce_3010_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.nyce_3010_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_rssi", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 66, - SIG_MANUFACTURER: "NYCE", - SIG_MODEL: "3014", - SIG_NODE_DESC: b"\x02@\x80\xb9\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1280], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.nyce_3014_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.nyce_3014_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 67, - SIG_MANUFACTURER: None, - SIG_MODEL: None, - SIG_NODE_DESC: b"\x10@\x0f5\x11Y=\x00@\x00=\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 5, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [10, 25], - SIG_EP_OUTPUT: [1280], - SIG_EP_PROFILE: 260, - }, - 242: { - SIG_EP_TYPE: 100, - DEV_SIG_EP_ID: 242, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [33], - SIG_EP_PROFILE: 41440, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: {}, - }, - { - DEV_SIG_DEV_NO: 68, - SIG_MANUFACTURER: None, - SIG_MODEL: None, 
- SIG_NODE_DESC: b"\x00@\x8f\xcd\xabR\x80\x00\x00\x00\x80\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 48879, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [1280], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: {}, - }, - { - DEV_SIG_DEV_NO: 69, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "LIGHTIFY A19 RGBW", - SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 3: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 64527], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.osram_lightify_a19_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-3-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_lightify_a19_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_a19_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_a19_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_lightify_a19_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 70, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "LIGHTIFY Dimming Switch", - SIG_NODE_DESC: b"\x02@\x80\x0c\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 2821], - SIG_EP_OUTPUT: [3, 6, 8, 25], - 
SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_lightify_dimming_switch_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_lightify_dimming_switch_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 71, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "LIGHTIFY Flex RGBW", - SIG_NODE_DESC: b"\x19@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 3: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 64527], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.osram_lightify_flex_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-3-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_lightify_flex_rgbw_identify", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-3-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_flex_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_flex_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_lightify_flex_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 72, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "LIGHTIFY RT Tunable White", - SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 3: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2820, 64527], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.osram_lightify_rt_tunable_white_light", - }, - ("button", "00:11:22:33:44:55:66:77-3-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_lightify_rt_tunable_white_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_power"), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: ( - "sensor.osram_lightify_rt_tunable_white_apparent_power" - ), - }, - ("sensor", 
"00:11:22:33:44:55:66:77-3-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_current"), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_voltage"), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: ( - "sensor.osram_lightify_rt_tunable_white_ac_frequency" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.osram_lightify_rt_tunable_white_power_factor" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_rt_tunable_white_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_rt_tunable_white_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_lightify_rt_tunable_white_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 73, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "Plug 01", - SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 3: { - SIG_EP_TYPE: 16, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 2820, 4096, 64527], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 49246, - 
}, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.osram_plug_01_switch", - }, - ("button", "00:11:22:33:44:55:66:77-3-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_plug_01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_plug_01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_plug_01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_plug_01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 74, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "Switch 4x-LIGHTIFY", - SIG_NODE_DESC: b"\x02@\x80\x0c\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 32, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 25, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 4: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 5: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 
6: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [ - "1:0x0005", - "1:0x0006", - "1:0x0008", - "1:0x0019", - "1:0x0300", - "2:0x0005", - "2:0x0006", - "2:0x0008", - "2:0x0300", - "3:0x0005", - "3:0x0006", - "3:0x0008", - "3:0x0300", - "4:0x0005", - "4:0x0006", - "4:0x0008", - "4:0x0300", - "5:0x0005", - "5:0x0006", - "5:0x0008", - "5:0x0300", - "6:0x0005", - "6:0x0006", - "6:0x0008", - "6:0x0300", - ], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_switch_4x_lightify_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 75, - SIG_MANUFACTURER: "Philips", - SIG_MODEL: "RWL020", - SIG_NODE_DESC: b"\x02@\x80\x0b\x10G-\x00\x00\x00-\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2096, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0], - SIG_EP_OUTPUT: [0, 3, 4, 5, 6, 8], - SIG_EP_PROFILE: 49246, - }, - 2: { - SIG_EP_TYPE: 12, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 1, 3, 15, 64512], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "2:0x0019"], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-2-15"): { - DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], - DEV_SIG_ENT_MAP_CLASS: "BinaryInput", - DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_rwl020_binary_input", - }, - ("button", "00:11:22:33:44:55:66:77-2-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.philips_rwl020_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_battery", - }, - ("update", "00:11:22:33:44:55:66:77-2-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.philips_rwl020_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 76, - SIG_MANUFACTURER: "Samjin", - SIG_MODEL: "button", - SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_button_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.samjin_button_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - 
DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.samjin_button_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 77, - SIG_MANUFACTURER: "Samjin", - SIG_MODEL: "multi", - SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 64514], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_multi_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.samjin_multi_identify", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-64514"): { - DEV_SIG_CLUSTER_HANDLERS: ["accelerometer"], - DEV_SIG_ENT_MAP_CLASS: "Accelerometer", - DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_multi_accelerometer", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - 
DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.samjin_multi_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 78, - SIG_MANUFACTURER: "Samjin", - SIG_MODEL: "water", - SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_water_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.samjin_water_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_temperature", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.samjin_water_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 79, - SIG_MANUFACTURER: "Securifi Ltd.", - SIG_MODEL: None, - SIG_NODE_DESC: b"\x01@\x8e\x02\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 0, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 2820, 2821], - SIG_EP_OUTPUT: [0, 1, 3, 4, 5, 6, 25, 2820, 2821], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.securifi_ltd_unk_model_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_current", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.securifi_ltd_unk_model_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.securifi_ltd_unk_model_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 80, - SIG_MANUFACTURER: "Sercomm Corp.", - SIG_MODEL: "SZ-DWS04N_SF", - SIG_NODE_DESC: b"\x02@\x801\x11R\xff\x00\x00\x00\xff\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): 
{ - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.sercomm_corp_sz_dws04n_sf_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_dws04n_sf_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_dws04n_sf_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 81, - SIG_MANUFACTURER: "Sercomm Corp.", - SIG_MODEL: "SZ-ESW01", - SIG_NODE_DESC: b"\x01@\x8e1\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 1794, 2820, 2821], - SIG_EP_OUTPUT: [3, 10, 25, 2821], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 259, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.sercomm_corp_sz_esw01_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_esw01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: 
"sensor.sercomm_corp_sz_esw01_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_esw01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 82, - SIG_MANUFACTURER: "Sercomm Corp.", - SIG_MODEL: "SZ-PIR04", - SIG_NODE_DESC: b"\x02@\x801\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1024, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.sercomm_corp_sz_pir04_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_pir04_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1024"): { - DEV_SIG_CLUSTER_HANDLERS: 
["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_illuminance", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_pir04_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 83, - SIG_MANUFACTURER: "Sinope Technologies", - SIG_MODEL: "RM3250ZB", - SIG_NODE_DESC: b"\x11@\x8e\x9c\x11G+\x00\x00*+\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 2820, 2821, 65281], - SIG_EP_OUTPUT: [3, 4, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_rm3250zb_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: ( 
- "sensor.sinope_technologies_rm3250zb_apparent_power" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.sinope_technologies_rm3250zb_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_rm3250zb_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 84, - SIG_MANUFACTURER: "Sinope Technologies", - SIG_MODEL: 
"TH1123ZB", - SIG_NODE_DESC: b"\x12@\x8c\x9c\x11G+\x00\x00\x00+\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 513, 516, 1026, 2820, 2821, 65281], - SIG_EP_OUTPUT: [25, 65281], - SIG_EP_PROFILE: 260, - }, - 196: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 196, - SIG_EP_INPUT: [1], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49757, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_th1123zb_identify", - }, - ("climate", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: [ - "thermostat", - "sinope_manufacturer_specific", - ], - DEV_SIG_ENT_MAP_CLASS: "SinopeTechnologiesThermostat", - DEV_SIG_ENT_MAP_ID: "climate.sinope_technologies_th1123zb_thermostat", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: ( - "sensor.sinope_technologies_th1123zb_apparent_power" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_voltage", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SinopeHVACAction", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_hvac_action", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_pi_heating_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_setpoint_change_source", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - 
DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_th1123zb_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 85, - SIG_MANUFACTURER: "Sinope Technologies", - SIG_MODEL: "TH1124ZB", - SIG_NODE_DESC: b"\x11@\x8e\x9c\x11G+\x00\x00\x00+\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 513, 516, 1026, 2820, 2821, 65281], - SIG_EP_OUTPUT: [25, 65281], - SIG_EP_PROFILE: 260, - }, - 196: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 196, - SIG_EP_INPUT: [1], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49757, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_th1124zb_identify", - }, - ("climate", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: [ - "thermostat", - "sinope_manufacturer_specific", - ], - DEV_SIG_ENT_MAP_CLASS: "SinopeTechnologiesThermostat", - DEV_SIG_ENT_MAP_ID: "climate.sinope_technologies_th1124zb_thermostat", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: ( - "sensor.sinope_technologies_th1124zb_apparent_power" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - 
DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SinopeHVACAction", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_hvac_action", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_pi_heating_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_setpoint_change_source", - }, - ("update", 
"00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_th1124zb_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 86, - SIG_MANUFACTURER: "SmartThings", - SIG_MODEL: "outletv4", - SIG_NODE_DESC: b"\x01@\x8e\n\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 9, 15, 2820], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-15"): { - DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], - DEV_SIG_ENT_MAP_CLASS: "BinaryInput", - DEV_SIG_ENT_MAP_ID: "binary_sensor.smartthings_outletv4_binary_input", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.smartthings_outletv4_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_voltage", - }, - 
("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.smartthings_outletv4_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.smartthings_outletv4_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 87, - SIG_MANUFACTURER: "SmartThings", - SIG_MODEL: "tagv4", - SIG_NODE_DESC: b"\x02@\x80\n\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 32768, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 15, 32], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("device_tracker", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "ZHADeviceScannerEntity", - DEV_SIG_ENT_MAP_ID: "device_tracker.smartthings_tagv4_device_scanner", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-15"): { - DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], - 
DEV_SIG_ENT_MAP_CLASS: "BinaryInput", - DEV_SIG_ENT_MAP_ID: "binary_sensor.smartthings_tagv4_binary_input", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.smartthings_tagv4_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_tagv4_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_tagv4_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.smartthings_tagv4_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 88, - SIG_MANUFACTURER: "Third Reality, Inc", - SIG_MODEL: "3RSS007Z", - SIG_NODE_DESC: b"\x02@\x803\x12\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 25], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.third_reality_inc_3rss007z_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss007z_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss007z_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: 
"Switch", - DEV_SIG_ENT_MAP_ID: "switch.third_reality_inc_3rss007z_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.third_reality_inc_3rss007z_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 89, - SIG_MANUFACTURER: "Third Reality, Inc", - SIG_MODEL: "3RSS008Z", - SIG_NODE_DESC: b"\x02@\x803\x12\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 25], - SIG_EP_OUTPUT: [1], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.third_reality_inc_3rss008z_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.third_reality_inc_3rss008z_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.third_reality_inc_3rss008z_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 90, - SIG_MANUFACTURER: "Visonic", - SIG_MODEL: "MCT-340 E", - 
SIG_NODE_DESC: b"\x02@\x80\x11\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.visonic_mct_340_e_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.visonic_mct_340_e_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.visonic_mct_340_e_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 91, - SIG_MANUFACTURER: "Zen Within", - SIG_MODEL: "Zen-01", - SIG_NODE_DESC: b"\x02@\x80X\x11R\x80\x00\x00\x00\x80\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 32, 513, 514, 516, 2821], - SIG_EP_OUTPUT: [10, 25], - 
SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.zen_within_zen_01_identify", - }, - ("climate", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat", "fan"], - DEV_SIG_ENT_MAP_CLASS: "ZenWithinThermostat", - DEV_SIG_ENT_MAP_ID: "climate.zen_within_zen_01_thermostat", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "ThermostatHVACAction", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_hvac_action", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_pi_heating_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_setpoint_change_source", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.zen_within_zen_01_firmware", - }, - }, - }, - { - 
DEV_SIG_DEV_NO: 92, - SIG_MANUFACTURER: "_TYZB01_ns1ndbww", - SIG_MODEL: "TS0004", - SIG_NODE_DESC: b"\x01@\x8e\x02\x10R\x00\x02\x00,\x00\x02\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 4, 5, 6, 10], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [4, 5, 6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [4, 5, 6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - 4: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [4, 5, 6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.tyzb01_ns1ndbww_ts0004_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.tyzb01_ns1ndbww_ts0004_lqi", - }, - ("light", "00:11:22:33:44:55:66:77-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_2", - }, - ("light", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_3", - }, - ("light", "00:11:22:33:44:55:66:77-4"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_4", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: 
"ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.tyzb01_ns1ndbww_ts0004_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 93, - SIG_MANUFACTURER: "netvox", - SIG_MODEL: "Z308E3ED", - SIG_NODE_DESC: b"\x02@\x80\x9f\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 21, 32, 1280, 2821], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.netvox_z308e3ed_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.netvox_z308e3ed_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 94, - SIG_MANUFACTURER: "sengled", - SIG_MODEL: "E11-G13", - SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", - DEV_SIG_ENT_MAP_ID: 
"light.sengled_e11_g13_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sengled_e11_g13_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sengled_e11_g13_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 95, - SIG_MANUFACTURER: "sengled", - SIG_MODEL: "E12-N14", - SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", - DEV_SIG_ENT_MAP_ID: "light.sengled_e12_n14_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: 
"ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sengled_e12_n14_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sengled_e12_n14_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 96, - SIG_MANUFACTURER: "sengled", - SIG_MODEL: "Z01-A19NAE26", - SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 1794, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", - DEV_SIG_ENT_MAP_ID: "light.sengled_z01_a19nae26_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sengled_z01_a19nae26_identify", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sengled_z01_a19nae26_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 97, - SIG_MANUFACTURER: "unk_manufacturer", - SIG_MODEL: "unk_model", - SIG_NODE_DESC: b"\x01@\x8e\x10\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 512, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 10, 21, 256, 64544, 64545], - SIG_EP_OUTPUT: [3, 64544], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.unk_manufacturer_unk_model_identify", - }, - ("cover", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off", "shade"], - DEV_SIG_ENT_MAP_CLASS: "Shade", - DEV_SIG_ENT_MAP_ID: "cover.unk_manufacturer_unk_model_shade", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - 
DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.unk_manufacturer_unk_model_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.unk_manufacturer_unk_model_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 98, - SIG_MANUFACTURER: "Digi", - SIG_MODEL: "XBee3", - SIG_NODE_DESC: b"\x01@\x8e\x1e\x10R\xff\x00\x00,\xff\x00\x00", - SIG_ENDPOINTS: { - 208: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 208, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 209: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 209, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 210: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 210, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 211: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 211, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 212: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 212, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 213: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 213, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 214: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 214, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 215: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 215, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 216: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 216, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 217: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 217, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 218: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 218, - SIG_EP_INPUT: [6, 13], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 219: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 219, - SIG_EP_INPUT: [6, 13], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 220: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 
220, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 221: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 221, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 222: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 222, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 232: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 232, - SIG_EP_INPUT: [17, 146], - SIG_EP_OUTPUT: [8, 17], - SIG_EP_PROFILE: 49413, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["232:0x0008"], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-208-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input", - }, - ("switch", "00:11:22:33:44:55:66:77-208-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch", - }, - ("sensor", "00:11:22:33:44:55:66:77-209-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_2", - }, - ("switch", "00:11:22:33:44:55:66:77-209-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_2", - }, - ("sensor", "00:11:22:33:44:55:66:77-210-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_3", - }, - ("switch", "00:11:22:33:44:55:66:77-210-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_3", - }, - ("sensor", "00:11:22:33:44:55:66:77-211-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_4", - }, - ("switch", "00:11:22:33:44:55:66:77-211-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - 
DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_4", - }, - ("switch", "00:11:22:33:44:55:66:77-212-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_5", - }, - ("switch", "00:11:22:33:44:55:66:77-213-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_6", - }, - ("switch", "00:11:22:33:44:55:66:77-214-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_7", - }, - ("sensor", "00:11:22:33:44:55:66:77-215-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_5", - }, - ("switch", "00:11:22:33:44:55:66:77-215-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_8", - }, - ("switch", "00:11:22:33:44:55:66:77-216-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_9", - }, - ("switch", "00:11:22:33:44:55:66:77-217-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_10", - }, - ("number", "00:11:22:33:44:55:66:77-218-13"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_output"], - DEV_SIG_ENT_MAP_CLASS: "ZhaNumber", - DEV_SIG_ENT_MAP_ID: "number.digi_xbee3_number", - }, - ("switch", "00:11:22:33:44:55:66:77-218-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_11", - }, - ("switch", "00:11:22:33:44:55:66:77-219-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_12", - }, - ("number", "00:11:22:33:44:55:66:77-219-13"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_output"], - 
DEV_SIG_ENT_MAP_CLASS: "ZhaNumber", - DEV_SIG_ENT_MAP_ID: "number.digi_xbee3_number_2", - }, - ("switch", "00:11:22:33:44:55:66:77-220-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_13", - }, - ("switch", "00:11:22:33:44:55:66:77-221-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_14", - }, - ("switch", "00:11:22:33:44:55:66:77-222-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_15", - }, - }, - }, - { - DEV_SIG_DEV_NO: 99, - SIG_MANUFACTURER: "efektalab.ru", - SIG_MODEL: "EFEKTA_PWS", - SIG_NODE_DESC: b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 12, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 1026, 1032], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1032"): { - DEV_SIG_CLUSTER_HANDLERS: ["soil_moisture"], - DEV_SIG_ENT_MAP_CLASS: "SoilMoisture", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_soil_moisture", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: 
"sensor.efektalab_ru_efekta_pws_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 100, - SIG_MANUFACTURER: "Konke", - SIG_MODEL: "3AFE170100510001", - SIG_NODE_DESC: b"\x02@\x80\x02\x10RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - PROFILE_ID: 260, - DEVICE_TYPE: zha.DeviceType.ON_OFF_OUTPUT, - INPUT_CLUSTERS: [ - Basic.cluster_id, - PowerConfiguration.cluster_id, - Identify.cluster_id, - Groups.cluster_id, - Scenes.cluster_id, - OnOff.cluster_id, - ], - OUTPUT_CLUSTERS: [ - Identify.cluster_id, - ], - } - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.konke_3afe170100510001_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 101, - SIG_MANUFACTURER: "Philips", - SIG_MODEL: "SML001", - SIG_NODE_DESC: b"\x02@\x80\x0b\x10Y?\x00\x00\x00?\x00\x00", - SIG_ENDPOINTS: { - 1: { - PROFILE_ID: zll.PROFILE_ID, - DEVICE_TYPE: zll.DeviceType.ON_OFF_SENSOR, - INPUT_CLUSTERS: [Basic.cluster_id], - OUTPUT_CLUSTERS: [ - Basic.cluster_id, - Identify.cluster_id, - Groups.cluster_id, - Scenes.cluster_id, - OnOff.cluster_id, - LevelControl.cluster_id, - Color.cluster_id, - ], - }, - 2: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, - INPUT_CLUSTERS: [ - Basic.cluster_id, - PowerConfiguration.cluster_id, - 
Identify.cluster_id, - IlluminanceMeasurement.cluster_id, - TemperatureMeasurement.cluster_id, - OccupancySensing.cluster_id, - ], - OUTPUT_CLUSTERS: [ - Ota.cluster_id, - ], - }, - }, - DEV_SIG_ATTRIBUTES: { - 2: { - "basic": { - "trigger_indicator": Bool(False), - }, - "philips_occupancy": { - "sensitivity": uint8_t(1), - }, - } - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [ - "1:0x0005", - "1:0x0006", - "1:0x0008", - "1:0x0300", - "2:0x0019", - ], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-2-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.philips_sml001_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Motion", - DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_sml001_motion", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-1024"): { - DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_illuminance", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-2-1030"): { - DEV_SIG_CLUSTER_HANDLERS: ["philips_occupancy"], - DEV_SIG_ENT_MAP_CLASS: "HueOccupancy", - DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_sml001_occupancy", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: 
"sensor.philips_sml001_temperature", - }, - ("switch", "00:11:22:33:44:55:66:77-2-0-trigger_indicator"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "HueMotionTriggerIndicatorSwitch", - DEV_SIG_ENT_MAP_ID: "switch.philips_sml001_led_trigger_indicator", - }, - ("select", "00:11:22:33:44:55:66:77-2-1030-motion_sensitivity"): { - DEV_SIG_CLUSTER_HANDLERS: ["philips_occupancy"], - DEV_SIG_ENT_MAP_CLASS: "HueV1MotionSensitivity", - DEV_SIG_ENT_MAP_ID: "select.philips_sml001_motion_sensitivity", - }, - ("update", "00:11:22:33:44:55:66:77-2-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.philips_sml001_firmware", - }, - }, - }, -] diff --git a/tests/components/zone/test_trigger.py b/tests/components/zone/test_trigger.py index 6ec5e2fd894..a28b3c0592a 100644 --- a/tests/components/zone/test_trigger.py +++ b/tests/components/zone/test_trigger.py @@ -8,7 +8,7 @@ from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import async_mock_service, mock_component +from tests.common import mock_component @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -16,14 +16,8 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -43,7 +37,7 @@ def setup_comp(hass): async def test_if_fires_on_zone_enter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: 
list[ServiceCall] ) -> None: """Test for firing on zone enter.""" context = Context() @@ -88,9 +82,11 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id - assert calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id + assert ( + service_calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" + ) # Set out of zone again so we can trigger call hass.states.async_set( @@ -104,17 +100,20 @@ async def test_if_fires_on_zone_enter( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.states.async_set( "test.entity", "hello", {"latitude": 32.880586, "longitude": -117.237564} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_zone_enter_uuid( - hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for firing on zone enter when device is specified by entity registry id.""" context = Context() @@ -165,9 +164,11 @@ async def test_if_fires_on_zone_enter_uuid( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id - assert calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id + assert ( + service_calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" + ) # Set out of zone again so we can trigger call hass.states.async_set( @@ -181,17 +182,18 @@ async def test_if_fires_on_zone_enter_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.states.async_set( "test.entity", "hello", 
{"latitude": 32.880586, "longitude": -117.237564} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_not_fires_for_enter_on_zone_leave( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on zone leave.""" hass.states.async_set( @@ -220,11 +222,11 @@ async def test_if_not_fires_for_enter_on_zone_leave( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_zone_leave( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone leave.""" hass.states.async_set( @@ -253,11 +255,11 @@ async def test_if_fires_on_zone_leave( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_not_fires_for_leave_on_zone_enter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on zone enter.""" hass.states.async_set( @@ -286,10 +288,12 @@ async def test_if_not_fires_for_leave_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 -async def test_zone_condition(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_zone_condition( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test for zone condition.""" hass.states.async_set( "test.entity", "hello", {"latitude": 32.880586, "longitude": -117.237564} @@ -314,11 +318,11 @@ async def test_zone_condition(hass: HomeAssistant, calls: list[ServiceCall]) -> hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_unknown_zone( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, 
caplog: pytest.LogCaptureFixture ) -> None: """Test for firing on zone enter.""" context = Context() diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index a2a4c217b8b..60deb7dbce8 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -1,9 +1,11 @@ """Provide common Z-Wave JS fixtures.""" import asyncio +from collections.abc import Generator import copy import io import json +from typing import Any from unittest.mock import DEFAULT, AsyncMock, patch import pytest @@ -20,13 +22,13 @@ from tests.common import MockConfigEntry, load_fixture @pytest.fixture(name="addon_info_side_effect") -def addon_info_side_effect_fixture(): +def addon_info_side_effect_fixture() -> Any | None: """Return the add-on info side effect.""" return None @pytest.fixture(name="addon_info") -def mock_addon_info(addon_info_side_effect): +def mock_addon_info(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: """Mock Supervisor add-on info.""" with patch( "homeassistant.components.hassio.addon_manager.async_get_addon_info", @@ -44,13 +46,15 @@ def mock_addon_info(addon_info_side_effect): @pytest.fixture(name="addon_store_info_side_effect") -def addon_store_info_side_effect_fixture(): +def addon_store_info_side_effect_fixture() -> Any | None: """Return the add-on store info side effect.""" return None @pytest.fixture(name="addon_store_info") -def mock_addon_store_info(addon_store_info_side_effect): +def mock_addon_store_info( + addon_store_info_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock Supervisor add-on info.""" with patch( "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", @@ -66,7 +70,7 @@ def mock_addon_store_info(addon_store_info_side_effect): @pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info, addon_info): +def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: """Mock add-on already running.""" 
addon_store_info.return_value = { "available": True, @@ -81,7 +85,9 @@ def mock_addon_running(addon_store_info, addon_info): @pytest.fixture(name="addon_installed") -def mock_addon_installed(addon_store_info, addon_info): +def mock_addon_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: """Mock add-on already installed but not running.""" addon_store_info.return_value = { "available": True, @@ -96,23 +102,27 @@ def mock_addon_installed(addon_store_info, addon_info): @pytest.fixture(name="addon_not_installed") -def mock_addon_not_installed(addon_store_info, addon_info): +def mock_addon_not_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: """Mock add-on not installed.""" addon_store_info.return_value["available"] = True return addon_info @pytest.fixture(name="addon_options") -def mock_addon_options(addon_info): +def mock_addon_options(addon_info: AsyncMock): """Mock add-on options.""" return addon_info.return_value["options"] @pytest.fixture(name="set_addon_options_side_effect") -def set_addon_options_side_effect_fixture(addon_options): +def set_addon_options_side_effect_fixture( + addon_options: dict[str, Any], +) -> Any | None: """Return the set add-on options side effect.""" - async def set_addon_options(hass: HomeAssistant, slug, options): + async def set_addon_options(hass: HomeAssistant, slug: str, options: dict) -> None: """Mock set add-on options.""" addon_options.update(options["options"]) @@ -120,7 +130,9 @@ def set_addon_options_side_effect_fixture(addon_options): @pytest.fixture(name="set_addon_options") -def mock_set_addon_options(set_addon_options_side_effect): +def mock_set_addon_options( + set_addon_options_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock set add-on options.""" with patch( "homeassistant.components.hassio.addon_manager.async_set_addon_options", @@ -130,7 +142,9 @@ def mock_set_addon_options(set_addon_options_side_effect): 
@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture(addon_store_info, addon_info): +def install_addon_side_effect_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: """Return the install add-on side effect.""" async def install_addon(hass: HomeAssistant, slug): @@ -149,7 +163,7 @@ def install_addon_side_effect_fixture(addon_store_info, addon_info): @pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect): +def mock_install_addon(install_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock install add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_install_addon", @@ -159,7 +173,7 @@ def mock_install_addon(install_addon_side_effect): @pytest.fixture(name="update_addon") -def mock_update_addon(): +def mock_update_addon() -> Generator[AsyncMock]: """Mock update add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_update_addon" @@ -168,7 +182,9 @@ def mock_update_addon(): @pytest.fixture(name="start_addon_side_effect") -def start_addon_side_effect_fixture(addon_store_info, addon_info): +def start_addon_side_effect_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: """Return the start add-on options side effect.""" async def start_addon(hass: HomeAssistant, slug): @@ -186,7 +202,7 @@ def start_addon_side_effect_fixture(addon_store_info, addon_info): @pytest.fixture(name="start_addon") -def mock_start_addon(start_addon_side_effect): +def mock_start_addon(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock start add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_start_addon", @@ -196,7 +212,7 @@ def mock_start_addon(start_addon_side_effect): @pytest.fixture(name="stop_addon") -def stop_addon_fixture(): +def stop_addon_fixture() -> Generator[AsyncMock]: """Mock stop add-on.""" with patch( 
"homeassistant.components.hassio.addon_manager.async_stop_addon" @@ -205,13 +221,13 @@ def stop_addon_fixture(): @pytest.fixture(name="restart_addon_side_effect") -def restart_addon_side_effect_fixture(): +def restart_addon_side_effect_fixture() -> Any | None: """Return the restart add-on options side effect.""" return None @pytest.fixture(name="restart_addon") -def mock_restart_addon(restart_addon_side_effect): +def mock_restart_addon(restart_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock restart add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_restart_addon", @@ -221,7 +237,7 @@ def mock_restart_addon(restart_addon_side_effect): @pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture(): +def uninstall_addon_fixture() -> Generator[AsyncMock]: """Mock uninstall add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_uninstall_addon" @@ -230,7 +246,7 @@ def uninstall_addon_fixture(): @pytest.fixture(name="create_backup") -def create_backup_fixture(): +def create_backup_fixture() -> Generator[AsyncMock]: """Mock create backup.""" with patch( "homeassistant.components.hassio.addon_manager.async_create_backup" @@ -472,6 +488,12 @@ def iblinds_v3_state_fixture(): return json.loads(load_fixture("zwave_js/cover_iblinds_v3_state.json")) +@pytest.fixture(name="zvidar_state", scope="package") +def zvidar_state_fixture(): + """Load the ZVIDAR node state fixture data.""" + return json.loads(load_fixture("zwave_js/cover_zvidar_state.json")) + + @pytest.fixture(name="qubino_shutter_state", scope="package") def qubino_shutter_state_fixture(): """Load the Qubino Shutter node state fixture data.""" @@ -1081,6 +1103,14 @@ def iblinds_v3_cover_fixture(client, iblinds_v3_state): return node +@pytest.fixture(name="zvidar") +def zvidar_cover_fixture(client, zvidar_state): + """Mock a ZVIDAR window cover node.""" + node = Node(client, copy.deepcopy(zvidar_state)) + client.driver.controller.nodes[node.node_id] = 
node + return node + + @pytest.fixture(name="qubino_shutter") def qubino_shutter_cover_fixture(client, qubino_shutter_state): """Mock a Qubino flush shutter node.""" diff --git a/tests/components/zwave_js/fixtures/cover_zvidar_state.json b/tests/components/zwave_js/fixtures/cover_zvidar_state.json new file mode 100644 index 00000000000..05118931026 --- /dev/null +++ b/tests/components/zwave_js/fixtures/cover_zvidar_state.json @@ -0,0 +1,1120 @@ +{ + "nodeId": 270, + "index": 0, + "installerIcon": 6656, + "userIcon": 6656, + "status": 4, + "ready": true, + "isListening": false, + "isRouting": false, + "isSecure": true, + "manufacturerId": 1114, + "productId": 1287, + "productType": 2308, + "firmwareVersion": "1.10.0", + "zwavePlusVersion": 2, + "name": "Window Blind Controller", + "location": "**REDACTED**", + "deviceConfig": { + "filename": "/snapshot/build/node_modules/@zwave-js/config/config/devices/0x045a/Z-CM-V01.json", + "isEmbedded": true, + "manufacturer": "ZVIDAR", + "manufacturerId": 1114, + "label": "Z-CM-V01", + "description": "Smart Curtain Motor", + "devices": [ + { + "productType": 2308, + "productId": 1287 + } + ], + "firmwareVersion": { + "min": "0.0", + "max": "255.255" + }, + "preferred": false, + "paramInformation": { + "_map": {} + }, + "compat": { + "removeCCs": {} + } + }, + "label": "Z-CM-V01", + "interviewAttempts": 0, + "isFrequentListening": "1000ms", + "maxDataRate": 100000, + "supportedDataRates": [100000], + "protocolVersion": 3, + "supportsBeaming": false, + "supportsSecurity": true, + "nodeType": 1, + "zwavePlusNodeType": 0, + "zwavePlusRoleType": 7, + "deviceClass": { + "basic": { + "key": 3, + "label": "End Node" + }, + "generic": { + "key": 17, + "label": "Multilevel Switch" + }, + "specific": { + "key": 0, + "label": "Unused" + } + }, + "interviewStage": "Complete", + "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x045a:0x0904:0x0507:1.10.0", + "statistics": { + "commandsTX": 2, + "commandsRX": 1, + 
"commandsDroppedRX": 1, + "commandsDroppedTX": 0, + "timeoutResponse": 0, + "rtt": 357.6, + "lastSeen": "2024-07-21T16:42:38.086Z", + "rssi": -89, + "lwr": { + "protocolDataRate": 4, + "repeaters": [], + "rssi": -91, + "repeaterRSSI": [] + } + }, + "highestSecurityClass": 1, + "isControllerNode": false, + "keepAwake": false, + "lastSeen": "2024-07-21T16:42:38.086Z", + "protocol": 1, + "values": [ + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "targetValue", + "propertyName": "targetValue", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Target value", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 99, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "duration", + "propertyName": "duration", + "ccVersion": 4, + "metadata": { + "type": "duration", + "readable": true, + "writeable": false, + "label": "Remaining duration", + "stateful": true, + "secret": false + }, + "value": "unknown" + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "currentValue", + "propertyName": "currentValue", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Current value", + "min": 0, + "max": 99, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "Up", + "propertyName": "Up", + "ccVersion": 4, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Perform a level change (Up)", + "ccSpecific": { + "switchType": 2 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + }, + "value": true + }, + { + 
"endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "Down", + "propertyName": "Down", + "ccVersion": 4, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Perform a level change (Down)", + "ccSpecific": { + "switchType": 2 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "restorePrevious", + "propertyName": "restorePrevious", + "ccVersion": 4, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Restore previous value", + "states": { + "true": "Restore" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 1, + "propertyName": "Hand Button Action", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Hand Button Action", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Close", + "1": "Open" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 2, + "propertyName": "Motor Direction", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Motor Direction", + "default": 1, + "min": 1, + "max": 3, + "states": { + "1": "Forward", + "2": "Opposite", + "3": "Reverse" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 3, + "propertyName": "Manually Set Open Boundary", + "ccVersion": 4, + "metadata": { + "type": 
"number", + "readable": true, + "writeable": true, + "label": "Manually Set Open Boundary", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Cancel", + "1": "Start" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 4, + "propertyName": "Manually Set Closed Boundary", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Manually Set Closed Boundary", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Cancel", + "1": "Start" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 5, + "propertyName": "Control Motor", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Control Motor", + "default": 3, + "min": 1, + "max": 3, + "states": { + "1": "Open (Up)", + "2": "Close (Down)", + "3": "Stop" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 6, + "propertyName": "Calibrate Limit Position", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Calibrate Limit Position", + "default": 1, + "min": 1, + "max": 3, + "states": { + "1": "Upper limit", + "2": "Lower limit", + "3": "Third limit" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 7, + "propertyName": "Delete Limit Position", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + 
"writeable": true, + "label": "Delete Limit Position", + "default": 0, + "min": 0, + "max": 3, + "states": { + "0": "All limits", + "1": "Only upper limit", + "2": "Only lower limit", + "3": "Only third limit" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 8, + "propertyName": "Low Battery Level Alarm Threshold", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Low Battery Level Alarm Threshold", + "default": 10, + "min": 0, + "max": 50, + "unit": "%", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 10 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyName": "Battery Report Interval", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Battery Report Interval", + "default": 3600, + "min": 0, + "max": 2678400, + "unit": "seconds", + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 3600 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 10, + "propertyName": "Battery Change Report Threshold", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Battery Change Report Threshold", + "default": 5, + "min": 0, + "max": 50, + "unit": "%", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 5 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Power Management", + "propertyKey": "Mains status", + "propertyName": "Power Management", + "propertyKeyName": "Mains status", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, 
+ "writeable": false, + "label": "Mains status", + "ccSpecific": { + "notificationType": 8 + }, + "min": 0, + "max": 255, + "states": { + "2": "AC mains disconnected", + "3": "AC mains re-connected" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmType", + "propertyName": "alarmType", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Type", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmLevel", + "propertyName": "alarmLevel", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Level", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "manufacturerId", + "propertyName": "manufacturerId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Manufacturer ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 1114 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productType", + "propertyName": "productType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product type", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 2308 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productId", + "propertyName": "productId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": 
false + }, + "value": 1287 + }, + { + "endpoint": 0, + "commandClass": 128, + "commandClassName": "Battery", + "property": "level", + "propertyName": "level", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Battery level", + "min": 0, + "max": 100, + "unit": "%", + "stateful": true, + "secret": false + }, + "value": 86 + }, + { + "endpoint": 0, + "commandClass": 128, + "commandClassName": "Battery", + "property": "isLow", + "propertyName": "isLow", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": false, + "label": "Low battery level", + "stateful": true, + "secret": false + }, + "value": false + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "libraryType", + "propertyName": "libraryType", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Library type", + "states": { + "0": "Unknown", + "1": "Static Controller", + "2": "Controller", + "3": "Enhanced Slave", + "4": "Slave", + "5": "Installer", + "6": "Routing Slave", + "7": "Bridge Controller", + "8": "Device under Test", + "9": "N/A", + "10": "AV Remote", + "11": "AV Device" + }, + "stateful": true, + "secret": false + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "protocolVersion", + "propertyName": "protocolVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "7.16" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "firmwareVersions", + "propertyName": "firmwareVersions", + "ccVersion": 3, + "metadata": { + "type": "string[]", + "readable": true, + "writeable": false, + "label": "Z-Wave chip firmware versions", + "stateful": true, + "secret": false + }, + 
"value": ["1.10"] + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hardwareVersion", + "propertyName": "hardwareVersion", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Z-Wave chip hardware version", + "stateful": true, + "secret": false + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "sdkVersion", + "propertyName": "sdkVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "SDK version", + "stateful": true, + "secret": false + }, + "value": "7.16.3" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationFrameworkAPIVersion", + "propertyName": "applicationFrameworkAPIVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave application framework API version", + "stateful": true, + "secret": false + }, + "value": "10.16.3" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationFrameworkBuildNumber", + "propertyName": "applicationFrameworkBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave application framework API build number", + "stateful": true, + "secret": false + }, + "value": 297 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hostInterfaceVersion", + "propertyName": "hostInterfaceVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Serial API version", + "stateful": true, + "secret": false + }, + "value": "unused" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hostInterfaceBuildNumber", + "propertyName": 
"hostInterfaceBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Serial API build number", + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "zWaveProtocolVersion", + "propertyName": "zWaveProtocolVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "7.16.3" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "zWaveProtocolBuildNumber", + "propertyName": "zWaveProtocolBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol build number", + "stateful": true, + "secret": false + }, + "value": 297 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationVersion", + "propertyName": "applicationVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Application version", + "stateful": true, + "secret": false + }, + "value": "1.10.0" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationBuildNumber", + "propertyName": "applicationBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Application build number", + "stateful": true, + "secret": false + }, + "value": 43707 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 3, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Period: Duration", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Sets the duration of an 
on/off period in 1/10th seconds. Must be set together with \"On/Off Cycle Count\"", + "label": "Node Identify - On/Off Period: Duration", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 3 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 4, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Cycle Count", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Sets the number of on/off periods. 0xff means infinite. Must be set together with \"On/Off Period duration\"", + "label": "Node Identify - On/Off Cycle Count", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 4 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 5, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Period: On time", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "This property is used to set the length of the On time during an On/Off period. It allows asymmetric On/Off periods. 
The value 0x00 MUST represent symmetric On/Off period (On time equal to Off time)", + "label": "Node Identify - On/Off Period: On time", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 5 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "value", + "propertyName": "value", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Indicator value", + "ccSpecific": { + "indicatorId": 0 + }, + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "identify", + "propertyName": "identify", + "ccVersion": 3, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Identify", + "states": { + "true": "Identify" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "timeout", + "propertyName": "timeout", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": true, + "label": "Timeout", + "stateful": true, + "secret": false + } + } + ], + "endpoints": [ + { + "nodeId": 261, + "index": 0, + "installerIcon": 6656, + "userIcon": 6656, + "deviceClass": { + "basic": { + "key": 3, + "label": "End Node" + }, + "generic": { + "key": 17, + "label": "Multilevel Switch" + }, + "specific": { + "key": 0, + "label": "Unused" + } + }, + "commandClasses": [ + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 85, + "name": "Transport Service", + "version": 2, + "isSecure": false + }, + { + "id": 159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 38, + "name": "Multilevel Switch", + "version": 4, + "isSecure": 
true + }, + { + "id": 112, + "name": "Configuration", + "version": 4, + "isSecure": true + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association Group Information", + "version": 3, + "isSecure": true + }, + { + "id": 142, + "name": "Multi Channel Association", + "version": 3, + "isSecure": true + }, + { + "id": 134, + "name": "Version", + "version": 3, + "isSecure": true + }, + { + "id": 114, + "name": "Manufacturer Specific", + "version": 2, + "isSecure": true + }, + { + "id": 90, + "name": "Device Reset Locally", + "version": 1, + "isSecure": true + }, + { + "id": 128, + "name": "Battery", + "version": 1, + "isSecure": true + }, + { + "id": 113, + "name": "Notification", + "version": 8, + "isSecure": true + }, + { + "id": 122, + "name": "Firmware Update Meta Data", + "version": 5, + "isSecure": true + }, + { + "id": 115, + "name": "Powerlevel", + "version": 1, + "isSecure": true + }, + { + "id": 135, + "name": "Indicator", + "version": 3, + "isSecure": true + } + ] + } + ] +} diff --git a/tests/components/zwave_js/test_config_flow.py b/tests/components/zwave_js/test_config_flow.py index 10fd5edfabb..46172f72b2f 100644 --- a/tests/components/zwave_js/test_config_flow.py +++ b/tests/components/zwave_js/test_config_flow.py @@ -1,14 +1,15 @@ """Test the Z-Wave JS config flow.""" import asyncio +from collections.abc import Generator from copy import copy from ipaddress import ip_address -from unittest.mock import DEFAULT, MagicMock, call, patch +from typing import Any +from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, patch import aiohttp import pytest from serial.tools.list_ports_common import ListPortInfo -from typing_extensions import Generator from zwave_js_server.version import VersionInfo from homeassistant import config_entries @@ -59,7 +60,7 @@ CP2652_ZIGBEE_DISCOVERY_INFO = usb.UsbServiceInfo( @pytest.fixture(name="setup_entry") -def setup_entry_fixture(): +def 
setup_entry_fixture() -> Generator[AsyncMock]: """Mock entry setup.""" with patch( "homeassistant.components.zwave_js.async_setup_entry", return_value=True @@ -68,7 +69,7 @@ def setup_entry_fixture(): @pytest.fixture(name="supervisor") -def mock_supervisor_fixture(): +def mock_supervisor_fixture() -> Generator[None]: """Mock Supervisor.""" with patch( "homeassistant.components.zwave_js.config_flow.is_hassio", return_value=True @@ -77,19 +78,21 @@ def mock_supervisor_fixture(): @pytest.fixture(name="discovery_info") -def discovery_info_fixture(): +def discovery_info_fixture() -> dict[str, Any]: """Return the discovery info from the supervisor.""" return DEFAULT @pytest.fixture(name="discovery_info_side_effect") -def discovery_info_side_effect_fixture(): +def discovery_info_side_effect_fixture() -> Any | None: """Return the discovery info from the supervisor.""" return None @pytest.fixture(name="get_addon_discovery_info") -def mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect): +def mock_get_addon_discovery_info( + discovery_info: dict[str, Any], discovery_info_side_effect: Any | None +) -> Generator[AsyncMock]: """Mock get add-on discovery info.""" with patch( "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", @@ -100,13 +103,15 @@ def mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect): @pytest.fixture(name="server_version_side_effect") -def server_version_side_effect_fixture(): +def server_version_side_effect_fixture() -> Any | None: """Return the server version side effect.""" return None @pytest.fixture(name="get_server_version", autouse=True) -def mock_get_server_version(server_version_side_effect, server_version_timeout): +def mock_get_server_version( + server_version_side_effect: Any | None, server_version_timeout: int +) -> Generator[AsyncMock]: """Mock server version.""" version_info = VersionInfo( driver_version="mock-driver-version", @@ -130,18 +135,18 @@ def 
mock_get_server_version(server_version_side_effect, server_version_timeout): @pytest.fixture(name="server_version_timeout") -def mock_server_version_timeout(): +def mock_server_version_timeout() -> int: """Patch the timeout for getting server version.""" return SERVER_VERSION_TIMEOUT @pytest.fixture(name="addon_setup_time", autouse=True) -def mock_addon_setup_time(): +def mock_addon_setup_time() -> Generator[None]: """Mock add-on setup sleep time.""" with patch( "homeassistant.components.zwave_js.config_flow.ADDON_SETUP_TIMEOUT", new=0 - ) as addon_setup_time: - yield addon_setup_time + ): + yield @pytest.fixture(name="serial_port") diff --git a/tests/components/zwave_js/test_device_condition.py b/tests/components/zwave_js/test_device_condition.py index 61ed2bb35fb..17bc4cf0f5d 100644 --- a/tests/components/zwave_js/test_device_condition.py +++ b/tests/components/zwave_js/test_device_condition.py @@ -25,13 +25,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations, async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import async_get_device_automations async def test_get_conditions( @@ -99,7 +93,7 @@ async def test_node_status_state( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for node_status conditions.""" @@ -206,8 +200,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "alive - event - test_event1" + assert len(service_calls) == 1 + assert 
service_calls[0].data["some"] == "alive - event - test_event1" event = Event( "wake up", @@ -225,8 +219,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "awake - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "awake - event - test_event2" event = Event( "sleep", @@ -240,8 +234,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "asleep - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "asleep - event - test_event3" event = Event( "dead", @@ -255,8 +249,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "dead - event - test_event4" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "dead - event - test_event4" async def test_config_parameter_state( @@ -264,7 +258,7 @@ async def test_config_parameter_state( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for config_parameter conditions.""" @@ -331,8 +325,8 @@ async def test_config_parameter_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "Beeper - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "Beeper - event - test_event1" # Flip Beeper state to not match condition event = Event( @@ -375,8 +369,8 @@ async def test_config_parameter_state( 
hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "User Slot Status - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "User Slot Status - event - test_event2" async def test_value_state( @@ -384,7 +378,7 @@ async def test_value_state( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for value conditions.""" @@ -427,8 +421,8 @@ async def test_value_state( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "value - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "value - event - test_event1" async def test_get_condition_capabilities_node_status( diff --git a/tests/components/zwave_js/test_device_trigger.py b/tests/components/zwave_js/test_device_trigger.py index 0fa228288ec..ccc69f7723d 100644 --- a/tests/components/zwave_js/test_device_trigger.py +++ b/tests/components/zwave_js/test_device_trigger.py @@ -28,13 +28,7 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations, async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import async_get_device_automations async def test_no_controller_triggers( @@ -85,7 +79,7 @@ async def test_if_notification_notification_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for event.notification.notification trigger firing.""" node: Node = lock_schlage_be469 @@ -168,13 +162,13 @@ async def 
test_if_notification_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.notification.notification - device - zwave_js_notification - {CommandClass.NOTIFICATION}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.notification.notification2 - device - zwave_js_notification - {CommandClass.NOTIFICATION}" ) @@ -221,7 +215,7 @@ async def test_if_entry_control_notification_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for notification.entry_control trigger firing.""" node: Node = lock_schlage_be469 @@ -303,13 +297,13 @@ async def test_if_entry_control_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.notification.notification - device - zwave_js_notification - {CommandClass.ENTRY_CONTROL}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.notification.notification2 - device - zwave_js_notification - {CommandClass.ENTRY_CONTROL}" ) @@ -389,7 +383,7 @@ async def test_if_node_status_change_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for node_status trigger firing.""" node: Node = lock_schlage_be469 @@ -460,9 +454,9 @@ async def test_if_node_status_change_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].data["some"] == "state.node_status - device - alive" - assert calls[1].data["some"] == "state.node_status2 - device - alive" + assert len(service_calls) == 2 + assert service_calls[0].data["some"] == "state.node_status - device - alive" + assert 
service_calls[1].data["some"] == "state.node_status2 - device - alive" async def test_if_node_status_change_fires_legacy( @@ -472,7 +466,7 @@ async def test_if_node_status_change_fires_legacy( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for node_status trigger firing.""" node: Node = lock_schlage_be469 @@ -543,9 +537,9 @@ async def test_if_node_status_change_fires_legacy( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].data["some"] == "state.node_status - device - alive" - assert calls[1].data["some"] == "state.node_status2 - device - alive" + assert len(service_calls) == 2 + assert service_calls[0].data["some"] == "state.node_status - device - alive" + assert service_calls[1].data["some"] == "state.node_status2 - device - alive" async def test_get_trigger_capabilities_node_status( @@ -645,7 +639,7 @@ async def test_if_basic_value_notification_fires( client, ge_in_wall_dimmer_switch, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for event.value_notification.basic trigger firing.""" node: Node = ge_in_wall_dimmer_switch @@ -742,13 +736,13 @@ async def test_if_basic_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.value_notification.basic - device - zwave_js_value_notification - {CommandClass.BASIC}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.value_notification.basic2 - device - zwave_js_value_notification - {CommandClass.BASIC}" ) @@ -830,7 +824,7 @@ async def test_if_central_scene_value_notification_fires( client, wallmote_central_scene, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for event.value_notification.central_scene 
trigger firing.""" node: Node = wallmote_central_scene @@ -933,13 +927,13 @@ async def test_if_central_scene_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.value_notification.central_scene - device - zwave_js_value_notification - {CommandClass.CENTRAL_SCENE}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.value_notification.central_scene2 - device - zwave_js_value_notification - {CommandClass.CENTRAL_SCENE}" ) @@ -1020,7 +1014,7 @@ async def test_if_scene_activation_value_notification_fires( client, hank_binary_switch, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for event.value_notification.scene_activation trigger firing.""" node: Node = hank_binary_switch @@ -1117,13 +1111,13 @@ async def test_if_scene_activation_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.value_notification.scene_activation - device - zwave_js_value_notification - {CommandClass.SCENE_ACTIVATION}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.value_notification.scene_activation2 - device - zwave_js_value_notification - {CommandClass.SCENE_ACTIVATION}" ) @@ -1200,7 +1194,7 @@ async def test_if_value_updated_value_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for zwave_js.value_updated.value trigger firing.""" node: Node = lock_schlage_be469 @@ -1261,7 +1255,7 @@ async def test_if_value_updated_value_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Publish fake value update 
that should trigger event = Event( @@ -1283,9 +1277,9 @@ async def test_if_value_updated_value_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "zwave_js.value_updated.value - zwave_js.value_updated - open" ) @@ -1296,7 +1290,7 @@ async def test_value_updated_value_no_driver( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test zwave_js.value_updated.value trigger with missing driver.""" node: Node = lock_schlage_be469 @@ -1362,7 +1356,7 @@ async def test_value_updated_value_no_driver( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_get_trigger_capabilities_value_updated_value( @@ -1455,7 +1449,7 @@ async def test_if_value_updated_config_parameter_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for zwave_js.value_updated.config_parameter trigger firing.""" node: Node = lock_schlage_be469 @@ -1517,9 +1511,9 @@ async def test_if_value_updated_config_parameter_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "zwave_js.value_updated.config_parameter - zwave_js.value_updated - 255" ) diff --git a/tests/components/zwave_js/test_discovery.py b/tests/components/zwave_js/test_discovery.py index 1179d8e843c..57841ef2a83 100644 --- a/tests/components/zwave_js/test_discovery.py +++ b/tests/components/zwave_js/test_discovery.py @@ -49,6 +49,18 @@ async def test_iblinds_v2(hass: HomeAssistant, client, iblinds_v2, integration) assert state +async def test_zvidar_state(hass: HomeAssistant, client, zvidar, integration) -> None: + """Test that an ZVIDAR 
Z-CM-V01 multilevel switch value is discovered as a cover.""" + node = zvidar + assert node.device_class.specific.label == "Unused" + + state = hass.states.get("light.window_blind_controller") + assert not state + + state = hass.states.get("cover.window_blind_controller") + assert state + + async def test_ge_12730(hass: HomeAssistant, client, ge_12730, integration) -> None: """Test GE 12730 Fan Controller v2.0 multilevel switch is discovered as a fan.""" node = ge_12730 diff --git a/tests/components/zwave_js/test_fan.py b/tests/components/zwave_js/test_fan.py index 03cd6bfb704..2551fc7b34a 100644 --- a/tests/components/zwave_js/test_fan.py +++ b/tests/components/zwave_js/test_fan.py @@ -653,7 +653,12 @@ async def test_thermostat_fan( assert state.state == STATE_ON assert state.attributes.get(ATTR_FAN_STATE) == "Idle / off" assert state.attributes.get(ATTR_PRESET_MODE) == "Auto low" - assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == FanEntityFeature.PRESET_MODE + assert ( + state.attributes.get(ATTR_SUPPORTED_FEATURES) + == FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) # Test setting preset mode await hass.services.async_call( diff --git a/tests/components/zwave_js/test_helpers.py b/tests/components/zwave_js/test_helpers.py index 016a2d718ac..2df2e134f49 100644 --- a/tests/components/zwave_js/test_helpers.py +++ b/tests/components/zwave_js/test_helpers.py @@ -42,4 +42,4 @@ async def test_get_value_state_schema_boolean_config_value( aeon_smart_switch_6.values["102-112-0-255"] ) assert isinstance(schema_validator, vol.Coerce) - assert schema_validator.type == bool + assert schema_validator.type is bool diff --git a/tests/conftest.py b/tests/conftest.py index 14e6f97d7c4..ea0453e7450 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,14 +3,16 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine -from contextlib import asynccontextmanager, contextmanager +from 
collections.abc import AsyncGenerator, Callable, Coroutine, Generator +from contextlib import AsyncExitStack, asynccontextmanager, contextmanager +import datetime import functools import gc import itertools import logging import os import reprlib +from shutil import rmtree import sqlite3 import ssl import threading @@ -32,12 +34,16 @@ import multidict import pytest import pytest_socket import requests_mock +import respx from syrupy.assertion import SnapshotAssertion -from typing_extensions import AsyncGenerator, Generator from homeassistant import block_async_io +from homeassistant.exceptions import ServiceNotFound -# Setup patching if dt_util time functions before any other Home Assistant imports +# Setup patching of recorder functions before any other Home Assistant imports +from . import patch_recorder # noqa: F401, isort:skip + +# Setup patching of dt_util time functions before any other Home Assistant imports from . import patch_time # noqa: F401, isort:skip from homeassistant import core as ha, loader, runner @@ -53,8 +59,9 @@ from homeassistant.components.websocket_api.auth import ( from homeassistant.components.websocket_api.http import URL from homeassistant.config import YAML_CONFIG_FILE from homeassistant.config_entries import ConfigEntries, ConfigEntry, ConfigEntryState -from homeassistant.const import HASSIO_USER_NAME +from homeassistant.const import BASE_PLATFORMS, HASSIO_USER_NAME from homeassistant.core import ( + Context, CoreState, HassJob, HomeAssistant, @@ -75,9 +82,9 @@ from homeassistant.helpers import ( from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.translation import _TranslationsCacheData from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import BASE_PLATFORMS, async_setup_component -from homeassistant.util import location -from homeassistant.util.async_ import create_eager_task +from homeassistant.setup import async_setup_component +from homeassistant.util import dt 
as dt_util, location +from homeassistant.util.async_ import create_eager_task, get_scheduled_timer_handles from homeassistant.util.json import json_loads from .ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS @@ -106,8 +113,6 @@ from .common import ( # noqa: E402, isort:skip MockUser, async_fire_mqtt_message, async_test_home_assistant, - get_test_home_assistant, - init_recorder_component, mock_storage, patch_yaml_files, extract_stack_to_frame, @@ -367,7 +372,7 @@ def verify_cleanup( if tasks: event_loop.run_until_complete(asyncio.wait(tasks)) - for handle in event_loop._scheduled: # type: ignore[attr-defined] + for handle in get_scheduled_timer_handles(event_loop): if not handle.cancelled(): with long_repr_strings(): if expected_lingering_timers: @@ -387,6 +392,20 @@ def verify_cleanup( "waitpid-" ) + try: + # Verify the default time zone has been restored + assert dt_util.DEFAULT_TIME_ZONE is datetime.UTC + finally: + # Restore the default time zone to not break subsequent tests + dt_util.DEFAULT_TIME_ZONE = datetime.UTC + + try: + # Verify respx.mock has been cleaned up + assert not respx.mock.routes, "respx.mock routes not cleaned up, maybe the test needs to be decorated with @respx.mock" + finally: + # Clear mock routes not break subsequent tests + respx.mock.clear() + @pytest.fixture(autouse=True) def reset_hass_threading_local_object() -> Generator[None]: @@ -886,7 +905,7 @@ def fail_on_log_exception( return def log_exception(format_err, *args): - raise # pylint: disable=misplaced-bare-raise + raise # noqa: PLE0704 monkeypatch.setattr("homeassistant.util.logging.log_exception", log_exception) @@ -960,6 +979,7 @@ def mqtt_client_mock(hass: HomeAssistant) -> Generator[MqttMockPahoClient]: mock_client.subscribe.side_effect = _subscribe mock_client.unsubscribe.side_effect = _unsubscribe mock_client.publish.side_effect = _async_fire_mqtt_message + mock_client.loop_read.return_value = 0 yield mock_client @@ -1292,6 +1312,16 @@ def 
enable_migrate_entity_ids() -> bool: return False +@pytest.fixture +def enable_migrate_event_ids() -> bool: + """Fixture to control enabling of recorder's event id migration. + + To enable context id migration, tests can be marked with: + @pytest.mark.parametrize("enable_migrate_event_ids", [True]) + """ + return False + + @pytest.fixture def recorder_config() -> dict[str, Any] | None: """Fixture to override recorder config. @@ -1302,16 +1332,36 @@ def recorder_config() -> dict[str, Any] | None: return None +@pytest.fixture +def persistent_database() -> bool: + """Fixture to control if database should persist when recorder is shut down in test. + + When using sqlite, this uses on disk database instead of in memory database. + This does nothing when using mysql or postgresql. + + Note that the database is always destroyed in between tests. + + To use a persistent database, tests can be marked with: + @pytest.mark.parametrize("persistent_database", [True]) + """ + return False + + @pytest.fixture def recorder_db_url( pytestconfig: pytest.Config, hass_fixture_setup: list[bool], + persistent_database: str, + tmp_path_factory: pytest.TempPathFactory, ) -> Generator[str]: """Prepare a default database for tests and return a connection URL.""" assert not hass_fixture_setup db_url = cast(str, pytestconfig.getoption("dburl")) - if db_url.startswith("mysql://"): + if db_url == "sqlite://" and persistent_database: + tmp_path = tmp_path_factory.mktemp("recorder") + db_url = "sqlite:///" + str(tmp_path / "pytest.db") + elif db_url.startswith("mysql://"): # pylint: disable-next=import-outside-toplevel import sqlalchemy_utils @@ -1325,7 +1375,9 @@ def recorder_db_url( assert not sqlalchemy_utils.database_exists(db_url) sqlalchemy_utils.create_database(db_url, encoding="utf8") yield db_url - if db_url.startswith("mysql://"): + if db_url == "sqlite://" and persistent_database: + rmtree(tmp_path, ignore_errors=True) + elif db_url.startswith("mysql://"): # pylint: 
disable-next=import-outside-toplevel import sqlalchemy as sa @@ -1349,124 +1401,13 @@ def recorder_db_url( sqlalchemy_utils.drop_database(db_url) -@pytest.fixture -def hass_recorder( - recorder_db_url: str, - enable_nightly_purge: bool, - enable_statistics: bool, - enable_schema_validation: bool, - enable_migrate_context_ids: bool, - enable_migrate_event_type_ids: bool, - enable_migrate_entity_ids: bool, - hass_storage: dict[str, Any], -) -> Generator[Callable[..., HomeAssistant]]: - """Home Assistant fixture with in-memory recorder.""" - # pylint: disable-next=import-outside-toplevel - from homeassistant.components import recorder - - # pylint: disable-next=import-outside-toplevel - from homeassistant.components.recorder import migration - - with get_test_home_assistant() as hass: - nightly = ( - recorder.Recorder.async_nightly_tasks if enable_nightly_purge else None - ) - stats = ( - recorder.Recorder.async_periodic_statistics if enable_statistics else None - ) - compile_missing = ( - recorder.Recorder._schedule_compile_missing_statistics - if enable_statistics - else None - ) - schema_validate = ( - migration._find_schema_errors - if enable_schema_validation - else itertools.repeat(set()) - ) - migrate_states_context_ids = ( - recorder.Recorder._migrate_states_context_ids - if enable_migrate_context_ids - else None - ) - migrate_events_context_ids = ( - recorder.Recorder._migrate_events_context_ids - if enable_migrate_context_ids - else None - ) - migrate_event_type_ids = ( - recorder.Recorder._migrate_event_type_ids - if enable_migrate_event_type_ids - else None - ) - migrate_entity_ids = ( - recorder.Recorder._migrate_entity_ids if enable_migrate_entity_ids else None - ) - with ( - patch( - "homeassistant.components.recorder.Recorder.async_nightly_tasks", - side_effect=nightly, - autospec=True, - ), - patch( - "homeassistant.components.recorder.Recorder.async_periodic_statistics", - side_effect=stats, - autospec=True, - ), - patch( - 
"homeassistant.components.recorder.migration._find_schema_errors", - side_effect=schema_validate, - autospec=True, - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_events_context_ids", - side_effect=migrate_events_context_ids, - autospec=True, - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", - side_effect=migrate_states_context_ids, - autospec=True, - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", - side_effect=migrate_event_type_ids, - autospec=True, - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", - side_effect=migrate_entity_ids, - autospec=True, - ), - patch( - "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", - side_effect=compile_missing, - autospec=True, - ), - ): - - def setup_recorder( - *, config: dict[str, Any] | None = None, timezone: str | None = None - ) -> HomeAssistant: - """Set up with params.""" - if timezone is not None: - asyncio.run_coroutine_threadsafe( - hass.config.async_set_time_zone(timezone), hass.loop - ).result() - init_recorder_component(hass, config, recorder_db_url) - hass.start() - hass.block_till_done() - hass.data[recorder.DATA_INSTANCE].block_till_done() - return hass - - yield setup_recorder - hass.stop() - - async def _async_init_recorder_component( hass: HomeAssistant, add_config: dict[str, Any] | None = None, db_url: str | None = None, + *, + expected_setup_result: bool, + wait_setup: bool, ) -> None: """Initialize the recorder asynchronously.""" # pylint: disable-next=import-outside-toplevel @@ -1481,18 +1422,34 @@ async def _async_init_recorder_component( with patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True): if recorder.DOMAIN not in hass.data: recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, recorder.DOMAIN, {recorder.DOMAIN: config} + setup_task = asyncio.ensure_future( + 
async_setup_component(hass, recorder.DOMAIN, {recorder.DOMAIN: config}) ) - assert recorder.DOMAIN in hass.config.components + if wait_setup: + # Wait for recorder integration to setup + setup_result = await setup_task + assert setup_result == expected_setup_result + assert (recorder.DOMAIN in hass.config.components) == expected_setup_result + else: + # Wait for recorder to connect to the database + await recorder_helper.async_wait_recorder(hass) _LOGGER.info( "Test recorder successfully started, database location: %s", config[recorder.CONF_DB_URL], ) +class ThreadSession(threading.local): + """Keep track of session per thread.""" + + has_session = False + + +thread_session = ThreadSession() + + @pytest.fixture -async def async_setup_recorder_instance( +async def async_test_recorder( recorder_db_url: str, enable_nightly_purge: bool, enable_statistics: bool, @@ -1500,8 +1457,9 @@ async def async_setup_recorder_instance( enable_migrate_context_ids: bool, enable_migrate_event_type_ids: bool, enable_migrate_entity_ids: bool, + enable_migrate_event_ids: bool, ) -> AsyncGenerator[RecorderInstanceGenerator]: - """Yield callable to setup recorder instance.""" + """Yield context manager to setup recorder instance.""" # pylint: disable-next=import-outside-toplevel from homeassistant.components import recorder @@ -1511,6 +1469,39 @@ async def async_setup_recorder_instance( # pylint: disable-next=import-outside-toplevel from .components.recorder.common import async_recorder_block_till_done + # pylint: disable-next=import-outside-toplevel + from .patch_recorder import real_session_scope + + if TYPE_CHECKING: + # pylint: disable-next=import-outside-toplevel + from sqlalchemy.orm.session import Session + + @contextmanager + def debug_session_scope( + *, + hass: HomeAssistant | None = None, + session: Session | None = None, + exception_filter: Callable[[Exception], bool] | None = None, + read_only: bool = False, + ) -> Generator[Session]: + """Wrap session_scope to bark if we 
create nested sessions.""" + if thread_session.has_session: + raise RuntimeError( + f"Thread '{threading.current_thread().name}' already has an " + "active session" + ) + thread_session.has_session = True + try: + with real_session_scope( + hass=hass, + session=session, + exception_filter=exception_filter, + read_only=read_only, + ) as ses: + yield ses + finally: + thread_session.has_session = False + nightly = recorder.Recorder.async_nightly_tasks if enable_nightly_purge else None stats = recorder.Recorder.async_periodic_statistics if enable_statistics else None schema_validate = ( @@ -1524,22 +1515,27 @@ async def async_setup_recorder_instance( else None ) migrate_states_context_ids = ( - recorder.Recorder._migrate_states_context_ids + migration.StatesContextIDMigration.migrate_data if enable_migrate_context_ids else None ) migrate_events_context_ids = ( - recorder.Recorder._migrate_events_context_ids + migration.EventsContextIDMigration.migrate_data if enable_migrate_context_ids else None ) migrate_event_type_ids = ( - recorder.Recorder._migrate_event_type_ids + migration.EventTypeIDMigration.migrate_data if enable_migrate_event_type_ids else None ) migrate_entity_ids = ( - recorder.Recorder._migrate_entity_ids if enable_migrate_entity_ids else None + migration.EntityIDMigration.migrate_data if enable_migrate_entity_ids else None + ) + legacy_event_id_foreign_key_exists = ( + migration.EventIDPostMigration._legacy_event_id_foreign_key_exists + if enable_migrate_event_ids + else lambda _: None ) with ( patch( @@ -1558,43 +1554,101 @@ async def async_setup_recorder_instance( autospec=True, ), patch( - "homeassistant.components.recorder.Recorder._migrate_events_context_ids", + "homeassistant.components.recorder.migration.EventsContextIDMigration.migrate_data", side_effect=migrate_events_context_ids, autospec=True, ), patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", + 
"homeassistant.components.recorder.migration.StatesContextIDMigration.migrate_data", side_effect=migrate_states_context_ids, autospec=True, ), patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", + "homeassistant.components.recorder.migration.EventTypeIDMigration.migrate_data", side_effect=migrate_event_type_ids, autospec=True, ), patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", + "homeassistant.components.recorder.migration.EntityIDMigration.migrate_data", side_effect=migrate_entity_ids, autospec=True, ), + patch( + "homeassistant.components.recorder.migration.EventIDPostMigration._legacy_event_id_foreign_key_exists", + side_effect=legacy_event_id_foreign_key_exists, + autospec=True, + ), patch( "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", side_effect=compile_missing, autospec=True, ), + patch.object( + patch_recorder, + "real_session_scope", + side_effect=debug_session_scope, + autospec=True, + ), ): - async def async_setup_recorder( - hass: HomeAssistant, config: ConfigType | None = None - ) -> recorder.Recorder: + @asynccontextmanager + async def async_test_recorder( + hass: HomeAssistant, + config: ConfigType | None = None, + *, + expected_setup_result: bool = True, + wait_recorder: bool = True, + wait_recorder_setup: bool = True, + ) -> AsyncGenerator[recorder.Recorder]: """Setup and return recorder instance.""" # noqa: D401 - await _async_init_recorder_component(hass, config, recorder_db_url) + await _async_init_recorder_component( + hass, + config, + recorder_db_url, + expected_setup_result=expected_setup_result, + wait_setup=wait_recorder_setup, + ) await hass.async_block_till_done() instance = hass.data[recorder.DATA_INSTANCE] # The recorder's worker is not started until Home Assistant is running - if hass.state is CoreState.running: + if hass.state is CoreState.running and wait_recorder: await async_recorder_block_till_done(hass) - return instance + try: + yield 
instance + finally: + if instance.is_alive(): + await instance._async_shutdown(None) + + yield async_test_recorder + + +@pytest.fixture +async def async_setup_recorder_instance( + async_test_recorder: RecorderInstanceGenerator, +) -> AsyncGenerator[RecorderInstanceGenerator]: + """Yield callable to setup recorder instance.""" + + async with AsyncExitStack() as stack: + + async def async_setup_recorder( + hass: HomeAssistant, + config: ConfigType | None = None, + *, + expected_setup_result: bool = True, + wait_recorder: bool = True, + wait_recorder_setup: bool = True, + ) -> AsyncGenerator[recorder.Recorder]: + """Set up and return recorder instance.""" + + return await stack.enter_async_context( + async_test_recorder( + hass, + config, + expected_setup_result=expected_setup_result, + wait_recorder=wait_recorder, + wait_recorder_setup=wait_recorder_setup, + ) + ) yield async_setup_recorder @@ -1602,11 +1656,12 @@ async def async_setup_recorder_instance( @pytest.fixture async def recorder_mock( recorder_config: dict[str, Any] | None, - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, hass: HomeAssistant, -) -> recorder.Recorder: +) -> AsyncGenerator[recorder.Recorder]: """Fixture with in-memory recorder.""" - return await async_setup_recorder_instance(hass, recorder_config) + async with async_test_recorder(hass, recorder_config) as instance: + yield instance @pytest.fixture @@ -1769,7 +1824,7 @@ def label_registry(hass: HomeAssistant) -> lr.LabelRegistry: @pytest.fixture -def service_calls(hass: HomeAssistant) -> Generator[None, None, list[ServiceCall]]: +def service_calls(hass: HomeAssistant) -> Generator[list[ServiceCall]]: """Track all service calls.""" calls = [] @@ -1780,17 +1835,25 @@ def service_calls(hass: HomeAssistant) -> Generator[None, None, list[ServiceCall domain: str, service: str, service_data: dict[str, Any] | None = None, - **kwargs: Any, + blocking: bool = False, + context: Context | None = 
None, + target: dict[str, Any] | None = None, + return_response: bool = False, ) -> ServiceResponse: - calls.append(ServiceCall(domain, service, service_data)) + calls.append( + ServiceCall(domain, service, service_data, context, return_response) + ) try: return await _original_async_call( domain, service, service_data, - **kwargs, + blocking, + context, + target, + return_response, ) - except ha.ServiceNotFound: + except ServiceNotFound: _LOGGER.debug("Ignoring unknown service call to %s.%s", domain, service) return None @@ -1805,7 +1868,7 @@ def snapshot(snapshot: SnapshotAssertion) -> SnapshotAssertion: @pytest.fixture -def disable_block_async_io() -> Generator[Any, Any, None]: +def disable_block_async_io() -> Generator[None]: """Fixture to disable the loop protection from block_async_io.""" yield calls = block_async_io._BLOCKED_CALLS.calls diff --git a/tests/helpers/test_aiohttp_client.py b/tests/helpers/test_aiohttp_client.py index 7dd34fd2c64..4feb03493e9 100644 --- a/tests/helpers/test_aiohttp_client.py +++ b/tests/helpers/test_aiohttp_client.py @@ -1,5 +1,6 @@ """Test the aiohttp client helper.""" +import socket from unittest.mock import Mock, patch import aiohttp @@ -16,9 +17,10 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, CONF_VERIFY_SSL, + EVENT_HOMEASSISTANT_CLOSE, HTTP_BASIC_AUTHENTICATION, ) -from homeassistant.core import EVENT_HOMEASSISTANT_CLOSE, HomeAssistant +from homeassistant.core import HomeAssistant import homeassistant.helpers.aiohttp_client as client from homeassistant.util.color import RGBColor @@ -82,7 +84,14 @@ async def test_get_clientsession_without_ssl(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("verify_ssl", "expected_family"), - [(True, 0), (False, 0), (True, 4), (False, 4), (True, 6), (False, 6)], + [ + (True, socket.AF_UNSPEC), + (False, socket.AF_UNSPEC), + (True, socket.AF_INET), + (False, socket.AF_INET), + (True, socket.AF_INET6), + (False, socket.AF_INET6), + ], ) async def 
test_get_clientsession( hass: HomeAssistant, verify_ssl: bool, expected_family: int diff --git a/tests/helpers/test_area_registry.py b/tests/helpers/test_area_registry.py index e6d637d1a99..ad571ac50cc 100644 --- a/tests/helpers/test_area_registry.py +++ b/tests/helpers/test_area_registry.py @@ -1,8 +1,10 @@ """Tests for the Area Registry.""" +from datetime import datetime, timedelta from functools import partial from typing import Any +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant @@ -11,6 +13,7 @@ from homeassistant.helpers import ( floor_registry as fr, label_registry as lr, ) +from homeassistant.util.dt import utcnow from tests.common import ANY, async_capture_events, flush_store @@ -24,7 +27,11 @@ async def test_list_areas(area_registry: ar.AreaRegistry) -> None: assert len(areas) == len(area_registry.areas) -async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) -> None: +async def test_create_area( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + area_registry: ar.AreaRegistry, +) -> None: """Make sure that we can create an area.""" update_events = async_capture_events(hass, ar.EVENT_AREA_REGISTRY_UPDATED) @@ -40,9 +47,13 @@ async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) name="mock", normalized_name=ANY, picture=None, + created_at=utcnow(), + modified_at=utcnow(), ) assert len(area_registry.areas) == 1 + freezer.tick(timedelta(minutes=5)) + await hass.async_block_till_done() assert len(update_events) == 1 @@ -52,14 +63,14 @@ async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) } # Create area with all parameters - area = area_registry.async_create( + area2 = area_registry.async_create( "mock 2", aliases={"alias_1", "alias_2"}, labels={"label1", "label2"}, picture="/image/example.png", ) - assert area == ar.AreaEntry( + assert area2 == ar.AreaEntry( aliases={"alias_1", "alias_2"}, floor_id=None, icon=None, 
@@ -68,15 +79,19 @@ async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) name="mock 2", normalized_name=ANY, picture="/image/example.png", + created_at=utcnow(), + modified_at=utcnow(), ) assert len(area_registry.areas) == 2 + assert area.created_at != area2.created_at + assert area.modified_at != area2.modified_at await hass.async_block_till_done() assert len(update_events) == 2 assert update_events[-1].data == { "action": "create", - "area_id": area.id, + "area_id": area2.id, } @@ -150,11 +165,18 @@ async def test_update_area( area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can read areas.""" + created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") + freezer.move_to(created_at) update_events = async_capture_events(hass, ar.EVENT_AREA_REGISTRY_UPDATED) floor_registry.async_create("first") area = area_registry.async_create("mock") + assert area.modified_at == created_at + + modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") + freezer.move_to(modified_at) updated_area = area_registry.async_update( area.id, @@ -176,6 +198,8 @@ async def test_update_area( name="mock1", normalized_name=ANY, picture="/image/example.png", + created_at=created_at, + modified_at=modified_at, ) assert len(area_registry.areas) == 1 @@ -285,6 +309,8 @@ async def test_loading_area_from_storage( "labels": ["mock-label1", "mock-label2"], "name": "mock", "picture": "blah", + "created_at": utcnow().isoformat(), + "modified_at": utcnow().isoformat(), } ] }, @@ -329,6 +355,8 @@ async def test_migration_from_1_1( "labels": [], "name": "mock", "picture": None, + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", } ] }, diff --git a/tests/helpers/test_category_registry.py b/tests/helpers/test_category_registry.py index 1317750ebec..cad997fd50f 100644 --- a/tests/helpers/test_category_registry.py 
+++ b/tests/helpers/test_category_registry.py @@ -1,13 +1,16 @@ """Tests for the category registry.""" +from datetime import datetime from functools import partial import re from typing import Any +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import category_registry as cr +from homeassistant.util.dt import UTC from tests.common import async_capture_events, flush_store @@ -152,9 +155,13 @@ async def test_delete_non_existing_category( async def test_update_category( - hass: HomeAssistant, category_registry: cr.CategoryRegistry + hass: HomeAssistant, + category_registry: cr.CategoryRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can update categories.""" + created = datetime(2024, 2, 14, 12, 0, 0, tzinfo=UTC) + freezer.move_to(created) update_events = async_capture_events(hass, cr.EVENT_CATEGORY_REGISTRY_UPDATED) category = category_registry.async_create( scope="automation", @@ -162,9 +169,16 @@ async def test_update_category( ) assert len(category_registry.categories["automation"]) == 1 - assert category.category_id - assert category.name == "Energy saving" - assert category.icon is None + assert category == cr.CategoryEntry( + category_id=category.category_id, + created_at=created, + modified_at=created, + name="Energy saving", + icon=None, + ) + + modified = datetime(2024, 3, 14, 12, 0, 0, tzinfo=UTC) + freezer.move_to(modified) updated_category = category_registry.async_update( scope="automation", @@ -174,9 +188,13 @@ async def test_update_category( ) assert updated_category != category - assert updated_category.category_id == category.category_id - assert updated_category.name == "ENERGY SAVING" - assert updated_category.icon == "mdi:leaf" + assert updated_category == cr.CategoryEntry( + category_id=category.category_id, + created_at=created, + modified_at=modified, + name="ENERGY SAVING", + icon="mdi:leaf", + ) assert 
len(category_registry.categories["automation"]) == 1 @@ -343,18 +361,25 @@ async def test_loading_categories_from_storage( hass: HomeAssistant, hass_storage: dict[str, Any] ) -> None: """Test loading stored categories on start.""" + date_1 = datetime(2024, 2, 14, 12, 0, 0) + date_2 = datetime(2024, 2, 14, 12, 0, 0) hass_storage[cr.STORAGE_KEY] = { "version": cr.STORAGE_VERSION_MAJOR, + "minor_version": cr.STORAGE_VERSION_MINOR, "data": { "categories": { "automation": [ { "category_id": "uuid1", + "created_at": date_1.isoformat(), + "modified_at": date_1.isoformat(), "name": "Energy saving", "icon": "mdi:leaf", }, { "category_id": "uuid2", + "created_at": date_1.isoformat(), + "modified_at": date_2.isoformat(), "name": "Something else", "icon": None, }, @@ -362,6 +387,8 @@ async def test_loading_categories_from_storage( "zone": [ { "category_id": "uuid3", + "created_at": date_2.isoformat(), + "modified_at": date_2.isoformat(), "name": "Grocery stores", "icon": "mdi:store", }, @@ -380,21 +407,33 @@ async def test_loading_categories_from_storage( category1 = category_registry.async_get_category( scope="automation", category_id="uuid1" ) - assert category1.category_id == "uuid1" - assert category1.name == "Energy saving" - assert category1.icon == "mdi:leaf" + assert category1 == cr.CategoryEntry( + category_id="uuid1", + created_at=date_1, + modified_at=date_1, + name="Energy saving", + icon="mdi:leaf", + ) category2 = category_registry.async_get_category( scope="automation", category_id="uuid2" ) - assert category2.category_id == "uuid2" - assert category2.name == "Something else" - assert category2.icon is None + assert category2 == cr.CategoryEntry( + category_id="uuid2", + created_at=date_1, + modified_at=date_2, + name="Something else", + icon=None, + ) category3 = category_registry.async_get_category(scope="zone", category_id="uuid3") - assert category3.category_id == "uuid3" - assert category3.name == "Grocery stores" - assert category3.icon == "mdi:store" + 
assert category3 == cr.CategoryEntry( + category_id="uuid3", + created_at=date_2, + modified_at=date_2, + name="Grocery stores", + icon="mdi:store", + ) async def test_async_create_thread_safety( @@ -447,3 +486,83 @@ async def test_async_update_thread_safety( name="new name", ) ) + + +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_from_1_1( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.1.""" + hass_storage[cr.STORAGE_KEY] = { + "version": 1, + "data": { + "categories": { + "automation": [ + { + "category_id": "uuid1", + "name": "Energy saving", + "icon": "mdi:leaf", + }, + { + "category_id": "uuid2", + "name": "Something else", + "icon": None, + }, + ], + "zone": [ + { + "category_id": "uuid3", + "name": "Grocery stores", + "icon": "mdi:store", + }, + ], + } + }, + } + + await cr.async_load(hass) + registry = cr.async_get(hass) + + # Test data was loaded + assert len(registry.categories) == 2 + assert len(registry.categories["automation"]) == 2 + assert len(registry.categories["zone"]) == 1 + + assert registry.async_get_category(scope="automation", category_id="uuid1") + + # Check we store migrated data + await flush_store(registry._store) + assert hass_storage[cr.STORAGE_KEY] == { + "version": cr.STORAGE_VERSION_MAJOR, + "minor_version": cr.STORAGE_VERSION_MINOR, + "key": cr.STORAGE_KEY, + "data": { + "categories": { + "automation": [ + { + "category_id": "uuid1", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + "name": "Energy saving", + "icon": "mdi:leaf", + }, + { + "category_id": "uuid2", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + "name": "Something else", + "icon": None, + }, + ], + "zone": [ + { + "category_id": "uuid3", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + "name": "Grocery stores", + "icon": "mdi:store", + }, + ], + } + }, + } 
diff --git a/tests/helpers/test_config_entry_flow.py b/tests/helpers/test_config_entry_flow.py index 6a198b7a297..498e57d45a4 100644 --- a/tests/helpers/test_config_entry_flow.py +++ b/tests/helpers/test_config_entry_flow.py @@ -1,9 +1,9 @@ """Tests for the Config Entry Flow helper.""" +from collections.abc import Generator from unittest.mock import Mock, PropertyMock, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, setup from homeassistant.config import async_process_ha_core_config diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index 132a0b41707..52def52f3f0 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -1,5 +1,6 @@ """Tests for the Somfy config flow.""" +from collections.abc import Generator from http import HTTPStatus import logging import time @@ -8,7 +9,6 @@ from unittest.mock import patch import aiohttp import pytest -from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, setup from homeassistant.core import HomeAssistant @@ -873,7 +873,9 @@ async def test_implementation_provider(hass: HomeAssistant, local_impl) -> None: provider_source = [] - async def async_provide_implementation(hass, domain): + async def async_provide_implementation( + hass: HomeAssistant, domain: str + ) -> list[config_entry_oauth2_flow.AbstractOAuth2Implementation]: """Mock implementation provider.""" return provider_source diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 6df29eefaff..ac3af13949b 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -6,6 +6,7 @@ import enum import logging import os from socket import _GLOBAL_DEFAULT_TIMEOUT +from typing import Any from unittest.mock import Mock, patch import uuid @@ -32,7 +33,7 @@ def 
test_boolean() -> None: "T", "negative", "lock", - "tr ue", + "tr ue", # codespell:ignore ue [], [1, 2], {"one": "two"}, @@ -193,12 +194,12 @@ def test_platform_config() -> None: def test_ensure_list() -> None: """Test ensure_list.""" schema = vol.Schema(cv.ensure_list) - assert [] == schema(None) - assert [1] == schema(1) - assert [1] == schema([1]) - assert ["1"] == schema("1") - assert ["1"] == schema(["1"]) - assert [{"1": "2"}] == schema({"1": "2"}) + assert schema(None) == [] + assert schema(1) == [1] + assert schema([1]) == [1] + assert schema("1") == ["1"] + assert schema(["1"]) == ["1"] + assert schema({"1": "2"}) == [{"1": "2"}] def test_entity_id() -> None: @@ -416,27 +417,9 @@ def test_service() -> None: schema("homeassistant.turn_on") -def test_service_schema(hass: HomeAssistant) -> None: - """Test service_schema validation.""" - options = ( - {}, - None, - { - "service": "homeassistant.turn_on", - "service_template": "homeassistant.turn_on", - }, - {"data": {"entity_id": "light.kitchen"}}, - {"service": "homeassistant.turn_on", "data": None}, - { - "service": "homeassistant.turn_on", - "data_template": {"brightness": "{{ no_end"}, - }, - ) - for value in options: - with pytest.raises(vol.MultipleInvalid): - cv.SERVICE_SCHEMA(value) - - options = ( +@pytest.mark.parametrize( + "config", + [ {"service": "homeassistant.turn_on"}, {"service": "homeassistant.turn_on", "entity_id": "light.kitchen"}, {"service": "light.turn_on", "entity_id": "all"}, @@ -450,14 +433,70 @@ def test_service_schema(hass: HomeAssistant) -> None: "alias": "turn on kitchen lights", }, {"service": "scene.turn_on", "metadata": {}}, - ) - for value in options: - cv.SERVICE_SCHEMA(value) + {"action": "homeassistant.turn_on"}, + {"action": "homeassistant.turn_on", "entity_id": "light.kitchen"}, + {"action": "light.turn_on", "entity_id": "all"}, + { + "action": "homeassistant.turn_on", + "entity_id": ["light.kitchen", "light.ceiling"], + }, + { + "action": "light.turn_on", + "entity_id": 
"all", + "alias": "turn on kitchen lights", + }, + {"action": "scene.turn_on", "metadata": {}}, + ], +) +def test_service_schema(hass: HomeAssistant, config: dict[str, Any]) -> None: + """Test service_schema validation.""" + validated = cv.SERVICE_SCHEMA(config) - # Check metadata is removed from the validated output - assert cv.SERVICE_SCHEMA({"service": "scene.turn_on", "metadata": {}}) == { - "service": "scene.turn_on" - } + # Ensure metadata is removed from the validated output + assert "metadata" not in validated + + # Ensure service is migrated to action + assert "service" not in validated + assert "action" in validated + assert validated["action"] == config.get("service", config["action"]) + + +@pytest.mark.parametrize( + "config", + [ + {}, + None, + {"data": {"entity_id": "light.kitchen"}}, + { + "service": "homeassistant.turn_on", + "service_template": "homeassistant.turn_on", + }, + {"service": "homeassistant.turn_on", "data": None}, + { + "service": "homeassistant.turn_on", + "data_template": {"brightness": "{{ no_end"}, + }, + { + "service": "homeassistant.turn_on", + "action": "homeassistant.turn_on", + }, + { + "action": "homeassistant.turn_on", + "service_template": "homeassistant.turn_on", + }, + {"action": "homeassistant.turn_on", "data": None}, + { + "action": "homeassistant.turn_on", + "data_template": {"brightness": "{{ no_end"}, + }, + ], +) +def test_invalid_service_schema( + hass: HomeAssistant, config: dict[str, Any] | None +) -> None: + """Test service_schema validation fails.""" + with pytest.raises(vol.MultipleInvalid): + cv.SERVICE_SCHEMA(config) def test_entity_service_schema() -> None: @@ -865,7 +904,7 @@ def schema(): @pytest.fixture -def version(monkeypatch): +def version(monkeypatch: pytest.MonkeyPatch) -> None: """Patch the version used for testing to 0.5.0.""" monkeypatch.setattr(homeassistant.const, "__version__", "0.5.0") @@ -965,7 +1004,7 @@ def test_deprecated_with_replacement_key( assert ( "The 'mars' option is deprecated, 
please replace it with 'jupiter'" ) in caplog.text - assert {"jupiter": True} == output + assert output == {"jupiter": True} caplog.clear() assert len(caplog.records) == 0 @@ -1036,7 +1075,7 @@ def test_deprecated_with_replacement_key_and_default( assert ( "The 'mars' option is deprecated, please replace it with 'jupiter'" ) in caplog.text - assert {"jupiter": True} == output + assert output == {"jupiter": True} caplog.clear() assert len(caplog.records) == 0 @@ -1049,7 +1088,7 @@ def test_deprecated_with_replacement_key_and_default( test_data = {"venus": True} output = deprecated_schema(test_data.copy()) assert len(caplog.records) == 0 - assert {"venus": True, "jupiter": False} == output + assert output == {"venus": True, "jupiter": False} deprecated_schema_with_default = vol.All( vol.Schema( @@ -1068,7 +1107,7 @@ def test_deprecated_with_replacement_key_and_default( assert ( "The 'mars' option is deprecated, please replace it with 'jupiter'" ) in caplog.text - assert {"jupiter": True} == output + assert output == {"jupiter": True} def test_deprecated_cant_find_module() -> None: @@ -1453,7 +1492,7 @@ def test_whitespace() -> None: "T", "negative", "lock", - "tr ue", + "tr ue", # codespell:ignore ue [], [1, 2], {"one": "two"}, diff --git a/tests/helpers/test_device.py b/tests/helpers/test_device.py index 72c602bec48..852d418da23 100644 --- a/tests/helpers/test_device.py +++ b/tests/helpers/test_device.py @@ -169,7 +169,7 @@ async def test_remove_stale_device_links_keep_entity_device( config_entry.entry_id ) - # After cleanup, only one device is expected to be linked to the configuration entry if at least source_entity_id_or_uuid or device_id was given, else zero + # After cleanup, only one device is expected to be linked to the config entry assert len(devices_config_entry) == 1 assert current_device in devices_config_entry @@ -220,7 +220,7 @@ async def test_remove_stale_devices_links_keep_current_device( config_entry.entry_id ) - # After cleanup, only one device is 
expected to be linked to the configuration entry + # After cleanup, only one device is expected to be linked to the config entry assert len(devices_config_entry) == 1 assert current_device in devices_config_entry diff --git a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index b141e29f678..129c6b0d37c 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -2,11 +2,13 @@ from collections.abc import Iterable from contextlib import AbstractContextManager, nullcontext +from datetime import datetime from functools import partial import time from typing import Any from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory import pytest from yarl import URL @@ -19,6 +21,7 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, ) +from homeassistant.util.dt import utcnow from tests.common import ( MockConfigEntry, @@ -90,7 +93,7 @@ async def test_get_or_create_returns_same_entry( await hass.async_block_till_done() # Only 2 update events. The third entry did not generate any changes. 
- assert len(update_events) == 2, update_events + assert len(update_events) == 2 assert update_events[0].data == { "action": "create", "device_id": entry.id, @@ -170,18 +173,22 @@ async def test_multiple_config_entries( assert len(device_registry.devices) == 1 assert entry.id == entry2.id assert entry.id == entry3.id - assert entry2.config_entries == [config_entry_2.entry_id, config_entry_1.entry_id] - # the 3rd get_or_create was a primary update, so that's now first config entry - assert entry3.config_entries == [config_entry_1.entry_id, config_entry_2.entry_id] + assert entry2.config_entries == {config_entry_1.entry_id, config_entry_2.entry_id} + assert entry2.primary_config_entry == config_entry_1.entry_id + assert entry3.config_entries == {config_entry_1.entry_id, config_entry_2.entry_id} + assert entry3.primary_config_entry == config_entry_1.entry_id @pytest.mark.parametrize("load_registries", [False]) +@pytest.mark.usefixtures("freezer") async def test_loading_from_storage( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: """Test loading stored devices on start.""" + created_at = "2024-01-01T00:00:00+00:00" + modified_at = "2024-02-01T00:00:00+00:00" hass_storage[dr.STORAGE_KEY] = { "version": dr.STORAGE_VERSION_MAJOR, "minor_version": dr.STORAGE_VERSION_MINOR, @@ -192,6 +199,7 @@ async def test_loading_from_storage( "config_entries": [mock_config_entry.entry_id], "configuration_url": "https://example.com/config", "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": created_at, "disabled_by": dr.DeviceEntryDisabler.USER, "entry_type": dr.DeviceEntryType.SERVICE, "hw_version": "hw_version", @@ -200,8 +208,11 @@ async def test_loading_from_storage( "labels": {"label1", "label2"}, "manufacturer": "manufacturer", "model": "model", + "model_id": "model_id", + "modified_at": modified_at, "name_by_user": "Test Friendly Name", "name": "name", + "primary_config_entry": mock_config_entry.entry_id, 
"serial_number": "serial_no", "sw_version": "version", "via_device_id": None, @@ -211,8 +222,10 @@ async def test_loading_from_storage( { "config_entries": [mock_config_entry.entry_id], "connections": [["Zigbee", "23.45.67.89.01"]], + "created_at": created_at, "id": "bcdefghijklmn", "identifiers": [["serial", "3456ABCDEF12"]], + "modified_at": modified_at, "orphaned_timestamp": None, } ], @@ -224,6 +237,16 @@ async def test_loading_from_storage( assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 1 + assert registry.deleted_devices["bcdefghijklmn"] == dr.DeletedDeviceEntry( + config_entries={mock_config_entry.entry_id}, + connections={("Zigbee", "23.45.67.89.01")}, + created_at=datetime.fromisoformat(created_at), + id="bcdefghijklmn", + identifiers={("serial", "3456ABCDEF12")}, + modified_at=datetime.fromisoformat(modified_at), + orphaned_timestamp=None, + ) + entry = registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, connections={("Zigbee", "01.23.45.67.89")}, @@ -233,9 +256,10 @@ async def test_loading_from_storage( ) assert entry == dr.DeviceEntry( area_id="12345A", - config_entries=[mock_config_entry.entry_id], + config_entries={mock_config_entry.entry_id}, configuration_url="https://example.com/config", connections={("Zigbee", "01.23.45.67.89")}, + created_at=datetime.fromisoformat(created_at), disabled_by=dr.DeviceEntryDisabler.USER, entry_type=dr.DeviceEntryType.SERVICE, hw_version="hw_version", @@ -244,13 +268,16 @@ async def test_loading_from_storage( labels={"label1", "label2"}, manufacturer="manufacturer", model="model", + model_id="model_id", + modified_at=datetime.fromisoformat(modified_at), name_by_user="Test Friendly Name", name="name", + primary_config_entry=mock_config_entry.entry_id, serial_number="serial_no", suggested_area=None, # Not stored sw_version="version", ) - assert isinstance(entry.config_entries, list) + assert isinstance(entry.config_entries, set) assert isinstance(entry.connections, set) 
assert isinstance(entry.identifiers, set) @@ -263,26 +290,30 @@ async def test_loading_from_storage( model="model", ) assert entry == dr.DeviceEntry( - config_entries=[mock_config_entry.entry_id], + config_entries={mock_config_entry.entry_id}, connections={("Zigbee", "23.45.67.89.01")}, + created_at=datetime.fromisoformat(created_at), id="bcdefghijklmn", identifiers={("serial", "3456ABCDEF12")}, manufacturer="manufacturer", model="model", + modified_at=utcnow(), + primary_config_entry=mock_config_entry.entry_id, ) assert entry.id == "bcdefghijklmn" - assert isinstance(entry.config_entries, list) + assert isinstance(entry.config_entries, set) assert isinstance(entry.connections, set) assert isinstance(entry.identifiers, set) @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_1_to_1_5( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_1_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.1 to 1.5.""" + """Test migration from version 1.1 to 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 1, @@ -361,6 +392,7 @@ async def test_migration_1_1_to_1_5( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": None, @@ -369,8 +401,11 @@ async def test_migration_1_1_to_1_5( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, "serial_number": None, "sw_version": "new_version", "via_device_id": None, @@ -380,6 +415,7 @@ async def test_migration_1_1_to_1_5( "config_entries": [None], "configuration_url": None, "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, 
"entry_type": None, "hw_version": None, @@ -388,8 +424,11 @@ async def test_migration_1_1_to_1_5( "labels": [], "manufacturer": None, "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, + "primary_config_entry": None, "serial_number": None, "sw_version": None, "via_device_id": None, @@ -399,8 +438,10 @@ async def test_migration_1_1_to_1_5( { "config_entries": ["123456"], "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "id": "deletedid", "identifiers": [["serial", "123456ABCDFF"]], + "modified_at": "1970-01-01T00:00:00+00:00", "orphaned_timestamp": None, } ], @@ -409,12 +450,13 @@ async def test_migration_1_1_to_1_5( @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_2_to_1_5( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_2_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.2 to 1.5.""" + """Test migration from version 1.2 to 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 2, @@ -432,6 +474,7 @@ async def test_migration_1_2_to_1_5( "identifiers": [["serial", "123456ABCDEF"]], "manufacturer": "manufacturer", "model": "model", + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "sw_version": "version", @@ -448,6 +491,7 @@ async def test_migration_1_2_to_1_5( "identifiers": [["serial", "mock-id-invalid-entry"]], "manufacturer": None, "model": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "sw_version": None, @@ -492,6 +536,7 @@ async def test_migration_1_2_to_1_5( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": None, @@ -500,8 +545,11 @@ async def test_migration_1_2_to_1_5( "labels": [], 
"manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, "serial_number": None, "sw_version": "new_version", "via_device_id": None, @@ -511,6 +559,7 @@ async def test_migration_1_2_to_1_5( "config_entries": [None], "configuration_url": None, "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -519,8 +568,11 @@ async def test_migration_1_2_to_1_5( "labels": [], "manufacturer": None, "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, + "primary_config_entry": None, "serial_number": None, "sw_version": None, "via_device_id": None, @@ -532,12 +584,13 @@ async def test_migration_1_2_to_1_5( @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_3_to_1_5( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_3_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.3 to 1.5.""" + """Test migration from version 1.3 to 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 3, @@ -617,6 +670,7 @@ async def test_migration_1_3_to_1_5( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": "hw_version", @@ -625,8 +679,11 @@ async def test_migration_1_3_to_1_5( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, "serial_number": None, "sw_version": "new_version", "via_device_id": None, @@ -636,6 +693,7 @@ async def 
test_migration_1_3_to_1_5( "config_entries": [None], "configuration_url": None, "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -644,8 +702,11 @@ async def test_migration_1_3_to_1_5( "labels": [], "manufacturer": None, "model": None, - "name_by_user": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name": None, + "name_by_user": None, + "primary_config_entry": None, "serial_number": None, "sw_version": None, "via_device_id": None, @@ -657,12 +718,13 @@ async def test_migration_1_3_to_1_5( @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_4_to_1_5( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_4_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.4 to 1.5.""" + """Test migration from version 1.4 to 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 4, @@ -744,6 +806,7 @@ async def test_migration_1_4_to_1_5( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": "hw_version", @@ -752,6 +815,71 @@ async def test_migration_1_4_to_1_5( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().isoformat(), + "name": "name", + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": 
[], + "manufacturer": None, + "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "name": None, + "primary_config_entry": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + +@pytest.mark.parametrize("load_registries", [False]) +@pytest.mark.usefixtures("freezer") +async def test_migration_1_5_to_1_7( + hass: HomeAssistant, + hass_storage: dict[str, Any], + mock_config_entry: MockConfigEntry, +) -> None: + """Test migration from version 1.5 to 1.7.""" + hass_storage[dr.STORAGE_KEY] = { + "version": 1, + "minor_version": 5, + "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", "name": "name", "name_by_user": None, "serial_number": None, @@ -768,7 +896,7 @@ async def test_migration_1_4_to_1_5( "hw_version": None, "id": "invalid-entry-type", "identifiers": [["serial", "mock-id-invalid-entry"]], - "labels": [], + "labels": ["blah"], "manufacturer": None, "model": None, "name_by_user": None, @@ -782,6 +910,368 @@ async def test_migration_1_4_to_1_5( }, } + await dr.async_load(hass) + registry = dr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + ) + assert entry.id == "abcdefghijklm" + + # Update to trigger a store + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + 
sw_version="new_version", + ) + assert entry.id == "abcdefghijklm" + + # Check we store migrated data + await flush_store(registry._store) + + assert hass_storage[dr.STORAGE_KEY] == { + "version": dr.STORAGE_VERSION_MAJOR, + "minor_version": dr.STORAGE_VERSION_MINOR, + "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", + "name": "name", + "model_id": None, + "modified_at": utcnow().isoformat(), + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "name": None, + "primary_config_entry": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + +@pytest.mark.parametrize("load_registries", [False]) +@pytest.mark.usefixtures("freezer") +async def test_migration_1_6_to_1_8( + hass: HomeAssistant, + hass_storage: dict[str, Any], + mock_config_entry: MockConfigEntry, +) -> None: + """Test migration from version 1.6 to 1.8.""" + hass_storage[dr.STORAGE_KEY] = { + "version": 1, + "minor_version": 6, + "key": dr.STORAGE_KEY, + "data": { + 
"devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", + "name": "name", + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "name_by_user": None, + "primary_config_entry": None, + "name": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + await dr.async_load(hass) + registry = dr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + ) + assert entry.id == "abcdefghijklm" + + # Update to trigger a store + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + sw_version="new_version", + ) + assert entry.id == "abcdefghijklm" + + # Check we store migrated data + await flush_store(registry._store) + + assert hass_storage[dr.STORAGE_KEY] == { + "version": dr.STORAGE_VERSION_MAJOR, + "minor_version": dr.STORAGE_VERSION_MINOR, + "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + 
"configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", + "name": "name", + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "name": None, + "primary_config_entry": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + +@pytest.mark.parametrize("load_registries", [False]) +@pytest.mark.usefixtures("freezer") +async def test_migration_1_7_to_1_8( + hass: HomeAssistant, + hass_storage: dict[str, Any], + mock_config_entry: MockConfigEntry, +) -> None: + """Test migration from version 1.7 to 1.8.""" + hass_storage[dr.STORAGE_KEY] = { + "version": 1, + "minor_version": 7, + "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": 
"model", + "model_id": None, + "name": "name", + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "model_id": None, + "name_by_user": None, + "primary_config_entry": None, + "name": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + await dr.async_load(hass) + registry = dr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + ) + assert entry.id == "abcdefghijklm" + + # Update to trigger a store + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + sw_version="new_version", + ) + assert entry.id == "abcdefghijklm" + + # Check we store migrated data + await flush_store(registry._store) + + assert hass_storage[dr.STORAGE_KEY] == { + "version": dr.STORAGE_VERSION_MAJOR, + "minor_version": dr.STORAGE_VERSION_MINOR, + "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", + 
"name": "name", + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "name": None, + "primary_config_entry": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + async def test_removing_config_entries( hass: HomeAssistant, device_registry: dr.DeviceRegistry @@ -818,7 +1308,7 @@ async def test_removing_config_entries( assert len(device_registry.devices) == 2 assert entry.id == entry2.id assert entry.id != entry3.id - assert entry2.config_entries == [config_entry_2.entry_id, config_entry_1.entry_id] + assert entry2.config_entries == {config_entry_1.entry_id, config_entry_2.entry_id} device_registry.async_clear_config_entry(config_entry_1.entry_id) entry = device_registry.async_get_device(identifiers={("bridgeid", "0123")}) @@ -826,7 +1316,7 @@ async def test_removing_config_entries( identifiers={("bridgeid", "4567")} ) - assert entry.config_entries == [config_entry_2.entry_id] + assert entry.config_entries == {config_entry_2.entry_id} assert entry3_removed is None await hass.async_block_till_done() @@ -839,7 +1329,9 @@ async def test_removing_config_entries( assert update_events[1].data == { "action": "update", "device_id": entry.id, - "changes": {"config_entries": [config_entry_1.entry_id]}, + "changes": { + "config_entries": {config_entry_1.entry_id}, + }, } assert 
update_events[2].data == { "action": "create", @@ -849,7 +1341,8 @@ async def test_removing_config_entries( "action": "update", "device_id": entry.id, "changes": { - "config_entries": [config_entry_2.entry_id, config_entry_1.entry_id] + "config_entries": {config_entry_1.entry_id, config_entry_2.entry_id}, + "primary_config_entry": config_entry_1.entry_id, }, } assert update_events[4].data == { @@ -894,7 +1387,7 @@ async def test_deleted_device_removing_config_entries( assert len(device_registry.deleted_devices) == 0 assert entry.id == entry2.id assert entry.id != entry3.id - assert entry2.config_entries == [config_entry_2.entry_id, config_entry_1.entry_id] + assert entry2.config_entries == {config_entry_1.entry_id, config_entry_2.entry_id} device_registry.async_remove_device(entry.id) device_registry.async_remove_device(entry3.id) @@ -911,7 +1404,9 @@ async def test_deleted_device_removing_config_entries( assert update_events[1].data == { "action": "update", "device_id": entry2.id, - "changes": {"config_entries": [config_entry_1.entry_id]}, + "changes": { + "config_entries": {config_entry_1.entry_id}, + }, } assert update_events[2].data == { "action": "create", @@ -1251,8 +1746,11 @@ async def test_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Verify that we can update some attributes of a device.""" + created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") + freezer.move_to(created_at) update_events = async_capture_events(hass, dr.EVENT_DEVICE_REGISTRY_UPDATED) entry = device_registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, @@ -1264,7 +1762,11 @@ async def test_update( assert not entry.area_id assert not entry.labels assert not entry.name_by_user + assert entry.created_at == created_at + assert entry.modified_at == created_at + modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") + freezer.move_to(modified_at) with 
patch.object(device_registry, "async_schedule_save") as mock_save: updated_entry = device_registry.async_update_device( entry.id, @@ -1276,6 +1778,7 @@ async def test_update( labels={"label1", "label2"}, manufacturer="Test Producer", model="Test Model", + model_id="Test Model Name", name_by_user="Test Friendly Name", name="name", new_connections=new_connections, @@ -1290,9 +1793,10 @@ async def test_update( assert updated_entry != entry assert updated_entry == dr.DeviceEntry( area_id="12345A", - config_entries=[mock_config_entry.entry_id], + config_entries={mock_config_entry.entry_id}, configuration_url="https://example.com/config", connections={("mac", "65:43:21:fe:dc:ba")}, + created_at=created_at, disabled_by=dr.DeviceEntryDisabler.USER, entry_type=dr.DeviceEntryType.SERVICE, hw_version="hw_version", @@ -1301,6 +1805,8 @@ async def test_update( labels={"label1", "label2"}, manufacturer="Test Producer", model="Test Model", + model_id="Test Model Name", + modified_at=modified_at, name_by_user="Test Friendly Name", name="name", serial_number="serial_no", @@ -1355,6 +1861,7 @@ async def test_update( "labels": set(), "manufacturer": None, "model": None, + "model_id": None, "name": None, "name_by_user": None, "serial_number": None, @@ -1473,6 +1980,8 @@ async def test_update_remove_config_entries( config_entry_1.add_to_hass(hass) config_entry_2 = MockConfigEntry() config_entry_2.add_to_hass(hass) + config_entry_3 = MockConfigEntry() + config_entry_3.add_to_hass(hass) entry = device_registry.async_get_or_create( config_entry_id=config_entry_1.entry_id, @@ -1495,20 +2004,34 @@ async def test_update_remove_config_entries( manufacturer="manufacturer", model="model", ) + entry4 = device_registry.async_update_device( + entry2.id, add_config_entry_id=config_entry_3.entry_id + ) + # Try to add an unknown config entry + with pytest.raises(HomeAssistantError): + device_registry.async_update_device(entry2.id, add_config_entry_id="blabla") assert len(device_registry.devices) == 2 
- assert entry.id == entry2.id + assert entry.id == entry2.id == entry4.id assert entry.id != entry3.id - assert entry2.config_entries == [config_entry_2.entry_id, config_entry_1.entry_id] + assert entry2.config_entries == {config_entry_1.entry_id, config_entry_2.entry_id} + assert entry4.config_entries == { + config_entry_1.entry_id, + config_entry_2.entry_id, + config_entry_3.entry_id, + } - updated_entry = device_registry.async_update_device( + device_registry.async_update_device( entry2.id, remove_config_entry_id=config_entry_1.entry_id ) + updated_entry = device_registry.async_update_device( + entry2.id, remove_config_entry_id=config_entry_3.entry_id + ) removed_entry = device_registry.async_update_device( entry3.id, remove_config_entry_id=config_entry_1.entry_id ) - assert updated_entry.config_entries == [config_entry_2.entry_id] + assert updated_entry.config_entries == {config_entry_2.entry_id} assert removed_entry is None removed_entry = device_registry.async_get_device(identifiers={("bridgeid", "4567")}) @@ -1517,7 +2040,7 @@ async def test_update_remove_config_entries( await hass.async_block_till_done() - assert len(update_events) == 5 + assert len(update_events) == 7 assert update_events[0].data == { "action": "create", "device_id": entry.id, @@ -1525,7 +2048,9 @@ async def test_update_remove_config_entries( assert update_events[1].data == { "action": "update", "device_id": entry2.id, - "changes": {"config_entries": [config_entry_1.entry_id]}, + "changes": { + "config_entries": {config_entry_1.entry_id}, + }, } assert update_events[2].data == { "action": "create", @@ -1535,10 +2060,29 @@ async def test_update_remove_config_entries( "action": "update", "device_id": entry.id, "changes": { - "config_entries": [config_entry_2.entry_id, config_entry_1.entry_id] + "config_entries": {config_entry_1.entry_id, config_entry_2.entry_id} }, } assert update_events[4].data == { + "action": "update", + "device_id": entry2.id, + "changes": { + "config_entries": { + 
config_entry_1.entry_id, + config_entry_2.entry_id, + config_entry_3.entry_id, + }, + "primary_config_entry": config_entry_1.entry_id, + }, + } + assert update_events[5].data == { + "action": "update", + "device_id": entry2.id, + "changes": { + "config_entries": {config_entry_2.entry_id, config_entry_3.entry_id} + }, + } + assert update_events[6].data == { "action": "remove", "device_id": entry3.id, } @@ -1768,7 +2312,7 @@ async def test_restore_device( assert len(device_registry.devices) == 2 assert len(device_registry.deleted_devices) == 0 - assert isinstance(entry3.config_entries, list) + assert isinstance(entry3.config_entries, set) assert isinstance(entry3.connections, set) assert isinstance(entry3.identifiers, set) @@ -1900,7 +2444,7 @@ async def test_restore_shared_device( assert len(device_registry.devices) == 1 assert len(device_registry.deleted_devices) == 0 - assert isinstance(entry2.config_entries, list) + assert isinstance(entry2.config_entries, set) assert isinstance(entry2.connections, set) assert isinstance(entry2.identifiers, set) @@ -1918,7 +2462,7 @@ async def test_restore_shared_device( assert len(device_registry.devices) == 1 assert len(device_registry.deleted_devices) == 0 - assert isinstance(entry3.config_entries, list) + assert isinstance(entry3.config_entries, set) assert isinstance(entry3.connections, set) assert isinstance(entry3.identifiers, set) @@ -1934,7 +2478,7 @@ async def test_restore_shared_device( assert len(device_registry.devices) == 1 assert len(device_registry.deleted_devices) == 0 - assert isinstance(entry4.config_entries, list) + assert isinstance(entry4.config_entries, set) assert isinstance(entry4.connections, set) assert isinstance(entry4.identifiers, set) @@ -1949,7 +2493,7 @@ async def test_restore_shared_device( "action": "update", "device_id": entry.id, "changes": { - "config_entries": [config_entry_1.entry_id], + "config_entries": {config_entry_1.entry_id}, "identifiers": {("entry_123", "0123")}, }, } @@ -1973,7 
+2517,7 @@ async def test_restore_shared_device( "action": "update", "device_id": entry.id, "changes": { - "config_entries": [config_entry_2.entry_id], + "config_entries": {config_entry_2.entry_id}, "identifiers": {("entry_234", "2345")}, }, } @@ -2281,6 +2825,7 @@ async def test_loading_invalid_configuration_url_from_storage( "config_entries": ["1234"], "configuration_url": "invalid", "connections": [], + "created_at": "2024-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": dr.DeviceEntryType.SERVICE, "hw_version": None, @@ -2289,8 +2834,11 @@ async def test_loading_invalid_configuration_url_from_storage( "labels": [], "manufacturer": None, "model": None, + "model_id": None, + "modified_at": "2024-02-01T00:00:00+00:00", "name_by_user": None, "name": None, + "primary_config_entry": "1234", "serial_number": None, "sw_version": None, "via_device_id": None, @@ -2630,3 +3178,258 @@ async def test_async_remove_device_thread_safety( await hass.async_add_executor_job( device_registry.async_remove_device, device.id ) + + +async def test_device_registry_connections_collision( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test connection collisions in the device registry.""" + config_entry = MockConfigEntry() + config_entry.add_to_hass(hass) + + device1 = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "none")}, + manufacturer="manufacturer", + model="model", + ) + device2 = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "none")}, + manufacturer="manufacturer", + model="model", + ) + + assert device1.id == device2.id + + device3 = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={("bridgeid", "0123")}, + manufacturer="manufacturer", + model="model", + ) + + # Attempt to merge connection for device3 with the same + # connection that already exists in 
device1 + with pytest.raises( + HomeAssistantError, match=f"Connections.*already registered.*{device1.id}" + ): + device_registry.async_update_device( + device3.id, + merge_connections={ + (dr.CONNECTION_NETWORK_MAC, "EE:EE:EE:EE:EE:EE"), + (dr.CONNECTION_NETWORK_MAC, "none"), + }, + ) + + # Attempt to add new connections for device3 with the same + # connection that already exists in device1 + with pytest.raises( + HomeAssistantError, match=f"Connections.*already registered.*{device1.id}" + ): + device_registry.async_update_device( + device3.id, + new_connections={ + (dr.CONNECTION_NETWORK_MAC, "EE:EE:EE:EE:EE:EE"), + (dr.CONNECTION_NETWORK_MAC, "none"), + }, + ) + + device3_refetched = device_registry.async_get(device3.id) + assert device3_refetched.connections == set() + assert device3_refetched.identifiers == {("bridgeid", "0123")} + + device1_refetched = device_registry.async_get(device1.id) + assert device1_refetched.connections == {(dr.CONNECTION_NETWORK_MAC, "none")} + assert device1_refetched.identifiers == set() + + device2_refetched = device_registry.async_get(device2.id) + assert device2_refetched.connections == {(dr.CONNECTION_NETWORK_MAC, "none")} + assert device2_refetched.identifiers == set() + + assert device2_refetched.id == device1_refetched.id + assert len(device_registry.devices) == 2 + + # Attempt to implicitly merge connection for device3 with the same + # connection that already exists in device1 + device4 = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={("bridgeid", "0123")}, + connections={ + (dr.CONNECTION_NETWORK_MAC, "EE:EE:EE:EE:EE:EE"), + (dr.CONNECTION_NETWORK_MAC, "none"), + }, + ) + assert len(device_registry.devices) == 2 + assert device4.id in (device1.id, device3.id) + + device3_refetched = device_registry.async_get(device3.id) + device1_refetched = device_registry.async_get(device1.id) + assert not device1_refetched.connections.isdisjoint(device3_refetched.connections) + + +async 
def test_device_registry_identifiers_collision( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test identifiers collisions in the device registry.""" + config_entry = MockConfigEntry() + config_entry.add_to_hass(hass) + + device1 = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={("bridgeid", "0123")}, + manufacturer="manufacturer", + model="model", + ) + device2 = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={("bridgeid", "0123")}, + manufacturer="manufacturer", + model="model", + ) + + assert device1.id == device2.id + + device3 = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={("bridgeid", "4567")}, + manufacturer="manufacturer", + model="model", + ) + + # Attempt to merge identifiers for device3 with the same + # connection that already exists in device1 + with pytest.raises( + HomeAssistantError, match=f"Identifiers.*already registered.*{device1.id}" + ): + device_registry.async_update_device( + device3.id, merge_identifiers={("bridgeid", "0123"), ("bridgeid", "8888")} + ) + + # Attempt to add new identifiers for device3 with the same + # connection that already exists in device1 + with pytest.raises( + HomeAssistantError, match=f"Identifiers.*already registered.*{device1.id}" + ): + device_registry.async_update_device( + device3.id, new_identifiers={("bridgeid", "0123"), ("bridgeid", "8888")} + ) + + device3_refetched = device_registry.async_get(device3.id) + assert device3_refetched.connections == set() + assert device3_refetched.identifiers == {("bridgeid", "4567")} + + device1_refetched = device_registry.async_get(device1.id) + assert device1_refetched.connections == set() + assert device1_refetched.identifiers == {("bridgeid", "0123")} + + device2_refetched = device_registry.async_get(device2.id) + assert device2_refetched.connections == set() + assert device2_refetched.identifiers == 
{("bridgeid", "0123")} + + assert device2_refetched.id == device1_refetched.id + assert len(device_registry.devices) == 2 + + # Attempt to implicitly merge identifiers for device3 with the same + # connection that already exists in device1 + device4 = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={("bridgeid", "4567"), ("bridgeid", "0123")}, + ) + assert len(device_registry.devices) == 2 + assert device4.id in (device1.id, device3.id) + + device3_refetched = device_registry.async_get(device3.id) + device1_refetched = device_registry.async_get(device1.id) + assert not device1_refetched.identifiers.isdisjoint(device3_refetched.identifiers) + + +async def test_primary_config_entry( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, +) -> None: + """Test the primary integration field.""" + mock_config_entry_1 = MockConfigEntry(domain="mqtt", title=None) + mock_config_entry_1.add_to_hass(hass) + mock_config_entry_2 = MockConfigEntry(title=None) + mock_config_entry_2.add_to_hass(hass) + mock_config_entry_3 = MockConfigEntry(title=None) + mock_config_entry_3.add_to_hass(hass) + mock_config_entry_4 = MockConfigEntry(domain="matter", title=None) + mock_config_entry_4.add_to_hass(hass) + + # Create device without model name etc, config entry will not be marked primary + device = device_registry.async_get_or_create( + config_entry_id=mock_config_entry_1.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + identifiers=set(), + ) + assert device.primary_config_entry is None + + # Set model, mqtt config entry will be promoted to primary + device = device_registry.async_get_or_create( + config_entry_id=mock_config_entry_1.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + model="model", + ) + assert device.primary_config_entry == mock_config_entry_1.entry_id + + # New config entry with model will be promoted to primary + device = device_registry.async_get_or_create( + 
config_entry_id=mock_config_entry_2.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + model="model 2", + ) + assert device.primary_config_entry == mock_config_entry_2.entry_id + + # New config entry with model will not be promoted to primary + device = device_registry.async_get_or_create( + config_entry_id=mock_config_entry_3.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + model="model 3", + ) + assert device.primary_config_entry == mock_config_entry_2.entry_id + + # New matter config entry with model will not be promoted to primary + device = device_registry.async_get_or_create( + config_entry_id=mock_config_entry_4.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + model="model 3", + ) + assert device.primary_config_entry == mock_config_entry_2.entry_id + + # Remove the primary config entry + device = device_registry.async_update_device( + device.id, + remove_config_entry_id=mock_config_entry_2.entry_id, + ) + assert device.primary_config_entry is None + + # Create new + device = device_registry.async_get_or_create( + config_entry_id=mock_config_entry_1.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + identifiers=set(), + manufacturer="manufacturer", + model="model", + ) + assert device.primary_config_entry == mock_config_entry_1.entry_id + + +async def test_update_device_no_connections_or_identifiers( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, +) -> None: + """Test updating a device clearing connections and identifiers.""" + mock_config_entry = MockConfigEntry(domain="mqtt", title=None) + mock_config_entry.add_to_hass(hass) + + device = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + identifiers={("bridgeid", "0123")}, + ) + with pytest.raises(HomeAssistantError): + device_registry.async_update_device( + device.id, 
new_connections=set(), new_identifiers=set() + ) diff --git a/tests/helpers/test_discovery.py b/tests/helpers/test_discovery.py index 100b50e2749..a66ac7474e3 100644 --- a/tests/helpers/test_discovery.py +++ b/tests/helpers/test_discovery.py @@ -9,6 +9,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import discovery from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from tests.common import MockModule, MockPlatform, mock_integration, mock_platform @@ -115,7 +117,7 @@ async def test_circular_import(hass: HomeAssistant) -> None: component_calls = [] platform_calls = [] - def component_setup(hass, config): + def component_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" discovery.load_platform( hass, Platform.SWITCH, "test_circular", {"key": "value"}, config @@ -123,7 +125,12 @@ async def test_circular_import(hass: HomeAssistant) -> None: component_calls.append(1) return True - def setup_platform(hass, config, add_entities_callback, discovery_info=None): + def setup_platform( + hass: HomeAssistant, + config: ConfigType, + add_entities_callback: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, + ) -> None: """Set up mock platform.""" platform_calls.append("disc" if discovery_info else "component") @@ -162,14 +169,14 @@ async def test_1st_discovers_2nd_component(hass: HomeAssistant) -> None: """ component_calls = [] - async def component1_setup(hass, config): + async def component1_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" await discovery.async_discover( hass, "test_component2", {}, "test_component2", {} ) return True - def component2_setup(hass, config): + def component2_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set 
up mock component.""" component_calls.append(1) return True diff --git a/tests/helpers/test_discovery_flow.py b/tests/helpers/test_discovery_flow.py index 9c2249ac17f..0fa315d684b 100644 --- a/tests/helpers/test_discovery_flow.py +++ b/tests/helpers/test_discovery_flow.py @@ -1,12 +1,13 @@ """Test the discovery flow helper.""" +from collections.abc import Generator from unittest.mock import AsyncMock, call, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries -from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, CoreState, HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED +from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import discovery_flow diff --git a/tests/helpers/test_dispatcher.py b/tests/helpers/test_dispatcher.py index c2c8663f47c..0350b2e6e3a 100644 --- a/tests/helpers/test_dispatcher.py +++ b/tests/helpers/test_dispatcher.py @@ -188,8 +188,7 @@ async def test_callback_exception_gets_logged( @callback def bad_handler(*args): """Record calls.""" - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad message callback") + raise Exception("This is a bad message callback") # noqa: TRY002 # wrap in partial to test message logging. async_dispatcher_connect(hass, "test", partial(bad_handler)) @@ -209,8 +208,7 @@ async def test_coro_exception_gets_logged( async def bad_async_handler(*args): """Record calls.""" - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad message in a coro") + raise Exception("This is a bad message in a coro") # noqa: TRY002 # wrap in partial to test message logging. 
async_dispatcher_connect(hass, "test", bad_async_handler) diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index f76b8555580..58554059fb4 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -16,23 +16,26 @@ import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE, STATE_UNKNOWN, + EntityCategory, ) from homeassistant.core import ( Context, HassJobType, HomeAssistant, - HomeAssistantError, ReleaseChannel, callback, ) +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UNDEFINED, UndefinedType from tests.common import ( @@ -922,13 +925,13 @@ async def test_entity_category_property(hass: HomeAssistant) -> None: key="abc", entity_category="ignore_me" ) mock_entity1.entity_id = "hello.world" - mock_entity1._attr_entity_category = entity.EntityCategory.CONFIG + mock_entity1._attr_entity_category = EntityCategory.CONFIG assert mock_entity1.entity_category == "config" mock_entity2 = entity.Entity() mock_entity2.hass = hass mock_entity2.entity_description = entity.EntityDescription( - key="abc", entity_category=entity.EntityCategory.CONFIG + key="abc", entity_category=EntityCategory.CONFIG ) mock_entity2.entity_id = "hello.world" assert mock_entity2.entity_category == "config" @@ -937,8 +940,8 @@ async def test_entity_category_property(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("value", "expected"), [ - ("config", entity.EntityCategory.CONFIG), - ("diagnostic", entity.EntityCategory.DIAGNOSTIC), + ("config", EntityCategory.CONFIG), + ("diagnostic", 
EntityCategory.DIAGNOSTIC), ], ) def test_entity_category_schema(value, expected) -> None: @@ -946,7 +949,7 @@ def test_entity_category_schema(value, expected) -> None: schema = vol.Schema(entity.ENTITY_CATEGORIES_SCHEMA) result = schema(value) assert result == expected - assert isinstance(result, entity.EntityCategory) + assert isinstance(result, EntityCategory) @pytest.mark.parametrize("value", [None, "non_existing"]) @@ -980,10 +983,13 @@ async def _test_friendly_name( ) -> None: """Test friendly name.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([ent]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1305,10 +1311,13 @@ async def test_entity_name_translation_placeholder_errors( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([ent]) - return True ent = MockEntity( unique_id="qwer", @@ -1530,7 +1539,11 @@ async def test_friendly_name_updated( ) -> None: """Test friendly name is updated when device or entity registry updates.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1546,7 +1559,6 @@ async def test_friendly_name_updated( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") diff 
--git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 32ce740edb2..5ce0292c2ec 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -117,7 +117,7 @@ async def test_setup_does_discovery( await hass.async_block_till_done() assert mock_setup.called - assert ("platform_test", {}, {"msg": "discovery_info"}) == mock_setup.call_args[0] + assert mock_setup.call_args[0] == ("platform_test", {}, {"msg": "discovery_info"}) async def test_set_scan_interval_via_config(hass: HomeAssistant) -> None: @@ -191,9 +191,9 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) - assert ["test_domain.test_1", "test_domain.test_3"] == sorted( + assert sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_1)) - ) + ) == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( "test", @@ -201,9 +201,9 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) - assert ["test_domain.test_3"] == sorted( + assert sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_2)) - ) + ) == ["test_domain.test_3"] async def test_platform_not_ready(hass: HomeAssistant) -> None: @@ -288,9 +288,9 @@ async def test_extract_from_service_filter_out_non_existing_entities( {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) - assert ["test_domain.test_2"] == [ + assert [ ent.entity_id for ent in await component.async_extract_from_service(call) - ] + ] == ["test_domain.test_2"] async def test_extract_from_service_no_group_expand(hass: HomeAssistant) -> None: @@ -467,8 +467,11 @@ async def test_extract_all_omit_entity_id( call = ServiceCall("test", "service") - assert [] == sorted( - ent.entity_id for ent in await 
component.async_extract_from_service(call) + assert ( + sorted( + ent.entity_id for ent in await component.async_extract_from_service(call) + ) + == [] ) @@ -484,15 +487,27 @@ async def test_extract_all_use_match_all( call = ServiceCall("test", "service", {"entity_id": "all"}) - assert ["test_domain.test_1", "test_domain.test_2"] == sorted( + assert sorted( ent.entity_id for ent in await component.async_extract_from_service(call) - ) + ) == ["test_domain.test_1", "test_domain.test_2"] assert ( "Not passing an entity ID to a service to target all entities is deprecated" ) not in caplog.text -async def test_register_entity_service(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("schema", "service_data"), + [ + ({"some": str}, {"some": "data"}), + ({}, {}), + (None, {}), + ], +) +async def test_register_entity_service( + hass: HomeAssistant, + schema: dict | None, + service_data: dict, +) -> None: """Test registering an enttiy service and calling it.""" entity = MockEntity(entity_id=f"{DOMAIN}.entity") calls = [] @@ -507,9 +522,7 @@ async def test_register_entity_service(hass: HomeAssistant) -> None: await component.async_setup({}) await component.async_add_entities([entity]) - component.async_register_entity_service( - "hello", {"some": str}, "async_called_by_service" - ) + component.async_register_entity_service("hello", schema, "async_called_by_service") with pytest.raises(vol.Invalid): await hass.services.async_call( @@ -521,28 +534,55 @@ async def test_register_entity_service(hass: HomeAssistant) -> None: assert len(calls) == 0 await hass.services.async_call( - DOMAIN, "hello", {"entity_id": entity.entity_id, "some": "data"}, blocking=True + DOMAIN, "hello", {"entity_id": entity.entity_id} | service_data, blocking=True ) assert len(calls) == 1 - assert calls[0] == {"some": "data"} + assert calls[0] == service_data await hass.services.async_call( - DOMAIN, "hello", {"entity_id": ENTITY_MATCH_ALL, "some": "data"}, blocking=True + DOMAIN, "hello", 
{"entity_id": ENTITY_MATCH_ALL} | service_data, blocking=True ) assert len(calls) == 2 - assert calls[1] == {"some": "data"} + assert calls[1] == service_data await hass.services.async_call( - DOMAIN, "hello", {"entity_id": ENTITY_MATCH_NONE, "some": "data"}, blocking=True + DOMAIN, "hello", {"entity_id": ENTITY_MATCH_NONE} | service_data, blocking=True ) assert len(calls) == 2 await hass.services.async_call( - DOMAIN, "hello", {"area_id": ENTITY_MATCH_NONE, "some": "data"}, blocking=True + DOMAIN, "hello", {"area_id": ENTITY_MATCH_NONE} | service_data, blocking=True ) assert len(calls) == 2 +async def test_register_entity_service_non_entity_service_schema( + hass: HomeAssistant, +) -> None: + """Test attempting to register a service with an incomplete schema.""" + component = EntityComponent(_LOGGER, DOMAIN, hass) + + with pytest.raises( + HomeAssistantError, + match=( + "The schema does not include all required keys: entity_id, device_id, area_id, " + "floor_id, label_id" + ), + ): + component.async_register_entity_service( + "hello", vol.Schema({"some": str}), Mock() + ) + + # The check currently does not recurse into vol.All or vol.Any allowing these + # non-compliant schemas to pass + component.async_register_entity_service( + "hello", vol.All(vol.Schema({"some": str})), Mock() + ) + component.async_register_entity_service( + "hello", vol.Any(vol.Schema({"some": str})), Mock() + ) + + async def test_register_entity_service_response_data(hass: HomeAssistant) -> None: """Test an entity service that does support response data.""" entity = MockEntity(entity_id=f"{DOMAIN}.entity") diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 68024bc936f..2cc3348626c 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -8,8 +8,10 @@ from typing import Any from unittest.mock import ANY, AsyncMock, Mock, patch import pytest +import voluptuous as vol -from homeassistant.const import 
EVENT_HOMEASSISTANT_STARTED, PERCENTAGE +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, PERCENTAGE, EntityCategory from homeassistant.core import ( CoreState, HomeAssistant, @@ -26,16 +28,13 @@ from homeassistant.helpers import ( entity_registry as er, issue_registry as ir, ) -from homeassistant.helpers.entity import ( - DeviceInfo, - Entity, - EntityCategory, - async_generate_entity_id, -) +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity, async_generate_entity_id from homeassistant.helpers.entity_component import ( DEFAULT_SCAN_INTERVAL, EntityComponent, ) +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util @@ -858,10 +857,13 @@ async def test_setup_entry( ) -> None: """Test we can setup an entry.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([MockEntity(name="test1", unique_id="unique")]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1141,7 +1143,11 @@ async def test_device_info_called( model="via", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1166,7 +1172,6 @@ async def test_device_info_called( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1187,6 +1192,7 @@ async def test_device_info_called( assert 
device.manufacturer == "test-manuf" assert device.model == "test-model" assert device.name == "test-name" + assert device.primary_config_entry == config_entry.entry_id assert device.suggested_area == "Heliport" assert device.sw_version == "test-sw" assert device.hw_version == "test-hw" @@ -1209,7 +1215,11 @@ async def test_device_info_not_overrides( assert device.manufacturer == "test-manufacturer" assert device.model == "test-model" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1224,7 +1234,6 @@ async def test_device_info_not_overrides( ) ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1259,7 +1268,11 @@ async def test_device_info_homeassistant_url( model="via", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1273,7 +1286,6 @@ async def test_device_info_homeassistant_url( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1308,7 +1320,11 @@ async def test_device_info_change_to_no_url( configuration_url="homeassistant://config/mqtt", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1322,7 +1338,6 @@ async def test_device_info_change_to_no_url( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1377,10 
+1392,13 @@ async def test_entity_disabled_by_device( unique_id="disabled", device_info=DeviceInfo(connections=connections) ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([entity_disabled]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id", domain=DOMAIN) @@ -1425,6 +1443,7 @@ async def test_entity_hidden_by_integration( assert entry_hidden.hidden_by is er.RegistryEntryHider.INTEGRATION +@pytest.mark.usefixtures("freezer") async def test_entity_info_added_to_entity_registry( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: @@ -1453,11 +1472,13 @@ async def test_entity_info_added_to_entity_registry( "default", "test_domain", capabilities={"max": 100}, + created_at=dt_util.utcnow(), device_class=None, entity_category=EntityCategory.CONFIG, has_entity_name=True, icon=None, id=ANY, + modified_at=dt_util.utcnow(), name=None, original_device_class="mock-device-class", original_icon="nice:icon", @@ -1760,6 +1781,64 @@ async def test_register_entity_service_limited_to_matching_platforms( } +async def test_register_entity_service_none_schema( + hass: HomeAssistant, +) -> None: + """Test registering a service with schema set to None.""" + entity_platform = MockEntityPlatform( + hass, domain="mock_integration", platform_name="mock_platform", platform=None + ) + entity1 = SlowEntity(name="entity_1") + entity2 = SlowEntity(name="entity_1") + await entity_platform.async_add_entities([entity1, entity2]) + + entities = [] + + @callback + def handle_service(entity, *_): + entities.append(entity) + + entity_platform.async_register_entity_service("hello", None, handle_service) + + await hass.services.async_call( + "mock_platform", "hello", {"entity_id": "all"}, blocking=True 
+ ) + + assert len(entities) == 2 + assert entity1 in entities + assert entity2 in entities + + +async def test_register_entity_service_non_entity_service_schema( + hass: HomeAssistant, +) -> None: + """Test attempting to register a service with an incomplete schema.""" + entity_platform = MockEntityPlatform( + hass, domain="mock_integration", platform_name="mock_platform", platform=None + ) + + with pytest.raises( + HomeAssistantError, + match=( + "The schema does not include all required keys: entity_id, device_id, area_id, " + "floor_id, label_id" + ), + ): + entity_platform.async_register_entity_service( + "hello", + vol.Schema({"some": str}), + Mock(), + ) + # The check currently does not recurse into vol.All or vol.Any allowing these + # non-compliant schemas to pass + entity_platform.async_register_entity_service( + "hello", vol.All(vol.Schema({"some": str})), Mock() + ) + entity_platform.async_register_entity_service( + "hello", vol.Any(vol.Schema({"some": str})), Mock() + ) + + @pytest.mark.parametrize("update_before_add", [True, False]) async def test_invalid_entity_id( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, update_before_add: bool @@ -1796,13 +1875,16 @@ async def test_setup_entry_with_entities_that_block_forever( ) -> None: """Test we cancel adding entities when we reach the timeout.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [MockBlockingEntity(name="test1", unique_id="unique")], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1841,13 +1923,16 @@ async def test_cancellation_is_not_blocked( ) -> None: """Test cancellation is not blocked while adding entities.""" - async def 
async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [MockCancellingEntity(name="test1", unique_id="unique")], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1935,7 +2020,11 @@ async def test_entity_name_influences_entity_id( ) -> None: """Test entity_id is influenced by entity name.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1952,7 +2041,6 @@ async def test_entity_name_influences_entity_id( ], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2020,12 +2108,15 @@ async def test_translated_entity_name_influences_entity_id( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [TranslatedEntity(has_entity_name)], update_before_add=update_before_add ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2105,10 +2196,13 @@ async def test_translated_device_class_name_influences_entity_id( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, async_add_entities): + async def 
async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([TranslatedDeviceClassEntity(device_class, has_entity_name)]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2164,10 +2258,13 @@ async def test_device_name_defaulting_config_entry( _attr_unique_id = "qwer" _attr_device_info = device_info - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([DeviceNameEntity()]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(title=config_entry_title, entry_id="super-mock-id") @@ -2217,10 +2314,13 @@ async def test_device_type_error_checking( _attr_unique_id = "qwer" _attr_device_info = device_info - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([DeviceNameEntity()]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry( diff --git a/tests/helpers/test_entity_registry.py b/tests/helpers/test_entity_registry.py index 1390ef3889d..9b1d68c7777 100644 --- a/tests/helpers/test_entity_registry.py +++ b/tests/helpers/test_entity_registry.py @@ -1,6 +1,6 @@ """Tests for the Entity Registry.""" -from datetime import timedelta +from datetime import datetime, timedelta from functools import partial from typing import Any from unittest.mock import patch @@ -19,8 +19,10 @@ from homeassistant.const import ( from homeassistant.core import CoreState, HomeAssistant, 
callback from homeassistant.exceptions import MaxLengthExceeded from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.dt import utc_from_timestamp from tests.common import ( + ANY, MockConfigEntry, async_capture_events, async_fire_time_changed, @@ -69,9 +71,14 @@ def test_get_or_create_suggested_object_id(entity_registry: er.EntityRegistry) - assert entry.entity_id == "light.beer" -def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: +def test_get_or_create_updates_data( + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: """Test that we update data in get_or_create.""" orig_config_entry = MockConfigEntry(domain="light") + created = datetime.fromisoformat("2024-02-14T12:00:00.0+00:00") + freezer.move_to(created) orig_entry = entity_registry.async_get_or_create( "light", @@ -100,6 +107,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: "hue", capabilities={"max": 100}, config_entry_id=orig_config_entry.entry_id, + created_at=created, device_class=None, device_id="mock-dev-id", disabled_by=er.RegistryEntryDisabler.HASS, @@ -108,6 +116,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: hidden_by=er.RegistryEntryHider.INTEGRATION, icon=None, id=orig_entry.id, + modified_at=created, name=None, original_device_class="mock-device-class", original_icon="initial-original_icon", @@ -118,6 +127,8 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: ) new_config_entry = MockConfigEntry(domain="light") + modified = created + timedelta(minutes=5) + freezer.move_to(modified) new_entry = entity_registry.async_get_or_create( "light", @@ -146,6 +157,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: area_id=None, capabilities={"new-max": 150}, config_entry_id=new_config_entry.entry_id, + created_at=created, device_class=None, 
device_id="new-mock-dev-id", disabled_by=er.RegistryEntryDisabler.HASS, # Should not be updated @@ -154,6 +166,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: hidden_by=er.RegistryEntryHider.INTEGRATION, # Should not be updated icon=None, id=orig_entry.id, + modified_at=modified, name=None, original_device_class="new-mock-device-class", original_icon="updated-original_icon", @@ -164,6 +177,8 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: ) assert set(entity_registry.async_device_ids()) == {"new-mock-dev-id"} + modified = created + timedelta(minutes=5) + freezer.move_to(modified) new_entry = entity_registry.async_get_or_create( "light", @@ -192,6 +207,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: area_id=None, capabilities=None, config_entry_id=None, + created_at=created, device_class=None, device_id=None, disabled_by=er.RegistryEntryDisabler.HASS, # Should not be updated @@ -200,6 +216,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: hidden_by=er.RegistryEntryHider.INTEGRATION, # Should not be updated icon=None, id=orig_entry.id, + modified_at=modified, name=None, original_device_class=None, original_icon=None, @@ -309,8 +326,12 @@ async def test_loading_saving_data( assert orig_entry1 == new_entry1 assert orig_entry2 == new_entry2 - assert orig_entry3 == new_entry3 - assert orig_entry4 == new_entry4 + + # By converting a deleted device to a active device, the modified_at will be updated + assert orig_entry3.modified_at < new_entry3.modified_at + assert attr.evolve(orig_entry3, modified_at=new_entry3.modified_at) == new_entry3 + assert orig_entry4.modified_at < new_entry4.modified_at + assert attr.evolve(orig_entry4, modified_at=new_entry4.modified_at) == new_entry4 assert new_entry2.area_id == "mock-area-id" assert new_entry2.categories == {"scope", "id"} @@ -422,6 +443,8 @@ async def test_filter_on_load( assert 
entry_with_name.name == "registry override" assert entry_without_name.name is None assert not entry_with_name.disabled + assert entry_with_name.created_at == utc_from_timestamp(0) + assert entry_with_name.modified_at == utc_from_timestamp(0) entry_disabled_hass = registry.async_get_or_create( "test", "super_platform", "disabled-hass" @@ -453,6 +476,7 @@ async def test_load_bad_data( "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": "2024-02-14T12:00:00.900075+00:00", "device_class": None, "device_id": None, "disabled_by": None, @@ -463,6 +487,7 @@ async def test_load_bad_data( "icon": None, "id": "00001", "labels": [], + "modified_at": "2024-02-14T12:00:00.900075+00:00", "name": None, "options": None, "original_device_class": None, @@ -481,6 +506,7 @@ async def test_load_bad_data( "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": "2024-02-14T12:00:00.900075+00:00", "device_class": None, "device_id": None, "disabled_by": None, @@ -491,6 +517,7 @@ async def test_load_bad_data( "icon": None, "id": "00002", "labels": [], + "modified_at": "2024-02-14T12:00:00.900075+00:00", "name": None, "options": None, "original_device_class": None, @@ -507,16 +534,20 @@ async def test_load_bad_data( "deleted_entities": [ { "config_entry_id": None, + "created_at": "2024-02-14T12:00:00.900075+00:00", "entity_id": "test.test3", "id": "00003", + "modified_at": "2024-02-14T12:00:00.900075+00:00", "orphaned_timestamp": None, "platform": "super_platform", "unique_id": 234, # Should not load }, { "config_entry_id": None, + "created_at": "2024-02-14T12:00:00.900075+00:00", "entity_id": "test.test4", "id": "00004", + "modified_at": "2024-02-14T12:00:00.900075+00:00", "orphaned_timestamp": None, "platform": "super_platform", "unique_id": ["also", "not", "valid"], # Should not load @@ -695,6 +726,49 @@ async def test_migration_1_1(hass: HomeAssistant, hass_storage: dict[str, Any]) assert entry.device_class is None assert 
entry.original_device_class == "best_class" + # Check we store migrated data + await flush_store(registry._store) + assert hass_storage[er.STORAGE_KEY] == { + "version": er.STORAGE_VERSION_MAJOR, + "minor_version": er.STORAGE_VERSION_MINOR, + "key": er.STORAGE_KEY, + "data": { + "entities": [ + { + "aliases": [], + "area_id": None, + "capabilities": {}, + "categories": {}, + "config_entry_id": None, + "created_at": "1970-01-01T00:00:00+00:00", + "device_id": None, + "disabled_by": None, + "entity_category": None, + "entity_id": "test.entity", + "has_entity_name": False, + "hidden_by": None, + "icon": None, + "id": ANY, + "labels": [], + "modified_at": "1970-01-01T00:00:00+00:00", + "name": None, + "options": {}, + "original_device_class": "best_class", + "original_icon": None, + "original_name": None, + "platform": "super_platform", + "previous_unique_id": None, + "supported_features": 0, + "translation_key": None, + "unique_id": "very_unique", + "unit_of_measurement": None, + "device_class": None, + } + ], + "deleted_entities": [], + }, + } + @pytest.mark.parametrize("load_registries", [False]) async def test_migration_1_7(hass: HomeAssistant, hass_storage: dict[str, Any]) -> None: @@ -1106,10 +1180,10 @@ async def test_remove_config_entry_from_device_removes_entities( config_entry_id=config_entry_2.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - assert device_entry.config_entries == [ + assert device_entry.config_entries == { config_entry_1.entry_id, config_entry_2.entry_id, - ] + } # Create one entity for each config entry entry_1 = entity_registry.async_get_or_create( @@ -1174,10 +1248,10 @@ async def test_remove_config_entry_from_device_removes_entities_2( config_entry_id=config_entry_2.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - assert device_entry.config_entries == [ + assert device_entry.config_entries == { config_entry_1.entry_id, config_entry_2.entry_id, - ] + } # Create one entity for each 
config entry entry_1 = entity_registry.async_get_or_create( diff --git a/tests/helpers/test_event.py b/tests/helpers/test_event.py index edce36218e8..6c71f1d8a7c 100644 --- a/tests/helpers/test_event.py +++ b/tests/helpers/test_event.py @@ -15,7 +15,13 @@ import pytest from homeassistant.const import MATCH_ALL import homeassistant.core as ha -from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback +from homeassistant.core import ( + Event, + EventStateChangedData, + EventStateReportedData, + HomeAssistant, + callback, +) from homeassistant.exceptions import TemplateError from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED @@ -34,6 +40,7 @@ from homeassistant.helpers.event import ( async_track_state_change_event, async_track_state_change_filtered, async_track_state_removed_domain, + async_track_state_report_event, async_track_sunrise, async_track_sunset, async_track_template, @@ -1469,7 +1476,7 @@ async def test_track_template_result_super_template_2( wildercard_runs = [] wildercard_runs_availability = [] - template_availability = Template(availability_template) + template_availability = Template(availability_template, hass) template_condition = Template("{{states.sensor.test.state}}", hass) template_condition_var = Template( "{{(states.sensor.test.state|int) + test }}", hass @@ -1621,7 +1628,7 @@ async def test_track_template_result_super_template_2_initially_false( wildercard_runs = [] wildercard_runs_availability = [] - template_availability = Template(availability_template) + template_availability = Template(availability_template, hass) template_condition = Template("{{states.sensor.test.state}}", hass) template_condition_var = Template( "{{(states.sensor.test.state|int) + test }}", hass @@ -3117,11 +3124,11 @@ async def test_async_track_template_result_multiple_templates( ) -> None: """Test tracking multiple templates.""" - 
template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ states.switch.test.state == 'on' }}") - template_3 = Template("{{ states.switch.test.state == 'off' }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_3 = Template("{{ states.switch.test.state == 'off' }}", hass) template_4 = Template( - "{{ states.binary_sensor | map(attribute='entity_id') | list }}" + "{{ states.binary_sensor | map(attribute='entity_id') | list }}", hass ) refresh_runs = [] @@ -3181,11 +3188,12 @@ async def test_async_track_template_result_multiple_templates_mixing_domain( ) -> None: """Test tracking multiple templates when tracking entities and an entire domain.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ states.switch.test.state == 'on' }}") - template_3 = Template("{{ states.switch.test.state == 'off' }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_3 = Template("{{ states.switch.test.state == 'off' }}", hass) template_4 = Template( - "{{ states.switch | sort(attribute='entity_id') | map(attribute='entity_id') | list }}" + "{{ states.switch | sort(attribute='entity_id') | map(attribute='entity_id') | list }}", + hass, ) refresh_runs = [] @@ -3410,8 +3418,8 @@ async def test_async_track_template_result_multiple_templates_mixing_listeners( ) -> None: """Test tracking multiple templates with mixing listener types.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ now() and True }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ now() and True }}", hass) refresh_runs = [] @@ -4907,3 +4915,26 @@ async def test_track_point_in_time_repr( assert "Exception in callback _TrackPointUTCTime" in 
caplog.text assert "._raise_exception" in caplog.text await hass.async_block_till_done(wait_background_tasks=True) + + +async def test_async_track_state_report_event(hass: HomeAssistant) -> None: + """Test async_track_state_report_event.""" + tracker_called: list[ha.State] = [] + + @ha.callback + def single_run_callback(event: Event[EventStateReportedData]) -> None: + new_state = event.data["new_state"] + tracker_called.append(new_state) + + unsub = async_track_state_report_event( + hass, ["light.bowl", "light.top"], single_run_callback + ) + hass.states.async_set("light.bowl", "on") + hass.states.async_set("light.top", "on") + await hass.async_block_till_done() + assert len(tracker_called) == 0 + hass.states.async_set("light.bowl", "on") + hass.states.async_set("light.top", "on") + await hass.async_block_till_done() + assert len(tracker_called) == 2 + unsub() diff --git a/tests/helpers/test_floor_registry.py b/tests/helpers/test_floor_registry.py index 3b07563fd11..c39ac3c40b4 100644 --- a/tests/helpers/test_floor_registry.py +++ b/tests/helpers/test_floor_registry.py @@ -1,15 +1,18 @@ """Tests for the floor registry.""" +from datetime import datetime from functools import partial import re from typing import Any +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import area_registry as ar, floor_registry as fr +from homeassistant.util.dt import utcnow -from tests.common import async_capture_events, flush_store +from tests.common import ANY, async_capture_events, flush_store async def test_list_floors(floor_registry: fr.FloorRegistry) -> None: @@ -18,8 +21,10 @@ async def test_list_floors(floor_registry: fr.FloorRegistry) -> None: assert len(list(floors)) == len(floor_registry.floors) +@pytest.mark.usefixtures("freezer") async def test_create_floor( - hass: HomeAssistant, floor_registry: fr.FloorRegistry + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, ) -> None: """Make 
sure that we can create floors.""" update_events = async_capture_events(hass, fr.EVENT_FLOOR_REGISTRY_UPDATED) @@ -30,11 +35,16 @@ async def test_create_floor( level=1, ) - assert floor.floor_id == "first_floor" - assert floor.name == "First floor" - assert floor.icon == "mdi:home-floor-1" - assert floor.aliases == {"first", "ground", "ground floor"} - assert floor.level == 1 + assert floor == fr.FloorEntry( + floor_id="first_floor", + name="First floor", + icon="mdi:home-floor-1", + aliases={"first", "ground", "ground floor"}, + level=1, + created_at=utcnow(), + modified_at=utcnow(), + normalized_name=ANY, + ) assert len(floor_registry.floors) == 1 @@ -116,18 +126,31 @@ async def test_delete_non_existing_floor(floor_registry: fr.FloorRegistry) -> No async def test_update_floor( - hass: HomeAssistant, floor_registry: fr.FloorRegistry + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can update floors.""" + created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") + freezer.move_to(created_at) + update_events = async_capture_events(hass, fr.EVENT_FLOOR_REGISTRY_UPDATED) floor = floor_registry.async_create("First floor") + assert floor == fr.FloorEntry( + floor_id="first_floor", + name="First floor", + icon=None, + aliases=set(), + level=None, + created_at=created_at, + modified_at=created_at, + normalized_name=ANY, + ) assert len(floor_registry.floors) == 1 - assert floor.floor_id == "first_floor" - assert floor.name == "First floor" - assert floor.icon is None - assert floor.aliases == set() - assert floor.level is None + + modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") + freezer.move_to(modified_at) updated_floor = floor_registry.async_update( floor.floor_id, @@ -138,11 +161,16 @@ async def test_update_floor( ) assert updated_floor != floor - assert updated_floor.floor_id == "first_floor" - assert updated_floor.name == "Second floor" - assert updated_floor.icon == 
"mdi:home-floor-2" - assert updated_floor.aliases == {"ground", "downstairs"} - assert updated_floor.level == 2 + assert updated_floor == fr.FloorEntry( + floor_id="first_floor", + name="Second floor", + icon="mdi:home-floor-2", + aliases={"ground", "downstairs"}, + level=2, + created_at=created_at, + modified_at=modified_at, + normalized_name=ANY, + ) assert len(floor_registry.floors) == 1 @@ -236,15 +264,22 @@ async def test_update_floor_with_normalized_name_already_in_use( async def test_load_floors( - hass: HomeAssistant, floor_registry: fr.FloorRegistry + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can load/save data correctly.""" + floor1_created = datetime.fromisoformat("2024-01-01T00:00:00+00:00") + freezer.move_to(floor1_created) floor1 = floor_registry.async_create( "First floor", icon="mdi:home-floor-1", aliases={"first", "ground"}, level=1, ) + + floor2_created = datetime.fromisoformat("2024-02-01T00:00:00+00:00") + freezer.move_to(floor2_created) floor2 = floor_registry.async_create( "Second floor", icon="mdi:home-floor-2", @@ -262,25 +297,16 @@ async def test_load_floors( assert list(floor_registry.floors) == list(registry2.floors) floor1_registry2 = registry2.async_get_floor_by_name("First floor") - assert floor1_registry2.floor_id == floor1.floor_id - assert floor1_registry2.name == floor1.name - assert floor1_registry2.icon == floor1.icon - assert floor1_registry2.aliases == floor1.aliases - assert floor1_registry2.level == floor1.level - assert floor1_registry2.normalized_name == floor1.normalized_name + assert floor1_registry2 == floor1 floor2_registry2 = registry2.async_get_floor_by_name("Second floor") - assert floor2_registry2.floor_id == floor2.floor_id - assert floor2_registry2.name == floor2.name - assert floor2_registry2.icon == floor2.icon - assert floor2_registry2.aliases == floor2.aliases - assert floor2_registry2.level == floor2.level - assert 
floor2_registry2.normalized_name == floor2.normalized_name + assert floor2_registry2 == floor2 @pytest.mark.parametrize("load_registries", [False]) async def test_loading_floors_from_storage( - hass: HomeAssistant, hass_storage: dict[str, Any] + hass: HomeAssistant, + hass_storage: dict[str, Any], ) -> None: """Test loading stored floors on start.""" hass_storage[fr.STORAGE_KEY] = { @@ -392,3 +418,52 @@ async def test_async_update_thread_safety( await hass.async_add_executor_job( partial(floor_registry.async_update, any_floor.floor_id, name="new name") ) + + +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_from_1_1( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.1.""" + hass_storage[fr.STORAGE_KEY] = { + "version": 1, + "data": { + "floors": [ + { + "floor_id": "12345A", + "name": "mock", + "aliases": [], + "icon": None, + "level": None, + } + ] + }, + } + + await fr.async_load(hass) + registry = fr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_floor_by_name("mock") + assert entry.floor_id == "12345A" + + # Check we store migrated data + await flush_store(registry._store) + assert hass_storage[fr.STORAGE_KEY] == { + "version": fr.STORAGE_VERSION_MAJOR, + "minor_version": fr.STORAGE_VERSION_MINOR, + "key": fr.STORAGE_KEY, + "data": { + "floors": [ + { + "aliases": [], + "icon": None, + "floor_id": "12345A", + "level": None, + "name": "mock", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + } + ] + }, + } diff --git a/tests/helpers/test_httpx_client.py b/tests/helpers/test_httpx_client.py index 60bdbe607e3..ccfccb3d698 100644 --- a/tests/helpers/test_httpx_client.py +++ b/tests/helpers/test_httpx_client.py @@ -5,7 +5,8 @@ from unittest.mock import Mock, patch import httpx import pytest -from homeassistant.core import EVENT_HOMEASSISTANT_CLOSE, HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE 
+from homeassistant.core import HomeAssistant import homeassistant.helpers.httpx_client as client from tests.common import MockModule, extract_stack_to_frame, mock_integration diff --git a/tests/helpers/test_integration_platform.py b/tests/helpers/test_integration_platform.py index 81eb1f2fd38..93bfeb2da5b 100644 --- a/tests/helpers/test_integration_platform.py +++ b/tests/helpers/test_integration_platform.py @@ -2,17 +2,19 @@ from collections.abc import Callable from types import ModuleType +from typing import Any from unittest.mock import Mock, patch import pytest from homeassistant import loader +from homeassistant.const import EVENT_COMPONENT_LOADED from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) -from homeassistant.setup import ATTR_COMPONENT, EVENT_COMPONENT_LOADED +from homeassistant.setup import ATTR_COMPONENT from tests.common import mock_platform @@ -28,7 +30,9 @@ async def test_process_integration_platforms_with_wait(hass: HomeAssistant) -> N processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -66,7 +70,9 @@ async def test_process_integration_platforms(hass: HomeAssistant) -> None: processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -106,7 +112,9 @@ async def test_process_integration_platforms_import_fails( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -149,7 +157,9 @@ async 
def test_process_integration_platforms_import_fails_after_registered( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -241,7 +251,9 @@ async def test_broken_integration( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -264,7 +276,9 @@ async def test_process_integration_platforms_no_integrations( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) diff --git a/tests/helpers/test_intent.py b/tests/helpers/test_intent.py index c592fc50c0a..ae8c2ed65d0 100644 --- a/tests/helpers/test_intent.py +++ b/tests/helpers/test_intent.py @@ -765,7 +765,7 @@ async def test_service_intent_handler_required_domains(hass: HomeAssistant) -> N ) # Still fails even if we provide the domain - with pytest.raises(intent.MatchFailedError): + with pytest.raises(intent.InvalidSlotInfo): await intent.async_handle( hass, "test", @@ -777,7 +777,10 @@ async def test_service_intent_handler_required_domains(hass: HomeAssistant) -> N async def test_service_handler_empty_strings(hass: HomeAssistant) -> None: """Test that passing empty strings for filters fails in ServiceIntentHandler.""" handler = intent.ServiceIntentHandler( - "TestType", "light", "turn_on", "Turned {} on" + "TestType", + "light", + "turn_on", + "Turned {} on", ) intent.async_register(hass, handler) @@ -814,3 +817,55 @@ async def test_service_handler_no_filter(hass: HomeAssistant) -> None: "test", "TestType", ) + + +async def test_service_handler_device_classes( + hass: HomeAssistant, entity_registry: 
er.EntityRegistry +) -> None: + """Test that device classes can be used to match entities in ServiceIntentHandler.""" + + # Register a fake service and a switch intent handler + call_done = asyncio.Event() + calls = [] + + # Register a mock service that records its calls + async def mock_service(call): + """Mock service.""" + call_done.set() + calls.append(call) + + hass.services.async_register("switch", "turn_on", mock_service) + + handler = intent.ServiceIntentHandler( + "TestType", + "switch", + "turn_on", + "Turned {} on", + device_classes={switch.SwitchDeviceClass}, + ) + intent.async_register(hass, handler) + + # Create a switch entity and match by device class + hass.states.async_set( + "switch.bedroom", "off", attributes={"device_class": "outlet"} + ) + hass.states.async_set("switch.living_room", "off") + + await intent.async_handle( + hass, + "test", + "TestType", + slots={"device_class": {"value": "outlet"}}, + ) + await call_done.wait() + assert [call.data.get("entity_id") for call in calls] == ["switch.bedroom"] + calls.clear() + + # Validate which device classes are allowed + with pytest.raises(intent.InvalidSlotInfo): + await intent.async_handle( + hass, + "test", + "TestType", + slots={"device_class": {"value": "light"}}, + ) diff --git a/tests/helpers/test_json.py b/tests/helpers/test_json.py index 061faed6f93..123731de68d 100644 --- a/tests/helpers/test_json.py +++ b/tests/helpers/test_json.py @@ -13,6 +13,7 @@ from unittest.mock import Mock, patch import pytest from homeassistant.core import Event, HomeAssistant, State +from homeassistant.helpers import json as json_helper from homeassistant.helpers.json import ( ExtendedJSONEncoder, JSONEncoder as DefaultHASSJSONEncoder, @@ -25,9 +26,14 @@ from homeassistant.helpers.json import ( ) from homeassistant.util import dt as dt_util from homeassistant.util.color import RGBColor -from homeassistant.util.json import SerializationError, load_json +from homeassistant.util.json import (
JSON_DECODE_EXCEPTIONS, + JSON_ENCODE_EXCEPTIONS, + SerializationError, + load_json, +) -from tests.common import json_round_trip +from tests.common import import_and_test_deprecated_constant, json_round_trip # Test data that can be saved as JSON TEST_JSON_A = {"a": 1, "B": "two"} @@ -335,3 +341,50 @@ def test_find_unserializable_data() -> None: BadData(), dump=partial(json.dumps, cls=MockJSONEncoder), ) == {"$(BadData).bla": bad_data} + + +def test_deprecated_json_loads(caplog: pytest.LogCaptureFixture) -> None: + """Test deprecated json_loads function. + + It was moved from helpers to util in #88099 + """ + json_helper.json_loads("{}") + assert ( + "json_loads is a deprecated function which will be removed in " + "HA Core 2025.8. Use homeassistant.util.json.json_loads instead" + ) in caplog.text + + +@pytest.mark.parametrize( + ("constant_name", "replacement_name", "replacement"), + [ + ( + "JSON_DECODE_EXCEPTIONS", + "homeassistant.util.json.JSON_DECODE_EXCEPTIONS", + JSON_DECODE_EXCEPTIONS, + ), + ( + "JSON_ENCODE_EXCEPTIONS", + "homeassistant.util.json.JSON_ENCODE_EXCEPTIONS", + JSON_ENCODE_EXCEPTIONS, + ), + ], +) +def test_deprecated_aliases( + caplog: pytest.LogCaptureFixture, + constant_name: str, + replacement_name: str, + replacement: Any, +) -> None: + """Test deprecated JSON_DECODE_EXCEPTIONS and JSON_ENCODE_EXCEPTIONS constants. 
+ + They were moved from helpers to util in #88099 + """ + import_and_test_deprecated_constant( + caplog, + json_helper, + constant_name, + replacement_name, + replacement, + "2025.8", + ) diff --git a/tests/helpers/test_label_registry.py b/tests/helpers/test_label_registry.py index 445319a4b62..f466edad874 100644 --- a/tests/helpers/test_label_registry.py +++ b/tests/helpers/test_label_registry.py @@ -1,9 +1,11 @@ """Tests for the Label Registry.""" +from datetime import datetime from functools import partial import re from typing import Any +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant @@ -12,8 +14,9 @@ from homeassistant.helpers import ( entity_registry as er, label_registry as lr, ) +from homeassistant.util.dt import utcnow -from tests.common import MockConfigEntry, async_capture_events, flush_store +from tests.common import ANY, MockConfigEntry, async_capture_events, flush_store async def test_list_labels(label_registry: lr.LabelRegistry) -> None: @@ -22,6 +25,7 @@ async def test_list_labels(label_registry: lr.LabelRegistry) -> None: assert len(list(labels)) == len(label_registry.labels) +@pytest.mark.usefixtures("freezer") async def test_create_label( hass: HomeAssistant, label_registry: lr.LabelRegistry ) -> None: @@ -34,11 +38,16 @@ async def test_create_label( description="This label is for testing", ) - assert label.label_id == "my_label" - assert label.name == "My Label" - assert label.color == "#FF0000" - assert label.icon == "mdi:test" - assert label.description == "This label is for testing" + assert label == lr.LabelEntry( + label_id="my_label", + name="My Label", + color="#FF0000", + icon="mdi:test", + description="This label is for testing", + created_at=utcnow(), + modified_at=utcnow(), + normalized_name=ANY, + ) assert len(label_registry.labels) == 1 @@ -119,19 +128,30 @@ async def test_delete_non_existing_label(label_registry: lr.LabelRegistry) -> No async def test_update_label( - 
hass: HomeAssistant, label_registry: lr.LabelRegistry + hass: HomeAssistant, + label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can update labels.""" + created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") + freezer.move_to(created_at) update_events = async_capture_events(hass, lr.EVENT_LABEL_REGISTRY_UPDATED) label = label_registry.async_create("Mock") assert len(label_registry.labels) == 1 - assert label.label_id == "mock" - assert label.name == "Mock" - assert label.color is None - assert label.icon is None - assert label.description is None + assert label == lr.LabelEntry( + label_id="mock", + name="Mock", + color=None, + icon=None, + description=None, + created_at=created_at, + modified_at=created_at, + normalized_name=ANY, + ) + modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") + freezer.move_to(modified_at) updated_label = label_registry.async_update( label.label_id, name="Updated", @@ -141,12 +161,16 @@ async def test_update_label( ) assert updated_label != label - assert updated_label.label_id == "mock" - assert updated_label.name == "Updated" - assert updated_label.color == "#FFFFFF" - assert updated_label.icon == "mdi:update" - assert updated_label.description == "Updated description" - + assert updated_label == lr.LabelEntry( + label_id="mock", + name="Updated", + color="#FFFFFF", + icon="mdi:update", + description="Updated description", + created_at=created_at, + modified_at=modified_at, + normalized_name=ANY, + ) assert len(label_registry.labels) == 1 await hass.async_block_till_done() @@ -242,15 +266,21 @@ async def test_update_label_with_normalized_name_already_in_use( async def test_load_labels( - hass: HomeAssistant, label_registry: lr.LabelRegistry + hass: HomeAssistant, + label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can load/save data correctly.""" + label1_created = 
datetime.fromisoformat("2024-01-01T00:00:00+00:00") + freezer.move_to(label1_created) label1 = label_registry.async_create( "Label One", color="#FF000", icon="mdi:one", description="This label is label one", ) + label2_created = datetime.fromisoformat("2024-02-01T00:00:00+00:00") + freezer.move_to(label2_created) label2 = label_registry.async_create( "Label Two", color="#000FF", @@ -268,19 +298,10 @@ async def test_load_labels( assert list(label_registry.labels) == list(registry2.labels) label1_registry2 = registry2.async_get_label_by_name("Label One") - assert label1_registry2.label_id == label1.label_id - assert label1_registry2.name == label1.name - assert label1_registry2.color == label1.color - assert label1_registry2.description == label1.description - assert label1_registry2.icon == label1.icon - assert label1_registry2.normalized_name == label1.normalized_name + assert label1_registry2 == label1 label2_registry2 = registry2.async_get_label_by_name("Label Two") - assert label2_registry2.name == label2.name - assert label2_registry2.color == label2.color - assert label2_registry2.description == label2.description - assert label2_registry2.icon == label2.icon - assert label2_registry2.normalized_name == label2.normalized_name + assert label2_registry2 == label2 @pytest.mark.parametrize("load_registries", [False]) @@ -298,6 +319,8 @@ async def test_loading_label_from_storage( "icon": "mdi:test", "label_id": "one", "name": "One", + "created_at": "2024-01-01T00:00:00+00:00", + "modified_at": "2024-02-01T00:00:00+00:00", } ] }, @@ -489,3 +512,52 @@ async def test_async_update_thread_safety( await hass.async_add_executor_job( partial(label_registry.async_update, any_label.label_id, name="new name") ) + + +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_from_1_1( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.1.""" + hass_storage[lr.STORAGE_KEY] = { + "version": 1, + "data": { + 
"labels": [ + { + "color": None, + "description": None, + "icon": None, + "label_id": "12345A", + "name": "mock", + } + ] + }, + } + + await lr.async_load(hass) + registry = lr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_label_by_name("mock") + assert entry.label_id == "12345A" + + # Check we store migrated data + await flush_store(registry._store) + assert hass_storage[lr.STORAGE_KEY] == { + "version": lr.STORAGE_VERSION_MAJOR, + "minor_version": lr.STORAGE_VERSION_MINOR, + "key": lr.STORAGE_KEY, + "data": { + "labels": [ + { + "color": None, + "description": None, + "icon": None, + "label_id": "12345A", + "name": "mock", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + } + ] + }, + } diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index e62d9ffdbee..4d14abb9819 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -1,5 +1,6 @@ """Tests for the llm helpers.""" +from decimal import Decimal from unittest.mock import patch import pytest @@ -7,6 +8,7 @@ import voluptuous as vol from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.intent import async_register_timer_handler +from homeassistant.components.script.config import ScriptConfig from homeassistant.core import Context, HomeAssistant, State from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import ( @@ -17,9 +19,9 @@ from homeassistant.helpers import ( floor_registry as fr, intent, llm, + selector, ) from homeassistant.setup import async_setup_component -from homeassistant.util import yaml from tests.common import MockConfigEntry @@ -402,10 +404,16 @@ async def test_assist_api_prompt( suggested_object_id="living_room", device_id=device.id, ) - hass.states.async_set(entry1.entity_id, "on", {"friendly_name": "Kitchen"}) + hass.states.async_set( + entry1.entity_id, + "on", + {"friendly_name": "Kitchen", 
"temperature": Decimal("0.9"), "humidity": 65}, + ) hass.states.async_set(entry2.entity_id, "on", {"friendly_name": "Living Room"}) - def create_entity(device: dr.DeviceEntry, write_state=True) -> None: + def create_entity( + device: dr.DeviceEntry, write_state=True, aliases: set[str] | None = None + ) -> None: """Create an entity for a device and track entity_id.""" entity = entity_registry.async_get_or_create( "light", @@ -415,6 +423,8 @@ async def test_assist_api_prompt( original_name=str(device.name or "Unnamed Device"), suggested_object_id=str(device.name or "unnamed_device"), ) + if aliases: + entity_registry.async_update_entity(entity.entity_id, aliases=aliases) if write_state: entity.write_unavailable_state(hass) @@ -426,7 +436,8 @@ async def test_assist_api_prompt( manufacturer="Test Manufacturer", model="Test Model", suggested_area="Test Area", - ) + ), + aliases={"my test light"}, ) for i in range(3): create_entity( @@ -499,81 +510,61 @@ async def test_assist_api_prompt( suggested_area="Test Area 2", ) ) - - exposed_entities = llm._get_exposed_entities(hass, llm_context.assistant) - assert exposed_entities == { - "light.1": { - "areas": "Test Area 2", - "names": "1", - "state": "unavailable", - }, - entry1.entity_id: { - "names": "Kitchen", - "state": "on", - }, - entry2.entity_id: { - "areas": "Test Area, Alternative name", - "names": "Living Room", - "state": "on", - }, - "light.test_device": { - "areas": "Test Area, Alternative name", - "names": "Test Device", - "state": "unavailable", - }, - "light.test_device_2": { - "areas": "Test Area 2", - "names": "Test Device 2", - "state": "unavailable", - }, - "light.test_device_3": { - "areas": "Test Area 2", - "names": "Test Device 3", - "state": "unavailable", - }, - "light.test_device_4": { - "areas": "Test Area 2", - "names": "Test Device 4", - "state": "unavailable", - }, - "light.test_service": { - "areas": "Test Area, Alternative name", - "names": "Test Service", - "state": "unavailable", - }, - 
"light.test_service_2": { - "areas": "Test Area, Alternative name", - "names": "Test Service", - "state": "unavailable", - }, - "light.test_service_3": { - "areas": "Test Area, Alternative name", - "names": "Test Service", - "state": "unavailable", - }, - "light.unnamed_device": { - "areas": "Test Area 2", - "names": "Unnamed Device", - "state": "unavailable", - }, - "script.test_script": { - "description": "This is a test script", - "names": "test_script", - "state": "off", - }, - } - exposed_entities_prompt = ( - "An overview of the areas and the devices in this smart home:\n" - + yaml.dump(exposed_entities) - ) + exposed_entities_prompt = """An overview of the areas and the devices in this smart home: +- names: Kitchen + domain: light + state: 'on' + attributes: + temperature: '0.9' + humidity: '65' +- names: Living Room + domain: light + state: 'on' + areas: Test Area, Alternative name +- names: Test Device, my test light + domain: light + state: unavailable + areas: Test Area, Alternative name +- names: Test Service + domain: light + state: unavailable + areas: Test Area, Alternative name +- names: Test Service + domain: light + state: unavailable + areas: Test Area, Alternative name +- names: Test Service + domain: light + state: unavailable + areas: Test Area, Alternative name +- names: Test Device 2 + domain: light + state: unavailable + areas: Test Area 2 +- names: Test Device 3 + domain: light + state: unavailable + areas: Test Area 2 +- names: Test Device 4 + domain: light + state: unavailable + areas: Test Area 2 +- names: Unnamed Device + domain: light + state: unavailable + areas: Test Area 2 +- names: '1' + domain: light + state: unavailable + areas: Test Area 2 +""" first_part_prompt = ( "When controlling Home Assistant always call the intent tools. " "Use HassTurnOn to lock and HassTurnOff to unlock a lock. " - "When controlling a device, prefer passing just its name and its domain " - "(what comes before the dot in its entity id). 
" + "When controlling a device, prefer passing just name and domain. " "When controlling an area, prefer passing just area name and domain." ) - no_timer_prompt = "This device does not support timers." + no_timer_prompt = "This device is not able to start timers." area_prompt = ( "When a user asks to turn on all devices of a specific type, " @@ -626,3 +617,445 @@ async def test_assist_api_prompt( {area_prompt} {exposed_entities_prompt}""" ) + + +async def test_script_tool( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test ScriptTool for the assist API.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + context = Context() + llm_context = llm.LLMContext( + platform="test_platform", + context=context, + user_prompt="test_text", + language="*", + assistant="conversation", + device_id=None, + ) + + # Create a script with a unique ID + assert await async_setup_component( + hass, + "script", + { + "script": { + "test_script": { + "description": "This is a test script", + "sequence": [], + "fields": { + "beer": {"description": "Number of beers", "required": True}, + "wine": {"selector": {"number": {"min": 0, "max": 3}}}, + "where": {"selector": {"area": {}}}, + "area_list": {"selector": {"area": {"multiple": True}}}, + "floor": {"selector": {"floor": {}}}, + "floor_list": {"selector": {"floor": {"multiple": True}}}, + "extra_field": {"selector": {"area": {}}}, + }, + }, + "unexposed_script": { + "sequence": [], + }, + } + }, + ) + async_expose_entity(hass, "conversation", "script.test_script", True) + + entity_registry.async_update_entity( + "script.test_script", name="script name", aliases={"script alias"} + ) + + area = area_registry.async_create("Living room") + floor = floor_registry.async_create("2") + + assert llm.SCRIPT_PARAMETERS_CACHE not in hass.data + + api = await 
llm.async_get_api(hass, "assist", llm_context) + + tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] + assert len(tools) == 1 + + tool = tools[0] + assert tool.name == "test_script" + assert ( + tool.description + == "This is a test script. Aliases: ['script name', 'script alias']" + ) + schema = { + vol.Required("beer", description="Number of beers"): cv.string, + vol.Optional("wine"): selector.NumberSelector({"min": 0, "max": 3}), + vol.Optional("where"): selector.AreaSelector(), + vol.Optional("area_list"): selector.AreaSelector({"multiple": True}), + vol.Optional("floor"): selector.FloorSelector(), + vol.Optional("floor_list"): selector.FloorSelector({"multiple": True}), + vol.Optional("extra_field"): selector.AreaSelector(), + } + assert tool.parameters.schema == schema + + assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { + "test_script": ( + "This is a test script. Aliases: ['script name', 'script alias']", + vol.Schema(schema), + ) + } + + tool_input = llm.ToolInput( + tool_name="test_script", + tool_args={ + "beer": "3", + "wine": 0, + "where": "Living room", + "area_list": ["Living room"], + "floor": "2", + "floor_list": ["2"], + }, + ) + + with patch("homeassistant.core.ServiceRegistry.async_call") as mock_service_call: + response = await api.async_call_tool(tool_input) + + mock_service_call.assert_awaited_once_with( + "script", + "turn_on", + { + "entity_id": "script.test_script", + "variables": { + "beer": "3", + "wine": 0, + "where": area.id, + "area_list": [area.id], + "floor": floor.floor_id, + "floor_list": [floor.floor_id], + }, + }, + context=context, + ) + assert response == {"success": True} + + # Test reload script with new parameters + config = { + "script": { + "test_script": ScriptConfig( + { + "description": "This is a new test script", + "sequence": [], + "mode": "single", + "max": 2, + "max_exceeded": "WARNING", + "trace": {}, + "fields": { + "beer": {"description": "Number of beers", "required": True}, + }, + } + 
) + } + } + + with patch( + "homeassistant.helpers.entity_component.EntityComponent.async_prepare_reload", + return_value=config, + ): + await hass.services.async_call("script", "reload", blocking=True) + + assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == {} + + api = await llm.async_get_api(hass, "assist", llm_context) + + tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] + assert len(tools) == 1 + + tool = tools[0] + assert tool.name == "test_script" + assert ( + tool.description + == "This is a new test script. Aliases: ['script name', 'script alias']" + ) + schema = {vol.Required("beer", description="Number of beers"): cv.string} + assert tool.parameters.schema == schema + + assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { + "test_script": ( + "This is a new test script. Aliases: ['script name', 'script alias']", + vol.Schema(schema), + ) + } + + +async def test_script_tool_name(hass: HomeAssistant) -> None: + """Test that script tool name is not started with a digit.""" + assert await async_setup_component(hass, "homeassistant", {}) + context = Context() + llm_context = llm.LLMContext( + platform="test_platform", + context=context, + user_prompt="test_text", + language="*", + assistant="conversation", + device_id=None, + ) + + # Create a script with a unique ID + assert await async_setup_component( + hass, + "script", + { + "script": { + "123456": { + "description": "This is a test script", + "sequence": [], + "fields": { + "beer": {"description": "Number of beers", "required": True}, + }, + }, + } + }, + ) + async_expose_entity(hass, "conversation", "script.123456", True) + + api = await llm.async_get_api(hass, "assist", llm_context) + + tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] + assert len(tools) == 1 + + tool = tools[0] + assert tool.name == "_123456" + + +async def test_selector_serializer( + hass: HomeAssistant, llm_context: llm.LLMContext +) -> None: + """Test serialization of Selectors in Open API 
format.""" + api = await llm.async_get_api(hass, "assist", llm_context) + selector_serializer = api.custom_serializer + + assert selector_serializer(selector.ActionSelector()) == {"type": "string"} + assert selector_serializer(selector.AddonSelector()) == {"type": "string"} + assert selector_serializer(selector.AreaSelector()) == {"type": "string"} + assert selector_serializer(selector.AreaSelector({"multiple": True})) == { + "type": "array", + "items": {"type": "string"}, + } + assert selector_serializer(selector.AssistPipelineSelector()) == {"type": "string"} + assert selector_serializer( + selector.AttributeSelector({"entity_id": "sensor.test"}) + ) == {"type": "string"} + assert selector_serializer(selector.BackupLocationSelector()) == { + "type": "string", + "pattern": "^(?:\\/backup|\\w+)$", + } + assert selector_serializer(selector.BooleanSelector()) == {"type": "boolean"} + assert selector_serializer(selector.ColorRGBSelector()) == { + "type": "array", + "items": {"type": "number"}, + "maxItems": 3, + "minItems": 3, + "format": "RGB", + } + assert selector_serializer(selector.ColorTempSelector()) == {"type": "number"} + assert selector_serializer(selector.ColorTempSelector({"min": 0, "max": 1000})) == { + "type": "number", + "minimum": 0, + "maximum": 1000, + } + assert selector_serializer( + selector.ColorTempSelector({"min_mireds": 100, "max_mireds": 1000}) + ) == {"type": "number", "minimum": 100, "maximum": 1000} + assert selector_serializer(selector.ConditionSelector()) == { + "type": "array", + "items": {"nullable": True, "type": "string"}, + } + assert selector_serializer(selector.ConfigEntrySelector()) == {"type": "string"} + assert selector_serializer(selector.ConstantSelector({"value": "test"})) == { + "type": "string", + "enum": ["test"], + } + assert selector_serializer(selector.ConstantSelector({"value": 1})) == { + "type": "integer", + "enum": [1], + } + assert selector_serializer(selector.ConstantSelector({"value": True})) == { + "type": 
"boolean", + "enum": [True], + } + assert selector_serializer(selector.QrCodeSelector({"data": "test"})) == { + "type": "string" + } + assert selector_serializer(selector.ConversationAgentSelector()) == { + "type": "string" + } + assert selector_serializer(selector.CountrySelector()) == { + "type": "string", + "format": "ISO 3166-1 alpha-2", + } + assert selector_serializer( + selector.CountrySelector({"countries": ["GB", "FR"]}) + ) == {"type": "string", "enum": ["GB", "FR"]} + assert selector_serializer(selector.DateSelector()) == { + "type": "string", + "format": "date", + } + assert selector_serializer(selector.DateTimeSelector()) == { + "type": "string", + "format": "date-time", + } + assert selector_serializer(selector.DeviceSelector()) == {"type": "string"} + assert selector_serializer(selector.DeviceSelector({"multiple": True})) == { + "type": "array", + "items": {"type": "string"}, + } + assert selector_serializer(selector.DurationSelector()) == { + "type": "object", + "properties": { + "days": {"type": "number"}, + "hours": {"type": "number"}, + "minutes": {"type": "number"}, + "seconds": {"type": "number"}, + "milliseconds": {"type": "number"}, + }, + "required": [], + } + assert selector_serializer(selector.EntitySelector()) == { + "type": "string", + "format": "entity_id", + } + assert selector_serializer(selector.EntitySelector({"multiple": True})) == { + "type": "array", + "items": {"type": "string", "format": "entity_id"}, + } + assert selector_serializer(selector.FloorSelector()) == {"type": "string"} + assert selector_serializer(selector.FloorSelector({"multiple": True})) == { + "type": "array", + "items": {"type": "string"}, + } + assert selector_serializer(selector.IconSelector()) == {"type": "string"} + assert selector_serializer(selector.LabelSelector()) == {"type": "string"} + assert selector_serializer(selector.LabelSelector({"multiple": True})) == { + "type": "array", + "items": {"type": "string"}, + } + assert 
selector_serializer(selector.LanguageSelector()) == { + "type": "string", + "format": "RFC 5646", + } + assert selector_serializer( + selector.LanguageSelector({"languages": ["en", "fr"]}) + ) == {"type": "string", "enum": ["en", "fr"]} + assert selector_serializer(selector.LocationSelector()) == { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "radius": {"type": "number"}, + }, + "required": ["latitude", "longitude"], + } + assert selector_serializer(selector.MediaSelector()) == { + "type": "object", + "properties": { + "entity_id": {"type": "string"}, + "media_content_id": {"type": "string"}, + "media_content_type": {"type": "string"}, + "metadata": {"type": "object", "additionalProperties": True}, + }, + "required": ["entity_id", "media_content_id", "media_content_type"], + } + assert selector_serializer(selector.NumberSelector({"mode": "box"})) == { + "type": "number" + } + assert selector_serializer(selector.NumberSelector({"min": 30, "max": 100})) == { + "type": "number", + "minimum": 30, + "maximum": 100, + } + assert selector_serializer(selector.ObjectSelector()) == { + "type": "object", + "additionalProperties": True, + } + assert selector_serializer( + selector.SelectSelector( + { + "options": [ + {"value": "A", "label": "Letter A"}, + {"value": "B", "label": "Letter B"}, + {"value": "C", "label": "Letter C"}, + ] + } + ) + ) == {"type": "string", "enum": ["A", "B", "C"]} + assert selector_serializer( + selector.SelectSelector({"options": ["A", "B", "C"], "multiple": True}) + ) == { + "type": "array", + "items": {"type": "string", "enum": ["A", "B", "C"]}, + "uniqueItems": True, + } + assert selector_serializer( + selector.StateSelector({"entity_id": "sensor.test"}) + ) == {"type": "string"} + target_schema = selector_serializer(selector.TargetSelector()) + target_schema["properties"]["entity_id"]["anyOf"][0][ + "enum" + ].sort() # Order is not deterministic + assert target_schema == { + 
"type": "object", + "properties": { + "area_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "device_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "entity_id": { + "anyOf": [ + {"type": "string", "enum": ["all", "none"], "format": "lower"}, + {"type": "string", "nullable": True}, + {"type": "array", "items": {"type": "string"}}, + ] + }, + "floor_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "label_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + }, + "required": [], + } + + assert selector_serializer(selector.TemplateSelector()) == { + "type": "string", + "format": "jinja2", + } + assert selector_serializer(selector.TextSelector()) == {"type": "string"} + assert selector_serializer(selector.TextSelector({"multiple": True})) == { + "type": "array", + "items": {"type": "string"}, + } + assert selector_serializer(selector.ThemeSelector()) == {"type": "string"} + assert selector_serializer(selector.TimeSelector()) == { + "type": "string", + "format": "time", + } + assert selector_serializer(selector.TriggerSelector()) == { + "type": "array", + "items": {"type": "string"}, + } + assert selector_serializer(selector.FileSelector({"accept": ".txt"})) == { + "type": "string" + } diff --git a/tests/helpers/test_recorder.py b/tests/helpers/test_recorder.py index 94f30d812bc..8fb8450bcb8 100644 --- a/tests/helpers/test_recorder.py +++ b/tests/helpers/test_recorder.py @@ -18,18 +18,25 @@ async def test_async_migration_in_progress( ): assert recorder.async_migration_in_progress(hass) is False - # The recorder is not loaded - with patch( - "homeassistant.components.recorder.util.async_migration_in_progress", - 
return_value=True, - ): - assert recorder.async_migration_in_progress(hass) is False - - await async_setup_recorder_instance(hass) - - # The recorder is now loaded with patch( "homeassistant.components.recorder.util.async_migration_in_progress", return_value=True, ): assert recorder.async_migration_in_progress(hass) is True + + +async def test_async_migration_is_live( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test async_migration_in_progress wraps the recorder.""" + with patch( + "homeassistant.components.recorder.util.async_migration_is_live", + return_value=False, + ): + assert recorder.async_migration_is_live(hass) is False + + with patch( + "homeassistant.components.recorder.util.async_migration_is_live", + return_value=True, + ): + assert recorder.async_migration_is_live(hass) is True diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 08c196a04d3..1bc33140124 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -249,7 +249,7 @@ async def test_calling_service_basic( alias = "service step" sequence = cv.SCRIPT_SCHEMA( - {"alias": alias, "service": "test.script", "data": {"hello": "world"}} + {"alias": alias, "action": "test.script", "data": {"hello": "world"}} ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -352,13 +352,13 @@ async def test_calling_service_response_data( [ { "alias": "service step1", - "service": "test.script", + "action": "test.script", # Store the result of the service call as a variable "response_variable": "my_response", }, { "alias": "service step2", - "service": "test.script", + "action": "test.script", "data_template": { # Result of previous service call "key": "{{ my_response.data }}" @@ -441,7 +441,7 @@ async def test_service_response_data_errors( [ { "alias": "service step1", - "service": "test.script", + "action": "test.script", **params, }, ] @@ -458,7 +458,7 @@ async def 
test_data_template_with_templated_key(hass: HomeAssistant) -> None: calls = async_mock_service(hass, "test", "script") sequence = cv.SCRIPT_SCHEMA( - {"service": "test.script", "data_template": {"{{ hello_var }}": "world"}} + {"action": "test.script", "data_template": {"{{ hello_var }}": "world"}} ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -525,11 +525,11 @@ async def test_multiple_runs_no_wait(hass: HomeAssistant) -> None: sequence = cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data_template": {"fire": "{{ fire1 }}", "listen": "{{ listen1 }}"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"fire": "{{ fire2 }}", "listen": "{{ listen2 }}"}, }, ] @@ -605,7 +605,7 @@ async def test_stop_no_wait(hass: HomeAssistant, count) -> None: hass.services.async_register("test", "script", async_simulate_long_service) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) script_obj = script.Script( hass, sequence, @@ -3894,7 +3894,7 @@ async def test_parallel_error( sequence = cv.SCRIPT_SCHEMA( { "parallel": [ - {"service": "epic.failure"}, + {"action": "epic.failure"}, ] } ) @@ -3906,10 +3906,10 @@ async def test_parallel_error( assert len(events) == 0 expected_trace = { - "0": [{"error": "Service epic.failure not found"}], + "0": [{"error": "Action epic.failure not found"}], "0/parallel/0/sequence/0": [ { - "error": "Service epic.failure not found", + "error": "Action epic.failure not found", "result": { "params": { "domain": "epic", @@ -3946,7 +3946,7 @@ async def test_propagate_error_service_not_found(hass: HomeAssistant) -> None: await async_setup_component(hass, "homeassistant", {}) event = "test_event" events = async_capture_events(hass, event) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, 
{"event": event}]) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") with pytest.raises(exceptions.ServiceNotFound): @@ -3958,7 +3958,7 @@ async def test_propagate_error_service_not_found(hass: HomeAssistant) -> None: expected_trace = { "0": [ { - "error": "Service test.script not found", + "error": "Action test.script not found", "result": { "params": { "domain": "test", @@ -3980,7 +3980,7 @@ async def test_propagate_error_invalid_service_data(hass: HomeAssistant) -> None events = async_capture_events(hass, event) calls = async_mock_service(hass, "test", "script", vol.Schema({"text": str})) sequence = cv.SCRIPT_SCHEMA( - [{"service": "test.script", "data": {"text": 1}}, {"event": event}] + [{"action": "test.script", "data": {"text": 1}}, {"event": event}] ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -4022,7 +4022,7 @@ async def test_propagate_error_service_exception(hass: HomeAssistant) -> None: hass.services.async_register("test", "script", record_call) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") with pytest.raises(ValueError): @@ -4057,35 +4057,35 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": { "label_id": ["label_service_list_1", "label_service_list_2"] }, }, { - "service": "test.script", + "action": "test.script", "data": {"label_id": "{{ 'label_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"label_id": "label_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"label_id": "label_in_data_template"}, }, - {"service": "test.script", 
"data": {"without": "label_id"}}, + {"action": "test.script", "data": {"without": "label_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_choice_1_seq"}, } ], @@ -4094,7 +4094,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_choice_2_seq"}, } ], @@ -4102,7 +4102,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_default_seq"}, } ], @@ -4113,13 +4113,13 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_if_else"}, } ], @@ -4127,7 +4127,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_parallel"}, } ], @@ -4161,33 +4161,33 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"floor_id": ["floor_service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "{{ 'floor_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"floor_id": "floor_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"floor_id": "floor_in_data_template"}, }, - {"service": "test.script", "data": {"without": "floor_id"}}, + {"action": "test.script", "data": 
{"without": "floor_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_choice_1_seq"}, } ], @@ -4196,7 +4196,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_choice_2_seq"}, } ], @@ -4204,7 +4204,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_default_seq"}, } ], @@ -4215,13 +4215,13 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_if_else"}, } ], @@ -4229,7 +4229,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_parallel"}, } ], @@ -4262,33 +4262,33 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"area_id": ["area_service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"area_id": "{{ 'area_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"area_id": "area_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"area_id": "area_in_data_template"}, }, - {"service": "test.script", "data": {"without": "area_id"}}, + {"action": "test.script", "data": {"without": "area_id"}}, { "choose": [ { "conditions": "{{ true == false }}", 
"sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_choice_1_seq"}, } ], @@ -4297,7 +4297,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_choice_2_seq"}, } ], @@ -4305,7 +4305,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_default_seq"}, } ], @@ -4316,13 +4316,13 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_if_else"}, } ], @@ -4330,7 +4330,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_parallel"}, } ], @@ -4364,27 +4364,27 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": ["light.service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "{{ 'light.service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "entity_id": "light.direct_entity_referenced", }, { - "service": "test.script", + "action": "test.script", "target": {"entity_id": "light.entity_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"entity_id": "light.entity_in_data_template"}, }, { @@ -4392,7 +4392,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "entity_id": "sensor.condition", "state": "100", }, 
- {"service": "test.script", "data": {"without": "entity_id"}}, + {"action": "test.script", "data": {"without": "entity_id"}}, {"scene": "scene.hello"}, { "choose": [ @@ -4400,7 +4400,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "conditions": "{{ states.light.choice_1_cond == 'on' }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.choice_1_seq"}, } ], @@ -4413,7 +4413,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.choice_2_seq"}, } ], @@ -4421,7 +4421,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.default_seq"}, } ], @@ -4432,13 +4432,13 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.if_else"}, } ], @@ -4446,7 +4446,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.parallel"}, } ], @@ -4491,19 +4491,19 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: "domain": "switch", }, { - "service": "test.script", + "action": "test.script", "data": {"device_id": "data-string-id"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"device_id": "data-template-string-id"}, }, { - "service": "test.script", + "action": "test.script", "target": {"device_id": "target-string-id"}, }, { - "service": "test.script", + "action": "test.script", "target": {"device_id": ["target-list-id-1", "target-list-id-2"]}, }, { @@ -4515,7 +4515,7 @@ async def 
test_referenced_devices(hass: HomeAssistant) -> None: ), "sequence": [ { - "service": "test.script", + "action": "test.script", "target": { "device_id": "choice-1-seq-device-target" }, @@ -4530,7 +4530,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: }, "sequence": [ { - "service": "test.script", + "action": "test.script", "target": { "device_id": "choice-2-seq-device-target" }, @@ -4540,7 +4540,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "target": {"device_id": "default-device-target"}, } ], @@ -4549,13 +4549,13 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"device_id": "if-then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"device_id": "if-else"}, } ], @@ -4563,7 +4563,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "target": {"device_id": "parallel-device"}, } ], @@ -5104,7 +5104,7 @@ async def test_set_variable( sequence = cv.SCRIPT_SCHEMA( [ {"alias": alias, "variables": {"variable": "value"}}, - {"service": "test.script", "data": {"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5143,9 +5143,9 @@ async def test_set_redefines_variable( sequence = cv.SCRIPT_SCHEMA( [ {"variables": {"variable": "1"}}, - {"service": "test.script", "data": {"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, {"variables": {"variable": "{{ variable | int + 1 }}"}}, - {"service": "test.script", "data": {"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, ] ) script_obj = script.Script(hass, sequence, "test script", 
"test_domain") @@ -5214,7 +5214,7 @@ async def test_validate_action_config( } configs = { - cv.SCRIPT_ACTION_CALL_SERVICE: {"service": "light.turn_on"}, + cv.SCRIPT_ACTION_CALL_SERVICE: {"action": "light.turn_on"}, cv.SCRIPT_ACTION_DELAY: {"delay": 5}, cv.SCRIPT_ACTION_WAIT_TEMPLATE: { "wait_template": "{{ states.light.kitchen.state == 'on' }}" @@ -5349,7 +5349,7 @@ async def test_embedded_wait_for_trigger_in_automation(hass: HomeAssistant) -> N } ] }, - {"service": "test.script"}, + {"action": "test.script"}, ], } }, @@ -5704,12 +5704,12 @@ async def test_continue_on_error(hass: HomeAssistant) -> None: {"event": "test_event"}, { "continue_on_error": True, - "service": "broken.service", + "action": "broken.service", }, {"event": "test_event"}, { "continue_on_error": False, - "service": "broken.service", + "action": "broken.service", }, {"event": "test_event"}, ] @@ -5786,7 +5786,7 @@ async def test_continue_on_error_automation_issue(hass: HomeAssistant) -> None: [ { "continue_on_error": True, - "service": "service.not_found", + "action": "service.not_found", }, ] ) @@ -5799,7 +5799,7 @@ async def test_continue_on_error_automation_issue(hass: HomeAssistant) -> None: { "0": [ { - "error": "Service service.not_found not found", + "error": "Action service.not_found not found", "result": { "params": { "domain": "service", @@ -5834,7 +5834,7 @@ async def test_continue_on_error_unknown_error(hass: HomeAssistant) -> None: [ { "continue_on_error": True, - "service": "some.service", + "action": "some.service", }, ] ) @@ -5884,7 +5884,7 @@ async def test_disabled_actions( { "alias": "Hello", "enabled": enabled_value, - "service": "broken.service", + "action": "broken.service", }, { "alias": "World", @@ -6255,7 +6255,7 @@ async def test_disallowed_recursion( context = Context() calls = 0 alias = "event step" - sequence1 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_2"}) + sequence1 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_2"}) 
script1_obj = script.Script( hass, sequence1, @@ -6265,7 +6265,7 @@ async def test_disallowed_recursion( running_description="test script1", ) - sequence2 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_3"}) + sequence2 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_3"}) script2_obj = script.Script( hass, sequence2, @@ -6275,7 +6275,7 @@ async def test_disallowed_recursion( running_description="test script2", ) - sequence3 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_1"}) + sequence3 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_1"}) script3_obj = script.Script( hass, sequence3, @@ -6315,3 +6315,43 @@ async def test_disallowed_recursion( "- test_domain2.Test Name2\n" "- test_domain3.Test Name3" ) in caplog.text + + +async def test_calling_service_backwards_compatible( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test the calling of a service with the service instead of the action key.""" + context = Context() + calls = async_mock_service(hass, "test", "script") + + alias = "service step" + sequence = cv.SCRIPT_SCHEMA( + {"alias": alias, "service": "test.script", "data": {"hello": "{{ 'world' }}"}} + ) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + + await script_obj.async_run(context=context) + await hass.async_block_till_done() + + assert len(calls) == 1 + assert calls[0].context is context + assert calls[0].data.get("hello") == "world" + assert f"Executing step {alias}" in caplog.text + + assert_action_trace( + { + "0": [ + { + "result": { + "params": { + "domain": "test", + "service": "script", + "service_data": {"hello": "world"}, + "target": {}, + }, + "running_script": False, + } + } + ], + } + ) diff --git a/tests/helpers/test_script_variables.py b/tests/helpers/test_script_variables.py index ca942acdf66..3675c857279 100644 --- a/tests/helpers/test_script_variables.py +++ b/tests/helpers/test_script_variables.py @@ -3,7 +3,8 @@ 
import pytest from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, template +from homeassistant.exceptions import TemplateError +from homeassistant.helpers import config_validation as cv async def test_static_vars() -> None: @@ -110,5 +111,5 @@ async def test_template_vars_run_args_no_default(hass: HomeAssistant) -> None: async def test_template_vars_error(hass: HomeAssistant) -> None: """Test template vars.""" var = cv.SCRIPT_VARIABLES_SCHEMA({"hello": "{{ canont.work }}"}) - with pytest.raises(template.TemplateError): + with pytest.raises(TemplateError): var.async_render(hass, None) diff --git a/tests/helpers/test_selector.py b/tests/helpers/test_selector.py index 6db313baa24..de8c3555831 100644 --- a/tests/helpers/test_selector.py +++ b/tests/helpers/test_selector.py @@ -55,6 +55,8 @@ def _test_selector( config = {selector_type: schema} selector.validate_selector(config) selector_instance = selector.selector(config) + assert selector_instance == selector.selector(config) + assert selector_instance != 5 # We do not allow enums in the config, as they cannot serialize assert not any(isinstance(val, Enum) for val in selector_instance.config.values()) @@ -737,12 +739,13 @@ def test_attribute_selector_schema( ( {"seconds": 10}, {"days": 10}, # Days is allowed also if `enable_day` is not set + {"milliseconds": 500}, ), (None, {}), ), ( - {"enable_day": True}, - ({"seconds": 10}, {"days": 10}), + {"enable_day": True, "enable_millisecond": True}, + ({"seconds": 10}, {"days": 10}, {"milliseconds": 500}), (None, {}), ), ( diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index 60fe87db9d2..81cc189e1af 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -405,7 +405,7 @@ async def test_service_call(hass: HomeAssistant) -> None: """Test service call with templating.""" calls = async_mock_service(hass, "test_domain", "test_service") config = { - "service": "{{ 
'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data": { "hello": "{{ 'goodbye' }}", @@ -435,7 +435,7 @@ async def test_service_call(hass: HomeAssistant) -> None: } config = { - "service": "{{ 'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "target": { "area_id": ["area-42", "{{ 'area-51' }}"], "device_id": ["abcdef", "{{ 'fedcba' }}"], @@ -455,7 +455,7 @@ async def test_service_call(hass: HomeAssistant) -> None: } config = { - "service": "{{ 'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "target": "{{ var_target }}", } @@ -542,19 +542,19 @@ async def test_split_entity_string(hass: HomeAssistant) -> None: await service.async_call_from_config( hass, { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) await hass.async_block_till_done() - assert ["hello.world", "sensor.beer"] == calls[-1].data.get("entity_id") + assert calls[-1].data.get("entity_id") == ["hello.world", "sensor.beer"] async def test_not_mutate_input(hass: HomeAssistant) -> None: """Test for immutable input.""" async_mock_service(hass, "test_domain", "test_service") config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, @@ -581,7 +581,7 @@ async def test_fail_silently_if_no_service(mock_log, hass: HomeAssistant) -> Non await service.async_call_from_config(hass, {}) assert mock_log.call_count == 2 - await service.async_call_from_config(hass, {"service": "invalid"}) + await service.async_call_from_config(hass, {"action": "invalid"}) assert mock_log.call_count == 3 @@ -597,7 +597,7 @@ async def test_service_call_entry_id( assert entry.entity_id == "hello.world" config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", 
"target": {"entity_id": entry.id}, } @@ -613,7 +613,7 @@ async def test_service_call_all_none(hass: HomeAssistant, target) -> None: calls = async_mock_service(hass, "test_domain", "test_service") config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "target": {"entity_id": target}, } @@ -971,6 +971,105 @@ async def test_async_get_all_descriptions_dot_keys(hass: HomeAssistant) -> None: } +async def test_async_get_all_descriptions_filter(hass: HomeAssistant) -> None: + """Test async_get_all_descriptions with filters.""" + service_descriptions = """ + test_service: + target: + entity: + domain: alarm_control_panel + supported_features: + - alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME + fields: + temperature: + filter: + supported_features: + - alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME + attribute: + supported_color_modes: + - light.ColorMode.COLOR_TEMP + selector: + number: + advanced_stuff: + fields: + temperature: + filter: + supported_features: + - alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME + attribute: + supported_color_modes: + - light.ColorMode.COLOR_TEMP + selector: + number: + """ + + domain = "test_domain" + + hass.services.async_register(domain, "test_service", lambda call: None) + mock_integration(hass, MockModule(domain), top_level_files={"services.yaml"}) + assert await async_setup_component(hass, domain, {}) + + def load_yaml(fname, secrets=None): + with io.StringIO(service_descriptions) as file: + return parse_yaml(file) + + with ( + patch( + "homeassistant.helpers.service._load_services_files", + side_effect=service._load_services_files, + ) as proxy_load_services_files, + patch( + "homeassistant.util.yaml.loader.load_yaml", + side_effect=load_yaml, + ) as mock_load_yaml, + ): + descriptions = await service.async_get_all_descriptions(hass) + + mock_load_yaml.assert_called_once_with("services.yaml", None) + assert proxy_load_services_files.mock_calls[0][1][1] == 
unordered( + [ + await async_get_integration(hass, domain), + ] + ) + + test_service_schema = { + "description": "", + "fields": { + "advanced_stuff": { + "fields": { + "temperature": { + "filter": { + "attribute": {"supported_color_modes": ["color_temp"]}, + "supported_features": [1], + }, + "selector": {"number": None}, + }, + }, + }, + "temperature": { + "filter": { + "attribute": {"supported_color_modes": ["color_temp"]}, + "supported_features": [1], + }, + "selector": {"number": None}, + }, + }, + "name": "", + "target": { + "entity": [ + { + "domain": ["alarm_control_panel"], + "supported_features": [1], + }, + ], + }, + } + + assert descriptions == { + "test_domain": {"test_service": test_service_schema}, + } + + async def test_async_get_all_descriptions_failing_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -1693,10 +1792,10 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) - assert ["test_domain.test_1", "test_domain.test_3"] == [ + assert [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) - ] + ] == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( "test", @@ -1704,10 +1803,10 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) - assert ["test_domain.test_3"] == [ + assert [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) - ] + ] == ["test_domain.test_3"] assert ( await service.async_extract_entities( @@ -1731,10 +1830,10 @@ async def test_extract_from_service_empty_if_no_entity_id(hass: HomeAssistant) - ] call = ServiceCall("test", "service") - assert [] == [ + assert [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) - ] + ] == [] async def 
test_extract_from_service_filter_out_non_existing_entities( @@ -1752,10 +1851,10 @@ async def test_extract_from_service_filter_out_non_existing_entities( {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) - assert ["test_domain.test_2"] == [ + assert [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) - ] + ] == ["test_domain.test_2"] async def test_extract_from_service_area_id( diff --git a/tests/helpers/test_significant_change.py b/tests/helpers/test_significant_change.py index f9dca5b6034..577ea5907e5 100644 --- a/tests/helpers/test_significant_change.py +++ b/tests/helpers/test_significant_change.py @@ -1,5 +1,8 @@ """Test significant change helper.""" +from types import MappingProxyType +from typing import Any + import pytest from homeassistant.components.sensor import SensorDeviceClass @@ -67,8 +70,14 @@ async def test_significant_change_extra( assert checker.async_is_significant_change(State(ent_id, "100", attrs), extra_arg=1) def extra_significant_check( - hass, old_state, old_attrs, old_extra_arg, new_state, new_attrs, new_extra_arg - ): + hass: HomeAssistant, + old_state: str, + old_attrs: dict | MappingProxyType, + old_extra_arg: Any, + new_state: str, + new_attrs: dict | MappingProxyType, + new_extra_arg: Any, + ) -> bool | None: return old_extra_arg != new_extra_arg checker.extra_significant_check = extra_significant_check diff --git a/tests/helpers/test_singleton.py b/tests/helpers/test_singleton.py index dcda1e2db3a..4722c58dc9f 100644 --- a/tests/helpers/test_singleton.py +++ b/tests/helpers/test_singleton.py @@ -1,9 +1,11 @@ """Test singleton helper.""" +from typing import Any from unittest.mock import Mock import pytest +from homeassistant.core import HomeAssistant from homeassistant.helpers import singleton @@ -14,11 +16,11 @@ def mock_hass(): @pytest.mark.parametrize("result", [object(), {}, []]) -async def test_singleton_async(mock_hass, result) -> None: +async def 
test_singleton_async(mock_hass: HomeAssistant, result: Any) -> None: """Test singleton with async function.""" @singleton.singleton("test_key") - async def something(hass): + async def something(hass: HomeAssistant) -> Any: return result result1 = await something(mock_hass) @@ -30,11 +32,11 @@ async def test_singleton_async(mock_hass, result) -> None: @pytest.mark.parametrize("result", [object(), {}, []]) -def test_singleton(mock_hass, result) -> None: +def test_singleton(mock_hass: HomeAssistant, result: Any) -> None: """Test singleton with function.""" @singleton.singleton("test_key") - def something(hass): + def something(hass: HomeAssistant) -> Any: return result result1 = something(mock_hass) diff --git a/tests/helpers/test_start.py b/tests/helpers/test_start.py index d9c6bbf441c..bd6b328a2c7 100644 --- a/tests/helpers/test_start.py +++ b/tests/helpers/test_start.py @@ -14,7 +14,7 @@ async def test_at_start_when_running_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -40,7 +40,7 @@ async def test_at_start_when_running_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -65,7 +65,7 @@ async def test_at_start_when_starting_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -88,7 +88,7 @@ async def test_at_start_when_starting_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -116,7 +116,7 @@ async def test_cancelling_at_start_when_running( calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ 
-137,7 +137,7 @@ async def test_cancelling_at_start_when_starting(hass: HomeAssistant) -> None: calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -156,7 +156,7 @@ async def test_at_started_when_running_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -181,7 +181,7 @@ async def test_at_started_when_running_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -205,7 +205,7 @@ async def test_at_started_when_starting_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -231,7 +231,7 @@ async def test_at_started_when_starting_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -263,7 +263,7 @@ async def test_cancelling_at_started_when_running( calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -284,7 +284,7 @@ async def test_cancelling_at_started_when_starting(hass: HomeAssistant) -> None: calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 26e4f986592..3123c01f500 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -17,6 +17,7 @@ import orjson import pytest import voluptuous as vol +from homeassistant import config_entries from homeassistant.components import group from homeassistant.const import ( 
ATTR_UNIT_OF_MEASUREMENT, @@ -3990,6 +3991,48 @@ async def test_device_attr( assert info.rate_limit is None +async def test_config_entry_attr(hass: HomeAssistant) -> None: + """Test config entry attr.""" + info = { + "domain": "mock_light", + "title": "mock title", + "source": config_entries.SOURCE_BLUETOOTH, + "disabled_by": config_entries.ConfigEntryDisabler.USER, + } + config_entry = MockConfigEntry(**info) + config_entry.add_to_hass(hass) + + info["state"] = config_entries.ConfigEntryState.NOT_LOADED + + for key, value in info.items(): + tpl = template.Template( + "{{ config_entry_attr('" + config_entry.entry_id + "', '" + key + "') }}", + hass, + ) + assert tpl.async_render(parse_result=False) == str(value) + + for config_entry_id, key in ( + (config_entry.entry_id, "invalid_key"), + (56, "domain"), + ): + with pytest.raises(TemplateError): + template.Template( + "{{ config_entry_attr(" + + json.dumps(config_entry_id) + + ", '" + + key + + "') }}", + hass, + ).async_render() + + assert ( + template.Template( + "{{ config_entry_attr('invalid_id', 'domain') }}", hass + ).async_render(parse_result=False) + == "None" + ) + + async def test_issues(hass: HomeAssistant, issue_registry: ir.IssueRegistry) -> None: """Test issues function.""" # Test no issues diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index 8633bf862a5..d450d924f1f 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -13,7 +13,11 @@ import requests from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import CoreState, HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) from homeassistant.helpers import update_coordinator from homeassistant.util.dt import utcnow @@ -525,11 +529,19 @@ async def 
test_stop_refresh_on_ha_stop( @pytest.mark.parametrize( "err_msg", - KNOWN_ERRORS, + [ + *KNOWN_ERRORS, + (Exception(), Exception, "Unknown exception"), + ], +) +@pytest.mark.parametrize( + "method", + ["update_method", "setup_method"], ) async def test_async_config_entry_first_refresh_failure( err_msg: tuple[Exception, type[Exception], str], crd: update_coordinator.DataUpdateCoordinator[int], + method: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test async_config_entry_first_refresh raises ConfigEntryNotReady on failure. @@ -538,7 +550,7 @@ async def test_async_config_entry_first_refresh_failure( will be caught by config_entries.async_setup which will log it with a decreasing level of logging once the first message is logged. """ - crd.update_method = AsyncMock(side_effect=err_msg[0]) + setattr(crd, method, AsyncMock(side_effect=err_msg[0])) with pytest.raises(ConfigEntryNotReady): await crd.async_config_entry_first_refresh() @@ -548,13 +560,49 @@ async def test_async_config_entry_first_refresh_failure( assert err_msg[2] not in caplog.text +@pytest.mark.parametrize( + "err_msg", + [ + (ConfigEntryError(), ConfigEntryError, "Config entry error"), + (ConfigEntryAuthFailed(), ConfigEntryAuthFailed, "Config entry error"), + ], +) +@pytest.mark.parametrize( + "method", + ["update_method", "setup_method"], +) +async def test_async_config_entry_first_refresh_failure_passed_through( + err_msg: tuple[Exception, type[Exception], str], + crd: update_coordinator.DataUpdateCoordinator[int], + method: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test async_config_entry_first_refresh passes through ConfigEntryError & ConfigEntryAuthFailed. + + Verify we do not log the exception since it + will be caught by config_entries.async_setup which will log it with + a decreasing level of logging once the first message is logged. 
+ """ + setattr(crd, method, AsyncMock(side_effect=err_msg[0])) + + with pytest.raises(err_msg[1]): + await crd.async_config_entry_first_refresh() + + assert crd.last_update_success is False + assert isinstance(crd.last_exception, err_msg[1]) + assert err_msg[2] not in caplog.text + + async def test_async_config_entry_first_refresh_success( crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture ) -> None: """Test first refresh successfully.""" + + crd.setup_method = AsyncMock() await crd.async_config_entry_first_refresh() assert crd.last_update_success is True + crd.setup_method.assert_called_once() async def test_not_schedule_refresh_if_system_option_disable_polling( diff --git a/tests/patch_recorder.py b/tests/patch_recorder.py new file mode 100644 index 00000000000..4993e84fc30 --- /dev/null +++ b/tests/patch_recorder.py @@ -0,0 +1,27 @@ +"""Patch recorder related functions.""" + +from __future__ import annotations + +from contextlib import contextmanager +import sys + +# Patch recorder util session scope +from homeassistant.helpers import recorder as recorder_helper # noqa: E402 + +# Make sure homeassistant.components.recorder.util is not already imported +assert "homeassistant.components.recorder.util" not in sys.modules + +real_session_scope = recorder_helper.session_scope + + +@contextmanager +def _session_scope_wrapper(*args, **kwargs): + """Make session_scope patchable. + + This function will be imported by recorder modules. 
+ """ + with real_session_scope(*args, **kwargs) as ses: + yield ses + + +recorder_helper.session_scope = _session_scope_wrapper diff --git a/tests/patch_time.py b/tests/patch_time.py index c8052b3b8ac..362296ab8b2 100644 --- a/tests/patch_time.py +++ b/tests/patch_time.py @@ -5,9 +5,7 @@ from __future__ import annotations import datetime import time -from homeassistant import runner, util -from homeassistant.helpers import event as event_helper -from homeassistant.util import dt as dt_util +# Do not add any Home Assistant import here def _utcnow() -> datetime.datetime: @@ -20,9 +18,21 @@ def _monotonic() -> float: return time.monotonic() -# Replace partial functions which are not found by freezegun +# Before importing any other Home Assistant functionality, import and replace +# partial dt_util.utcnow with a regular function which can be found by freezegun +from homeassistant import util # noqa: E402 +from homeassistant.util import dt as dt_util # noqa: E402 + dt_util.utcnow = _utcnow # type: ignore[assignment] -event_helper.time_tracker_utcnow = _utcnow # type: ignore[assignment] util.utcnow = _utcnow # type: ignore[assignment] + +# Import other Home Assistant functionality which we need to patch +from homeassistant import runner # noqa: E402 +from homeassistant.helpers import event as event_helper # noqa: E402 + +# Replace partial functions which are not found by freezegun +event_helper.time_tracker_utcnow = _utcnow # type: ignore[assignment] + +# Replace bound methods which are not found by freezegun runner.monotonic = _monotonic # type: ignore[assignment] diff --git a/tests/pylint/test_enforce_type_hints.py b/tests/pylint/test_enforce_type_hints.py index 5b1c494568d..b1692d1d60d 100644 --- a/tests/pylint/test_enforce_type_hints.py +++ b/tests/pylint/test_enforce_type_hints.py @@ -55,6 +55,7 @@ def test_regex_get_module_platform( ("list[dict[str, Any]]", 1, ("list", "dict[str, Any]")), ("tuple[bytes | None, str | None]", 2, ("tuple", "bytes | None", "str | 
None")), ("Callable[[], TestServer]", 2, ("Callable", "[]", "TestServer")), + ("pytest.CaptureFixture[str]", 1, ("pytest.CaptureFixture", "str")), ], ) def test_regex_x_of_y_i( @@ -1264,6 +1265,7 @@ def test_pytest_fixture(linter: UnittestLinter, type_hint_checker: BaseChecker) def sample_fixture( #@ hass: HomeAssistant, caplog: pytest.LogCaptureFixture, + capsys: pytest.CaptureFixture[str], aiohttp_server: Callable[[], TestServer], unused_tcp_port_factory: Callable[[], int], enable_custom_integrations: None, diff --git a/tests/scripts/test_auth.py b/tests/scripts/test_auth.py index f497751a4d7..e52a2cc6567 100644 --- a/tests/scripts/test_auth.py +++ b/tests/scripts/test_auth.py @@ -1,6 +1,8 @@ """Test the auth script to manage local users.""" +import argparse from asyncio import AbstractEventLoop +from collections.abc import Generator import logging from typing import Any from unittest.mock import Mock, patch @@ -15,7 +17,7 @@ from tests.common import register_auth_provider @pytest.fixture(autouse=True) -def reset_log_level(): +def reset_log_level() -> Generator[None]: """Reset log level after each test case.""" logger = logging.getLogger("homeassistant.core") orig_level = logger.level @@ -24,7 +26,7 @@ def reset_log_level(): @pytest.fixture -def provider(hass): +def provider(hass: HomeAssistant) -> hass_auth.HassAuthProvider: """Home Assistant auth provider.""" provider = hass.loop.run_until_complete( register_auth_provider(hass, {"type": "homeassistant"}) @@ -33,7 +35,11 @@ def provider(hass): return provider -async def test_list_user(hass: HomeAssistant, provider, capsys) -> None: +async def test_list_user( + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], +) -> None: """Test we can list users.""" data = provider.data data.add_auth("test-user", "test-pass") @@ -47,7 +53,10 @@ async def test_list_user(hass: HomeAssistant, provider, capsys) -> None: async def test_add_user( - hass: HomeAssistant, provider, 
capsys, hass_storage: dict[str, Any] + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], + hass_storage: dict[str, Any], ) -> None: """Test we can add a user.""" data = provider.data @@ -64,7 +73,11 @@ async def test_add_user( data.validate_login("paulus", "test-pass") -async def test_validate_login(hass: HomeAssistant, provider, capsys) -> None: +async def test_validate_login( + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], +) -> None: """Test we can validate a user login.""" data = provider.data data.add_auth("test-user", "test-pass") @@ -89,7 +102,10 @@ async def test_validate_login(hass: HomeAssistant, provider, capsys) -> None: async def test_change_password( - hass: HomeAssistant, provider, capsys, hass_storage: dict[str, Any] + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], + hass_storage: dict[str, Any], ) -> None: """Test we can change a password.""" data = provider.data @@ -108,7 +124,10 @@ async def test_change_password( async def test_change_password_invalid_user( - hass: HomeAssistant, provider, capsys, hass_storage: dict[str, Any] + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], + hass_storage: dict[str, Any], ) -> None: """Test changing password of non-existing user.""" data = provider.data @@ -130,7 +149,9 @@ def test_parsing_args(event_loop: AbstractEventLoop) -> None: """Test we parse args correctly.""" called = False - async def mock_func(hass, provider, args2): + async def mock_func( + hass: HomeAssistant, provider: hass_auth.AuthProvider, args2: argparse.Namespace + ) -> None: """Mock function to be called.""" nonlocal called called = True diff --git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index bfb583ba8db..136749dfb14 100644 --- a/tests/snapshots/test_config_entries.ambr +++ 
b/tests/snapshots/test_config_entries.ambr @@ -1,12 +1,14 @@ # serializer version: 1 # name: test_as_dict dict({ + 'created_at': '2024-02-14T12:00:00+00:00', 'data': dict({ }), 'disabled_by': None, 'domain': 'test', 'entry_id': 'mock-entry', 'minor_version': 1, + 'modified_at': '2024-02-14T12:00:00+00:00', 'options': dict({ }), 'pref_disable_new_entities': False, diff --git a/tests/syrupy.py b/tests/syrupy.py index e5bbf017bb3..0bdbcf99e2b 100644 --- a/tests/syrupy.py +++ b/tests/syrupy.py @@ -12,13 +12,7 @@ import attr import attrs from syrupy.extensions.amber import AmberDataSerializer, AmberSnapshotExtension from syrupy.location import PyTestLocation -from syrupy.types import ( - PropertyFilter, - PropertyMatcher, - PropertyPath, - SerializableData, - SerializedData, -) +from syrupy.types import PropertyFilter, PropertyMatcher, PropertyPath, SerializableData import voluptuous as vol import voluptuous_serialize @@ -90,7 +84,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): matcher: PropertyMatcher | None = None, path: PropertyPath = (), visited: set[Any] | None = None, - ) -> SerializedData: + ) -> str: """Pre-process data before serializing. This allows us to handle specific cases for Home Assistant data structures. 
@@ -111,7 +105,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): serializable_data = voluptuous_serialize.convert(data) elif isinstance(data, ConfigEntry): serializable_data = cls._serializable_config_entry(data) - elif dataclasses.is_dataclass(data): + elif dataclasses.is_dataclass(type(data)): serializable_data = dataclasses.asdict(data) elif isinstance(data, IntFlag): # The repr of an enum.IntFlag has changed between Python 3.10 and 3.11 @@ -120,7 +114,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): else: serializable_data = data with suppress(TypeError): - if attr.has(data): + if attr.has(type(data)): serializable_data = attrs.asdict(data) return super()._serialize( @@ -136,14 +130,15 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): @classmethod def _serializable_area_registry_entry(cls, data: ar.AreaEntry) -> SerializableData: """Prepare a Home Assistant area registry entry for serialization.""" - serialized = AreaRegistryEntrySnapshot(attrs.asdict(data) | {"id": ANY}) + serialized = AreaRegistryEntrySnapshot(dataclasses.asdict(data) | {"id": ANY}) serialized.pop("_json_repr") return serialized @classmethod def _serializable_config_entry(cls, data: ConfigEntry) -> SerializableData: """Prepare a Home Assistant config entry for serialization.""" - return ConfigEntrySnapshot(data.as_dict() | {"entry_id": ANY}) + entry = ConfigEntrySnapshot(data.as_dict() | {"entry_id": ANY}) + return cls._remove_created_and_modified_at(entry) @classmethod def _serializable_device_registry_entry( @@ -159,7 +154,18 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): ) if serialized["via_device_id"] is not None: serialized["via_device_id"] = ANY - return serialized + if serialized["primary_config_entry"] is not None: + serialized["primary_config_entry"] = ANY + return cls._remove_created_and_modified_at(serialized) + + @classmethod + def _remove_created_and_modified_at( + cls, data: SerializableData + ) -> SerializableData: + 
"""Remove created_at and modified_at from the data.""" + data.pop("created_at", None) + data.pop("modified_at", None) + return data @classmethod def _serializable_entity_registry_entry( @@ -176,7 +182,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): } ) serialized.pop("categories") - return serialized + return cls._remove_created_and_modified_at(serialized) @classmethod def _serializable_flow_result(cls, data: FlowResult) -> SerializableData: diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index ae77fbee217..dc2b096f595 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -5,6 +5,7 @@ import glob import importlib import os from pathlib import Path, PurePosixPath +import ssl import time from typing import Any from unittest.mock import Mock, patch @@ -44,7 +45,7 @@ async def test_protect_loop_debugger_sleep(caplog: pytest.LogCaptureFixture) -> return_value=frames, ), ): - time.sleep(0) + time.sleep(0) # noqa: ASYNC251 assert "Detected blocking call inside the event loop" not in caplog.text @@ -71,7 +72,7 @@ async def test_protect_loop_sleep() -> None: return_value=frames, ), ): - time.sleep(0) + time.sleep(0) # noqa: ASYNC251 async def test_protect_loop_sleep_get_current_frame_raises() -> None: @@ -97,7 +98,7 @@ async def test_protect_loop_sleep_get_current_frame_raises() -> None: return_value=frames, ), ): - time.sleep(0) + time.sleep(0) # noqa: ASYNC251 async def test_protect_loop_importlib_import_module_non_integration( @@ -211,7 +212,18 @@ async def test_protect_loop_open(caplog: pytest.LogCaptureFixture) -> None: block_async_io.enable() with ( contextlib.suppress(FileNotFoundError), - open("/proc/does_not_exist", encoding="utf8"), + open("/proc/does_not_exist", encoding="utf8"), # noqa: ASYNC230 + ): + pass + assert "Detected blocking call to open with args" not in caplog.text + + +async def test_protect_loop_path_open(caplog: pytest.LogCaptureFixture) -> None: + """Test opening a file in /proc 
is not reported.""" + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/proc/does_not_exist").open(encoding="utf8"), # noqa: ASYNC230 ): pass assert "Detected blocking call to open with args" not in caplog.text @@ -223,13 +235,78 @@ async def test_protect_open(caplog: pytest.LogCaptureFixture) -> None: block_async_io.enable() with ( contextlib.suppress(FileNotFoundError), - open("/config/data_not_exist", encoding="utf8"), + open("/config/data_not_exist", encoding="utf8"), # noqa: ASYNC230 ): pass assert "Detected blocking call to open with args" in caplog.text +async def test_protect_path_open(caplog: pytest.LogCaptureFixture) -> None: + """Test opening a file in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").open(encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to open with args" in caplog.text + + +async def test_protect_path_read_bytes(caplog: pytest.LogCaptureFixture) -> None: + """Test reading file bytes in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").read_bytes(), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to read_bytes with args" in caplog.text + + +async def test_protect_path_read_text(caplog: pytest.LogCaptureFixture) -> None: + """Test reading a file text in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").read_text(encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to read_text with args" in caplog.text + + +async def test_protect_path_write_bytes(caplog: pytest.LogCaptureFixture) -> None: + """Test writing file 
bytes in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data/not/exist").write_bytes(b"xxx"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to write_bytes with args" in caplog.text + + +async def test_protect_path_write_text(caplog: pytest.LogCaptureFixture) -> None: + """Test writing file text in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data/not/exist").write_text("xxx", encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to write_text with args" in caplog.text + + async def test_enable_multiple_times(caplog: pytest.LogCaptureFixture) -> None: """Test trying to enable multiple times.""" with patch.object(block_async_io, "_IN_TESTS", False): @@ -253,7 +330,7 @@ async def test_protect_open_path(path: Any, caplog: pytest.LogCaptureFixture) -> """Test opening a file by path in the event loop logs.""" with patch.object(block_async_io, "_IN_TESTS", False): block_async_io.enable() - with contextlib.suppress(FileNotFoundError), open(path, encoding="utf8"): + with contextlib.suppress(FileNotFoundError), open(path, encoding="utf8"): # noqa: ASYNC230 pass assert "Detected blocking call to open with args" in caplog.text @@ -330,13 +407,48 @@ async def test_protect_loop_walk( assert "Detected blocking call to walk with args" not in caplog.text +async def test_protect_loop_load_default_certs( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_default_certs calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + assert "Detected blocking call to load_default_certs" in caplog.text + assert context + + +async def 
test_protect_loop_load_verify_locations( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_verify_locations calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + with pytest.raises(OSError): + context.load_verify_locations("/dev/null") + assert "Detected blocking call to load_verify_locations" in caplog.text + + +async def test_protect_loop_load_cert_chain( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_cert_chain calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + with pytest.raises(OSError): + context.load_cert_chain("/dev/null") + assert "Detected blocking call to load_cert_chain" in caplog.text + + async def test_open_calls_ignored_in_tests(caplog: pytest.LogCaptureFixture) -> None: """Test opening a file in tests is ignored.""" assert block_async_io._IN_TESTS block_async_io.enable() with ( contextlib.suppress(FileNotFoundError), - open("/config/data_not_exist", encoding="utf8"), + open("/config/data_not_exist", encoding="utf8"), # noqa: ASYNC230 ): pass diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index ca864006852..278bfc631fd 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -1,7 +1,7 @@ """Test the bootstrapping.""" import asyncio -from collections.abc import Iterable +from collections.abc import Generator, Iterable import contextlib import glob import logging @@ -11,19 +11,21 @@ from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant import bootstrap, loader, runner import homeassistant.config as config_util from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DEBUG, SIGNAL_BOOTSTRAP_INTEGRATIONS 
+from homeassistant.const import ( + BASE_PLATFORMS, + CONF_DEBUG, + SIGNAL_BOOTSTRAP_INTEGRATIONS, +) from homeassistant.core import CoreState, HomeAssistant, async_get_hass, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.translation import async_translations_loaded from homeassistant.helpers.typing import ConfigType from homeassistant.loader import Integration -from homeassistant.setup import BASE_PLATFORMS from .common import ( MockConfigEntry, @@ -70,7 +72,7 @@ def mock_http_start_stop() -> Generator[None]: yield -@patch("homeassistant.bootstrap.async_enable_logging", Mock()) +@patch("homeassistant.bootstrap.async_enable_logging", AsyncMock()) async def test_home_assistant_core_config_validation(hass: HomeAssistant) -> None: """Test if we pass in wrong information for HA conf.""" # Extensive HA conf validation testing is done @@ -94,10 +96,10 @@ async def test_async_enable_logging( side_effect=OSError, ), ): - bootstrap.async_enable_logging(hass) + await bootstrap.async_enable_logging(hass) mock_async_activate_log_queue_handler.assert_called_once() mock_async_activate_log_queue_handler.reset_mock() - bootstrap.async_enable_logging( + await bootstrap.async_enable_logging( hass, log_rotate_days=5, log_file="test.log", @@ -141,7 +143,7 @@ async def test_config_does_not_turn_off_debug(hass: HomeAssistant) -> None: @pytest.mark.parametrize("hass_config", [{"frontend": {}}]) @pytest.mark.usefixtures("mock_hass_config") async def test_asyncio_debug_on_turns_hass_debug_on( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -434,9 +436,6 @@ async def test_setup_frontend_before_recorder(hass: HomeAssistant) -> None: MockModule( domain="recorder", async_setup=gen_domain_setup("recorder"), - partial_manifest={ - "after_dependencies": ["http"], 
- }, ), ) @@ -598,7 +597,7 @@ def mock_is_virtual_env() -> Generator[Mock]: @pytest.fixture -def mock_enable_logging() -> Generator[Mock]: +def mock_enable_logging() -> Generator[AsyncMock]: """Mock enable logging.""" with patch("homeassistant.bootstrap.async_enable_logging") as enable_logging: yield enable_logging @@ -634,7 +633,7 @@ def mock_ensure_config_exists() -> Generator[AsyncMock]: @pytest.mark.parametrize("hass_config", [{"browser": {}, "frontend": {}}]) @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -687,7 +686,7 @@ async def test_setup_hass( @pytest.mark.parametrize("hass_config", [{"browser": {}, "frontend": {}}]) @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass_takes_longer_than_log_slow_startup( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -728,7 +727,7 @@ async def test_setup_hass_takes_longer_than_log_slow_startup( async def test_setup_hass_invalid_yaml( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -755,7 +754,7 @@ async def test_setup_hass_invalid_yaml( async def test_setup_hass_config_dir_nonexistent( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -781,7 +780,7 @@ async def test_setup_hass_config_dir_nonexistent( async def test_setup_hass_recovery_mode( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -817,7 +816,7 @@ async def 
test_setup_hass_recovery_mode( @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass_safe_mode( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -852,7 +851,7 @@ async def test_setup_hass_safe_mode( @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass_recovery_mode_and_safe_mode( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -888,7 +887,7 @@ async def test_setup_hass_recovery_mode_and_safe_mode( @pytest.mark.parametrize("hass_config", [{"homeassistant": {"non-existing": 1}}]) @pytest.mark.usefixtures("mock_hass_config") async def test_setup_hass_invalid_core_config( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -927,7 +926,7 @@ async def test_setup_hass_invalid_core_config( ) @pytest.mark.usefixtures("mock_hass_config") async def test_setup_recovery_mode_if_no_frontend( - mock_enable_logging: Mock, + mock_enable_logging: AsyncMock, mock_is_virtual_env: Mock, mock_mount_local_lib_path: AsyncMock, mock_ensure_config_exists: AsyncMock, @@ -1327,6 +1326,34 @@ async def test_bootstrap_dependencies( ) +@pytest.mark.parametrize("load_registries", [False]) +async def test_bootstrap_dependency_not_found( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test setup when an integration has missing dependencies.""" + mock_integration( + hass, + MockModule("good_integration", dependencies=[]), + ) + # Simulate an integration with missing dependencies. While a core integration + # can't have missing dependencies thanks to checks by hassfest, there's no such + # guarantee for custom integrations. 
+ mock_integration( + hass, + MockModule("bad_integration", dependencies=["hahaha_crash_and_burn"]), + ) + + assert await bootstrap.async_from_config_dict( + {"good_integration": {}, "bad_integration": {}}, hass + ) + + assert "good_integration" in hass.config.components + assert "bad_integration" not in hass.config.components + + assert "Unable to resolve dependencies for bad_integration" in caplog.text + + async def test_pre_import_no_requirements(hass: HomeAssistant) -> None: """Test pre-imported and do not have any requirements.""" pre_imports = [ diff --git a/tests/test_config.py b/tests/test_config.py index 7f94317afea..c7039cabe8b 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -2,17 +2,18 @@ import asyncio from collections import OrderedDict +from collections.abc import Generator import contextlib import copy import logging import os +from pathlib import Path from typing import Any from unittest import mock from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator import voluptuous as vol from voluptuous import Invalid, MultipleInvalid import yaml @@ -28,15 +29,15 @@ from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, + CONF_PACKAGES, __version__, ) from homeassistant.core import ( - DOMAIN as HA_DOMAIN, + DOMAIN as HOMEASSISTANT_DOMAIN, ConfigSource, HomeAssistant, - HomeAssistantError, ) -from homeassistant.exceptions import ConfigValidationError +from homeassistant.exceptions import ConfigValidationError, HomeAssistantError from homeassistant.helpers import ( check_config, config_validation as cv, @@ -412,11 +413,10 @@ async def test_ensure_config_exists_creates_config(hass: HomeAssistant) -> None: async def test_ensure_config_exists_uses_existing_config(hass: HomeAssistant) -> None: """Test that calling ensure_config_exists uses existing config.""" - create_file(YAML_PATH) + await 
hass.async_add_executor_job(create_file, YAML_PATH) await config_util.async_ensure_config_exists(hass) - with open(YAML_PATH, encoding="utf8") as fp: - content = fp.read() + content = await hass.async_add_executor_job(Path(YAML_PATH).read_text) # File created with create_file are empty assert content == "" @@ -424,12 +424,11 @@ async def test_ensure_config_exists_uses_existing_config(hass: HomeAssistant) -> async def test_ensure_existing_files_is_not_overwritten(hass: HomeAssistant) -> None: """Test that calling async_create_default_config does not overwrite existing files.""" - create_file(SECRET_PATH) + await hass.async_add_executor_job(create_file, SECRET_PATH) await config_util.async_create_default_config(hass) - with open(SECRET_PATH, encoding="utf8") as fp: - content = fp.read() + content = await hass.async_add_executor_job(Path(SECRET_PATH).read_text) # File created with create_file are empty assert content == "" @@ -490,9 +489,10 @@ def test_load_yaml_config_preserves_key_order() -> None: fp.write("hello: 2\n") fp.write("world: 1\n") - assert [("hello", 2), ("world", 1)] == list( - config_util.load_yaml_config_file(YAML_PATH).items() - ) + assert list(config_util.load_yaml_config_file(YAML_PATH).items()) == [ + ("hello", 2), + ("world", 1), + ] async def test_create_default_config_returns_none_if_write_error( @@ -1070,10 +1070,8 @@ async def test_check_ha_config_file_wrong(mock_check, hass: HomeAssistant) -> No "hass_config", [ { - HA_DOMAIN: { - config_util.CONF_PACKAGES: { - "pack_dict": {"input_boolean": {"ib1": None}} - } + HOMEASSISTANT_DOMAIN: { + CONF_PACKAGES: {"pack_dict": {"input_boolean": {"ib1": None}}} }, "input_boolean": {"ib2": None}, "light": {"platform": "test"}, @@ -1088,7 +1086,7 @@ async def test_async_hass_config_yaml_merge( conf = await config_util.async_hass_config_yaml(hass) assert merge_log_err.call_count == 0 - assert conf[HA_DOMAIN].get(config_util.CONF_PACKAGES) is not None + assert conf[HOMEASSISTANT_DOMAIN].get(CONF_PACKAGES) 
is not None assert len(conf) == 3 assert len(conf["input_boolean"]) == 2 assert len(conf["light"]) == 1 @@ -1116,7 +1114,7 @@ async def test_merge(merge_log_err: MagicMock, hass: HomeAssistant) -> None: }, } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "input_boolean": {"ib2": None}, "light": {"platform": "test"}, "automation": [], @@ -1143,7 +1141,7 @@ async def test_merge_try_falsy(merge_log_err: MagicMock, hass: HomeAssistant) -> "pack_list2": {"light": OrderedDict()}, } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "automation": {"do": "something"}, "light": {"some": "light"}, } @@ -1166,7 +1164,7 @@ async def test_merge_new(merge_log_err: MagicMock, hass: HomeAssistant) -> None: "api": {}, }, } - config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}} + config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} await config_util.merge_packages_config(hass, config, packages) assert merge_log_err.call_count == 0 @@ -1186,7 +1184,7 @@ async def test_merge_type_mismatch( "pack_2": {"light": {"ib1": None}}, # light gets merged - ensure_list } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "input_boolean": {"ib2": None}, "input_select": [{"ib2": None}], "light": [{"platform": "two"}], @@ -1204,13 +1202,13 @@ async def test_merge_once_only_keys( ) -> None: """Test if we have a merge for a comp that may occur only once. 
Keys.""" packages = {"pack_2": {"api": None}} - config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "api": None} + config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": None} await config_util.merge_packages_config(hass, config, packages) assert config["api"] == OrderedDict() packages = {"pack_2": {"api": {"key_3": 3}}} config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": {"key_1": 1, "key_2": 2}, } await config_util.merge_packages_config(hass, config, packages) @@ -1219,7 +1217,7 @@ async def test_merge_once_only_keys( # Duplicate keys error packages = {"pack_2": {"api": {"key": 2}}} config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": {"key": 1}, } await config_util.merge_packages_config(hass, config, packages) @@ -1234,7 +1232,7 @@ async def test_merge_once_only_lists(hass: HomeAssistant) -> None: } } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": {"list_1": ["item_1"]}, } await config_util.merge_packages_config(hass, config, packages) @@ -1257,7 +1255,7 @@ async def test_merge_once_only_dictionaries(hass: HomeAssistant) -> None: } } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": {"dict_1": {"key_1": 1, "dict_1.1": {"key_1.1": 1.1}}}, } await config_util.merge_packages_config(hass, config, packages) @@ -1293,7 +1291,7 @@ async def test_merge_duplicate_keys( """Test if keys in dicts are duplicates.""" packages = {"pack_1": {"input_select": {"ib1": None}}} config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "input_select": {"ib1": 1}, } await config_util.merge_packages_config(hass, config, packages) @@ -1451,7 +1449,7 @@ async def test_merge_split_component_definition(hass: HomeAssistant) -> None: 
"pack_1": {"light one": {"l1": None}}, "pack_2": {"light two": {"l2": None}, "light three": {"l3": None}}, } - config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}} + config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} await config_util.merge_packages_config(hass, config, packages) assert len(config) == 4 @@ -2340,7 +2338,7 @@ async def test_packages_schema_validation_error( ] assert error_records == snapshot - assert len(config[HA_DOMAIN][config_util.CONF_PACKAGES]) == 0 + assert len(config[HOMEASSISTANT_DOMAIN][CONF_PACKAGES]) == 0 def test_extract_domain_configs() -> None: diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index cba7ad8f215..9983886ce44 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -3,16 +3,17 @@ from __future__ import annotations import asyncio +from collections.abc import Generator from datetime import timedelta from functools import cached_property import logging from typing import Any from unittest.mock import ANY, AsyncMock, Mock, patch +from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, loader from homeassistant.components import dhcp @@ -22,7 +23,12 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, ) -from homeassistant.core import DOMAIN as HA_DOMAIN, CoreState, HomeAssistant, callback +from homeassistant.core import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + CoreState, + HomeAssistant, + callback, +) from homeassistant.data_entry_flow import BaseServiceInfo, FlowResult, FlowResultType from homeassistant.exceptions import ( ConfigEntryAuthFailed, @@ -46,6 +52,7 @@ from .common import ( async_capture_events, async_fire_time_changed, async_get_persistent_notifications, + flush_store, mock_config_flow, mock_integration, mock_platform, @@ -526,13 
+533,13 @@ async def test_remove_entry_cancels_reauth( assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR issue_id = f"config_entry_reauth_test_{entry.entry_id}" - assert issue_registry.async_get_issue(HA_DOMAIN, issue_id) + assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) await manager.async_remove(entry.entry_id) flows = hass.config_entries.flow.async_progress_by_handler("test") assert len(flows) == 0 - assert not issue_registry.async_get_issue(HA_DOMAIN, issue_id) + assert not issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) async def test_remove_entry_handles_callback_error( @@ -577,7 +584,7 @@ async def test_remove_entry_raises( async def mock_unload_entry(hass, entry): """Mock unload entry function.""" - raise Exception("BROKEN") # pylint: disable=broad-exception-raised + raise Exception("BROKEN") # noqa: TRY002 mock_integration(hass, MockModule("comp", async_unload_entry=mock_unload_entry)) @@ -907,6 +914,7 @@ async def test_saving_and_loading( assert orig.as_dict() == loaded.as_dict() +@freeze_time("2024-02-14 12:00:00") async def test_as_dict(snapshot: SnapshotAssertion) -> None: """Test ConfigEntry.as_dict.""" @@ -1189,14 +1197,14 @@ async def test_reauth_issue( assert len(issue_registry.issues) == 1 issue_id = f"config_entry_reauth_test_{entry.entry_id}" - issue = issue_registry.async_get_issue(HA_DOMAIN, issue_id) + issue = issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) assert issue == ir.IssueEntry( active=True, breaks_in_ha_version=None, created=ANY, data={"flow_id": flows[0]["flow_id"]}, dismissed_version=None, - domain=HA_DOMAIN, + domain=HOMEASSISTANT_DOMAIN, is_fixable=False, is_persistent=False, issue_domain="test", @@ -1246,8 +1254,11 @@ async def test_loading_default_config(hass: HomeAssistant) -> None: assert len(manager.async_entries()) == 0 -async def test_updating_entry_data(manager: config_entries.ConfigEntries) -> None: +async def test_updating_entry_data( + manager: 
config_entries.ConfigEntries, freezer: FrozenDateTimeFactory +) -> None: """Test that we can update an entry data.""" + created = dt_util.utcnow() entry = MockConfigEntry( domain="test", data={"first": True}, @@ -1255,17 +1266,32 @@ async def test_updating_entry_data(manager: config_entries.ConfigEntries) -> Non ) entry.add_to_manager(manager) + assert len(manager.async_entries()) == 1 + assert manager.async_entries()[0] == entry + assert entry.created_at == created + assert entry.modified_at == created + + freezer.tick() + assert manager.async_update_entry(entry) is False assert entry.data == {"first": True} + assert entry.modified_at == created + assert manager.async_entries()[0].modified_at == created + + freezer.tick() + modified = dt_util.utcnow() assert manager.async_update_entry(entry, data={"second": True}) is True assert entry.data == {"second": True} + assert entry.modified_at == modified + assert manager.async_entries()[0].modified_at == modified async def test_updating_entry_system_options( - manager: config_entries.ConfigEntries, + manager: config_entries.ConfigEntries, freezer: FrozenDateTimeFactory ) -> None: """Test that we can update an entry data.""" + created = dt_util.utcnow() entry = MockConfigEntry( domain="test", data={"first": True}, @@ -1276,6 +1302,11 @@ async def test_updating_entry_system_options( assert entry.pref_disable_new_entities is True assert entry.pref_disable_polling is False + assert entry.created_at == created + assert entry.modified_at == created + + freezer.tick() + modified = dt_util.utcnow() manager.async_update_entry( entry, pref_disable_new_entities=False, pref_disable_polling=True @@ -1283,6 +1314,8 @@ async def test_updating_entry_system_options( assert entry.pref_disable_new_entities is False assert entry.pref_disable_polling is True + assert entry.created_at == created + assert entry.modified_at == modified async def test_update_entry_options_and_trigger_listener( @@ -5098,7 +5131,7 @@ async def 
test_hashable_non_string_unique_id( { "type": data_entry_flow.FlowResultType.ABORT, "reason": "single_instance_allowed", - "translation_domain": HA_DOMAIN, + "translation_domain": HOMEASSISTANT_DOMAIN, }, ), ], @@ -5296,7 +5329,7 @@ async def test_avoid_adding_second_config_entry_on_single_config_entry( ) assert result["type"] == data_entry_flow.FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" - assert result["translation_domain"] == HA_DOMAIN + assert result["translation_domain"] == HOMEASSISTANT_DOMAIN async def test_in_progress_get_canceled_when_entry_is_created( @@ -5903,3 +5936,67 @@ async def test_config_entry_late_platform_setup( "entry_id test2 cannot forward setup for light because it is " "not loaded in the ConfigEntryState.NOT_LOADED state" ) not in caplog.text + + +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_from_1_2( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.2.""" + hass_storage[config_entries.STORAGE_KEY] = { + "version": 1, + "minor_version": 2, + "data": { + "entries": [ + { + "data": {}, + "disabled_by": None, + "domain": "sun", + "entry_id": "0a8bd02d0d58c7debf5daf7941c9afe2", + "minor_version": 1, + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "import", + "title": "Sun", + "unique_id": None, + "version": 1, + }, + ] + }, + } + + manager = config_entries.ConfigEntries(hass, {}) + await manager.async_initialize() + + # Test data was loaded + entries = manager.async_entries() + assert len(entries) == 1 + + # Check we store migrated data + await flush_store(manager._store) + assert hass_storage[config_entries.STORAGE_KEY] == { + "version": config_entries.STORAGE_VERSION, + "minor_version": config_entries.STORAGE_VERSION_MINOR, + "key": config_entries.STORAGE_KEY, + "data": { + "entries": [ + { + "created_at": "1970-01-01T00:00:00+00:00", + "data": {}, + "disabled_by": None, + 
"domain": "sun", + "entry_id": "0a8bd02d0d58c7debf5daf7941c9afe2", + "minor_version": 1, + "modified_at": "1970-01-01T00:00:00+00:00", + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "import", + "title": "Sun", + "unique_id": None, + "version": 1, + }, + ] + }, + } diff --git a/tests/test_const.py b/tests/test_const.py index a6a2387b091..64ccb875cf5 100644 --- a/tests/test_const.py +++ b/tests/test_const.py @@ -15,7 +15,7 @@ from .common import ( def _create_tuples( - value: Enum | list[Enum], constant_prefix: str + value: type[Enum] | list[Enum], constant_prefix: str ) -> list[tuple[Enum, str]]: return [(enum, constant_prefix) for enum in value] diff --git a/tests/test_core.py b/tests/test_core.py index a1748638342..9ca57d1563f 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -9,6 +9,7 @@ import functools import gc import logging import os +from pathlib import Path import re from tempfile import TemporaryDirectory import threading @@ -423,11 +424,11 @@ async def test_async_get_hass_can_be_called(hass: HomeAssistant) -> None: try: if ha.async_get_hass() is hass: return True - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 except HomeAssistantError: return False - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 # Test scheduling a coroutine which calls async_get_hass via hass.async_create_task async def _async_create_task() -> None: @@ -920,6 +921,14 @@ def test_event_repr() -> None: ) +def test_event_origin_idx() -> None: + """Test the EventOrigin idx.""" + assert ha.EventOrigin.remote is ha.EventOrigin.remote + assert ha.EventOrigin.local is ha.EventOrigin.local + assert ha.EventOrigin.local.idx == 0 + assert ha.EventOrigin.remote.idx == 1 + + def test_event_as_dict() -> None: """Test an Event as dictionary.""" event_type = "some_type" @@ -1620,7 +1629,7 @@ async def 
test_serviceregistry_call_non_existing_with_blocking( hass: HomeAssistant, ) -> None: """Test non-existing with blocking.""" - with pytest.raises(ha.ServiceNotFound): + with pytest.raises(ServiceNotFound): await hass.services.async_call("test_domain", "i_do_not_exist", blocking=True) @@ -1706,7 +1715,7 @@ async def test_serviceregistry_service_that_not_exists(hass: HomeAssistant) -> N assert exc.value.domain == "test_do_not" assert exc.value.service == "exist" - assert str(exc.value) == "Service test_do_not.exist not found" + assert str(exc.value) == "Action test_do_not.exist not found" async def test_serviceregistry_async_service_raise_exception( @@ -1797,7 +1806,7 @@ async def test_services_call_return_response_requires_blocking( return_response=True, ) assert str(exc.value) == ( - "A non blocking service call with argument blocking=False " + "A non blocking action call with argument blocking=False " "can't be used together with argument return_response=True" ) @@ -1843,7 +1852,7 @@ async def test_serviceregistry_return_response_invalid( ("supports_response", "return_response", "expected_error"), [ (SupportsResponse.NONE, True, "does not return responses"), - (SupportsResponse.ONLY, False, "call requires responses"), + (SupportsResponse.ONLY, False, "action requires responses"), ], ) async def test_serviceregistry_return_response_arguments( @@ -2001,8 +2010,9 @@ async def test_config_is_allowed_path() -> None: config.allowlist_external_dirs = {os.path.realpath(tmp_dir)} test_file = os.path.join(tmp_dir, "test.jpg") - with open(test_file, "w", encoding="utf8") as tmp_file: - tmp_file.write("test") + await asyncio.get_running_loop().run_in_executor( + None, Path(test_file).write_text, "test" + ) valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")] for path in valid: @@ -2519,14 +2529,14 @@ async def test_reserving_states(hass: HomeAssistant) -> None: hass.states.async_set("light.bedroom", "on") assert hass.states.async_available("light.bedroom") is 
False - with pytest.raises(ha.HomeAssistantError): + with pytest.raises(HomeAssistantError): hass.states.async_reserve("light.bedroom") hass.states.async_remove("light.bedroom") assert hass.states.async_available("light.bedroom") is True hass.states.async_set("light.bedroom", "on") - with pytest.raises(ha.HomeAssistantError): + with pytest.raises(HomeAssistantError): hass.states.async_reserve("light.bedroom") assert hass.states.async_available("light.bedroom") is False @@ -2830,7 +2840,7 @@ async def test_state_change_events_context_id_match_state_time( hass: HomeAssistant, ) -> None: """Test last_updated, timed_fired, and the ulid all have the same time.""" - events = async_capture_events(hass, ha.EVENT_STATE_CHANGED) + events = async_capture_events(hass, EVENT_STATE_CHANGED) hass.states.async_set("light.bedroom", "on") await hass.async_block_till_done() state: State = hass.states.get("light.bedroom") @@ -2849,7 +2859,7 @@ async def test_state_change_events_match_time_with_limits_of_precision( a bit better than the precision of datetime.now() which is used for last_updated on some platforms. 
""" - events = async_capture_events(hass, ha.EVENT_STATE_CHANGED) + events = async_capture_events(hass, EVENT_STATE_CHANGED) hass.states.async_set("light.bedroom", "on") await hass.async_block_till_done() state: State = hass.states.get("light.bedroom") @@ -3221,7 +3231,7 @@ async def test_async_add_import_executor_job(hass: HomeAssistant) -> None: evt = threading.Event() loop = asyncio.get_running_loop() - def executor_func() -> None: + def executor_func() -> threading.Event: evt.set() return evt @@ -3385,24 +3395,24 @@ async def test_statemachine_report_state(hass: HomeAssistant) -> None: hass.states.async_set("light.bowl", "on", None, True) await hass.async_block_till_done() assert len(state_changed_events) == 1 - assert len(state_reported_events) == 2 + assert len(state_reported_events) == 1 hass.states.async_set("light.bowl", "off") await hass.async_block_till_done() assert len(state_changed_events) == 2 - assert len(state_reported_events) == 3 + assert len(state_reported_events) == 1 hass.states.async_remove("light.bowl") await hass.async_block_till_done() assert len(state_changed_events) == 3 - assert len(state_reported_events) == 4 + assert len(state_reported_events) == 1 unsub() hass.states.async_set("light.bowl", "on") await hass.async_block_till_done() assert len(state_changed_events) == 4 - assert len(state_reported_events) == 4 + assert len(state_reported_events) == 1 async def test_report_state_listener_restrictions(hass: HomeAssistant) -> None: diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py index 782f349f9f2..967b2565206 100644 --- a/tests/test_data_entry_flow.py +++ b/tests/test_data_entry_flow.py @@ -10,6 +10,7 @@ import voluptuous as vol from homeassistant import config_entries, data_entry_flow from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.helpers import config_validation as cv from homeassistant.util.decorator import Registry from .common import ( @@ -1075,3 +1076,25 @@ def 
test_deprecated_constants( import_and_test_deprecated_constant_enum( caplog, data_entry_flow, enum, "RESULT_TYPE_", "2025.1" ) + + +def test_section_in_serializer() -> None: + """Test section with custom_serializer.""" + assert cv.custom_serializer( + data_entry_flow.section( + vol.Schema( + { + vol.Optional("option_1", default=False): bool, + vol.Required("option_2"): int, + } + ), + {"collapsed": False}, + ) + ) == { + "expanded": True, + "schema": [ + {"default": False, "name": "option_1", "optional": True, "type": "boolean"}, + {"name": "option_2", "required": True, "type": "integer"}, + ], + "type": "expandable", + } diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 161214160aa..2885fa30036 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -602,12 +602,12 @@ async def test_discovery_requirements_ssdp(hass: HomeAssistant) -> None: ) as mock_process: await async_get_integration_with_requirements(hass, "ssdp_comp") - assert len(mock_process.mock_calls) == 3 + assert len(mock_process.mock_calls) == 2 assert mock_process.mock_calls[0][1][1] == ssdp.requirements assert { + mock_process.mock_calls[0][1][0], mock_process.mock_calls[1][1][0], - mock_process.mock_calls[2][1][0], - } == {"network", "recorder"} + } == {"network", "ssdp"} @pytest.mark.parametrize( @@ -631,7 +631,7 @@ async def test_discovery_requirements_zeroconf( ) as mock_process: await async_get_integration_with_requirements(hass, "comp") - assert len(mock_process.mock_calls) == 3 + assert len(mock_process.mock_calls) == 2 assert mock_process.mock_calls[0][1][1] == zeroconf.requirements diff --git a/tests/test_runner.py b/tests/test_runner.py index 90678454adf..c61b8ed5628 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -2,6 +2,7 @@ import asyncio from collections.abc import Iterator +import subprocess import threading from unittest.mock import patch @@ -104,7 +105,7 @@ def test_run_does_not_block_forever_with_shielded_task( try: await 
asyncio.sleep(2) except asyncio.CancelledError: - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 async def async_shielded(*_): try: @@ -141,8 +142,7 @@ async def test_unhandled_exception_traceback( async def _unhandled_exception(): raised.set() - # pylint: disable-next=broad-exception-raised - raise Exception("This is unhandled") + raise Exception("This is unhandled") # noqa: TRY002 try: hass.loop.set_debug(True) @@ -169,21 +169,21 @@ def test_enable_posix_spawn() -> None: yield from packaging.tags.parse_tag("cp311-cp311-musllinux_1_1_x86_64") with ( - patch.object(runner.subprocess, "_USE_POSIX_SPAWN", False), + patch.object(subprocess, "_USE_POSIX_SPAWN", False), patch( "homeassistant.runner.packaging.tags.sys_tags", side_effect=_mock_sys_tags_musl, ), ): runner._enable_posix_spawn() - assert runner.subprocess._USE_POSIX_SPAWN is True + assert subprocess._USE_POSIX_SPAWN is True with ( - patch.object(runner.subprocess, "_USE_POSIX_SPAWN", False), + patch.object(subprocess, "_USE_POSIX_SPAWN", False), patch( "homeassistant.runner.packaging.tags.sys_tags", side_effect=_mock_sys_tags_any, ), ): runner._enable_posix_spawn() - assert runner.subprocess._USE_POSIX_SPAWN is False + assert subprocess._USE_POSIX_SPAWN is False diff --git a/tests/test_setup.py b/tests/test_setup.py index 4ff0f465e21..4b7df9563ba 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -10,17 +10,19 @@ import voluptuous as vol from homeassistant import config_entries, loader, setup from homeassistant.const import EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START -from homeassistant.core import CoreState, HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import discovery, translation -from homeassistant.helpers.config_validation import ( - PLATFORM_SCHEMA, - PLATFORM_SCHEMA_BASE, +from homeassistant.core import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + CoreState, + HomeAssistant, + callback, 
) +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv, discovery, translation from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) +from homeassistant.helpers.issue_registry import IssueRegistry from .common import ( MockConfigEntry, @@ -88,8 +90,8 @@ async def test_validate_platform_config( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test validating platform configuration.""" - platform_schema = PLATFORM_SCHEMA.extend({"hello": str}) - platform_schema_base = PLATFORM_SCHEMA_BASE.extend({}) + platform_schema = cv.PLATFORM_SCHEMA.extend({"hello": str}) + platform_schema_base = cv.PLATFORM_SCHEMA_BASE.extend({}) mock_integration( hass, MockModule("platform_conf", platform_schema_base=platform_schema_base), @@ -149,8 +151,8 @@ async def test_validate_platform_config_2( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test component PLATFORM_SCHEMA_BASE prio over PLATFORM_SCHEMA.""" - platform_schema = PLATFORM_SCHEMA.extend({"hello": str}) - platform_schema_base = PLATFORM_SCHEMA_BASE.extend({"hello": "world"}) + platform_schema = cv.PLATFORM_SCHEMA.extend({"hello": str}) + platform_schema_base = cv.PLATFORM_SCHEMA_BASE.extend({"hello": "world"}) mock_integration( hass, MockModule( @@ -183,8 +185,8 @@ async def test_validate_platform_config_3( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test fallback to component PLATFORM_SCHEMA.""" - component_schema = PLATFORM_SCHEMA_BASE.extend({"hello": str}) - platform_schema = PLATFORM_SCHEMA.extend({"cheers": str, "hello": "world"}) + component_schema = cv.PLATFORM_SCHEMA_BASE.extend({"hello": str}) + platform_schema = cv.PLATFORM_SCHEMA.extend({"cheers": str, "hello": "world"}) mock_integration( hass, MockModule("platform_conf", platform_schema=component_schema) ) @@ -210,8 +212,8 @@ async def test_validate_platform_config_3( async def 
test_validate_platform_config_4(hass: HomeAssistant) -> None: """Test entity_namespace in PLATFORM_SCHEMA.""" - component_schema = PLATFORM_SCHEMA_BASE - platform_schema = PLATFORM_SCHEMA + component_schema = cv.PLATFORM_SCHEMA_BASE + platform_schema = cv.PLATFORM_SCHEMA mock_integration( hass, MockModule("platform_conf", platform_schema_base=component_schema), @@ -240,9 +242,26 @@ async def test_validate_platform_config_4(hass: HomeAssistant) -> None: hass.config.components.remove("platform_conf") -async def test_component_not_found(hass: HomeAssistant) -> None: - """setup_component should not crash if component doesn't exist.""" +async def test_component_not_found( + hass: HomeAssistant, issue_registry: IssueRegistry +) -> None: + """setup_component should raise a repair issue if component doesn't exist.""" assert await setup.async_setup_component(hass, "non_existing", {}) is False + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, "integration_not_found.non_existing" + ) + assert issue + assert issue.translation_key == "integration_not_found" + + +async def test_component_missing_not_raising_in_safe_mode( + hass: HomeAssistant, issue_registry: IssueRegistry +) -> None: + """setup_component should not raise an issue if component doesn't exist in safe.""" + hass.config.safe_mode = True + assert await setup.async_setup_component(hass, "non_existing", {}) is False + assert len(issue_registry.issues) == 0 async def test_component_not_double_initialized(hass: HomeAssistant) -> None: @@ -328,7 +347,7 @@ async def test_component_exception_setup(hass: HomeAssistant) -> None: def exception_setup(hass, config): """Raise exception.""" - raise Exception("fail!") # pylint: disable=broad-exception-raised + raise Exception("fail!") # noqa: TRY002 mock_integration(hass, MockModule("comp", setup=exception_setup)) @@ -342,7 +361,7 @@ async def test_component_base_exception_setup(hass: HomeAssistant) -> None: def 
exception_setup(hass, config): """Raise exception.""" - raise BaseException("fail!") # pylint: disable=broad-exception-raised + raise BaseException("fail!") # noqa: TRY002 mock_integration(hass, MockModule("comp", setup=exception_setup)) @@ -362,8 +381,7 @@ async def test_component_setup_with_validation_and_dependency( """Test that config is passed in.""" if config.get("comp_a", {}).get("valid", False): return True - # pylint: disable-next=broad-exception-raised - raise Exception(f"Config not passed in: {config}") + raise Exception(f"Config not passed in: {config}") # noqa: TRY002 platform = MockPlatform() @@ -386,7 +404,9 @@ async def test_component_setup_with_validation_and_dependency( async def test_platform_specific_config_validation(hass: HomeAssistant) -> None: """Test platform that specifies config.""" - platform_schema = PLATFORM_SCHEMA.extend({"valid": True}, extra=vol.PREVENT_EXTRA) + platform_schema = cv.PLATFORM_SCHEMA.extend( + {"valid": True}, extra=vol.PREVENT_EXTRA + ) mock_setup = Mock(spec_set=True) @@ -533,7 +553,7 @@ async def test_component_warn_slow_setup(hass: HomeAssistant) -> None: async def test_platform_no_warn_slow(hass: HomeAssistant) -> None: """Do not warn for long entity setup time.""" mock_integration( - hass, MockModule("test_component1", platform_schema=PLATFORM_SCHEMA) + hass, MockModule("test_component1", platform_schema=cv.PLATFORM_SCHEMA) ) with patch.object(hass.loop, "call_later") as mock_call: result = await setup.async_setup_component(hass, "test_component1", {}) diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index b4b8cfa4b6d..04d6db509e0 100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -1,9 +1,11 @@ """Aiohttp test utils.""" import asyncio +from collections.abc import Iterator from contextlib import contextmanager from http import HTTPStatus import re +from typing import Any from unittest import mock from urllib.parse import parse_qs @@ -18,6 +20,7 @@ from multidict import 
CIMultiDict from yarl import URL from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import HomeAssistant from homeassistant.helpers.json import json_dumps from homeassistant.util.json import json_loads @@ -36,7 +39,7 @@ def mock_stream(data): class AiohttpClientMocker: """Mock Aiohttp client requests.""" - def __init__(self): + def __init__(self) -> None: """Initialize the request mocker.""" self._mocks = [] self._cookies = {} @@ -173,7 +176,7 @@ class AiohttpClientMockResponse: headers=None, side_effect=None, closing=None, - ): + ) -> None: """Initialize a fake response.""" if json is not None: text = json_dumps(json) @@ -296,11 +299,11 @@ class AiohttpClientMockResponse: @contextmanager -def mock_aiohttp_client(): +def mock_aiohttp_client() -> Iterator[AiohttpClientMocker]: """Context manager to mock aiohttp client.""" mocker = AiohttpClientMocker() - def create_session(hass, *args, **kwargs): + def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: session = mocker.create_session(hass.loop) async def close_session(event): @@ -326,7 +329,7 @@ class MockLongPollSideEffect: If queue is empty, will await until done. 
""" - def __init__(self): + def __init__(self) -> None: """Initialize the queue.""" self.semaphore = asyncio.Semaphore(0) self.response_list = [] diff --git a/tests/util/test_async.py b/tests/util/test_async.py index ac927b1375a..17349cf6ff9 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -14,7 +14,9 @@ from tests.common import extract_stack_to_frame @patch("concurrent.futures.Future") @patch("threading.get_ident") -def test_run_callback_threadsafe_from_inside_event_loop(mock_ident, _) -> None: +def test_run_callback_threadsafe_from_inside_event_loop( + mock_ident: MagicMock, mock_future: MagicMock +) -> None: """Testing calling run_callback_threadsafe from inside an event loop.""" callback = MagicMock() @@ -197,3 +199,17 @@ async def test_create_eager_task_from_thread_in_integration( "from a thread at homeassistant/components/hue/light.py, line 23: " "self.light.is_on" ) in caplog.text + + +async def test_get_scheduled_timer_handles(hass: HomeAssistant) -> None: + """Test get_scheduled_timer_handles returns all scheduled timer handles.""" + loop = hass.loop + timer_handle = loop.call_later(10, lambda: None) + timer_handle2 = loop.call_later(5, lambda: None) + timer_handle3 = loop.call_later(15, lambda: None) + + handles = hasync.get_scheduled_timer_handles(loop) + assert set(handles).issuperset({timer_handle, timer_handle2, timer_handle3}) + timer_handle.cancel() + timer_handle2.cancel() + timer_handle3.cancel() diff --git a/tests/util/test_color.py b/tests/util/test_color.py index 53c243a1e4f..c8a5e0c8587 100644 --- a/tests/util/test_color.py +++ b/tests/util/test_color.py @@ -200,17 +200,17 @@ def test_color_hs_to_xy() -> None: def test_rgb_hex_to_rgb_list() -> None: """Test rgb_hex_to_rgb_list.""" - assert [255, 255, 255] == color_util.rgb_hex_to_rgb_list("ffffff") + assert color_util.rgb_hex_to_rgb_list("ffffff") == [255, 255, 255] - assert [0, 0, 0] == color_util.rgb_hex_to_rgb_list("000000") + assert 
color_util.rgb_hex_to_rgb_list("000000") == [0, 0, 0] - assert [255, 255, 255, 255] == color_util.rgb_hex_to_rgb_list("ffffffff") + assert color_util.rgb_hex_to_rgb_list("ffffffff") == [255, 255, 255, 255] - assert [0, 0, 0, 0] == color_util.rgb_hex_to_rgb_list("00000000") + assert color_util.rgb_hex_to_rgb_list("00000000") == [0, 0, 0, 0] - assert [51, 153, 255] == color_util.rgb_hex_to_rgb_list("3399ff") + assert color_util.rgb_hex_to_rgb_list("3399ff") == [51, 153, 255] - assert [51, 153, 255, 0] == color_util.rgb_hex_to_rgb_list("3399ff00") + assert color_util.rgb_hex_to_rgb_list("3399ff00") == [51, 153, 255, 0] def test_color_name_to_rgb_valid_name() -> None: diff --git a/tests/util/test_dt.py b/tests/util/test_dt.py index 6caca092517..0e8432bbb83 100644 --- a/tests/util/test_dt.py +++ b/tests/util/test_dt.py @@ -294,12 +294,12 @@ def test_parse_time_expression() -> None: assert list(range(0, 60, 5)) == dt_util.parse_time_expression("/5", 0, 59) - assert [1, 2, 3] == dt_util.parse_time_expression([2, 1, 3], 0, 59) + assert dt_util.parse_time_expression([2, 1, 3], 0, 59) == [1, 2, 3] assert list(range(24)) == dt_util.parse_time_expression("*", 0, 23) - assert [42] == dt_util.parse_time_expression(42, 0, 59) - assert [42] == dt_util.parse_time_expression("42", 0, 59) + assert dt_util.parse_time_expression(42, 0, 59) == [42] + assert dt_util.parse_time_expression("42", 0, 59) == [42] with pytest.raises(ValueError): dt_util.parse_time_expression(61, 0, 60) diff --git a/tests/util/test_json.py b/tests/util/test_json.py index 3a314bb5a1b..05dab46002d 100644 --- a/tests/util/test_json.py +++ b/tests/util/test_json.py @@ -131,34 +131,6 @@ def test_json_loads_object() -> None: json_loads_object("null") -async def test_deprecated_test_find_unserializable_data( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test deprecated test_find_unserializable_data logs a warning.""" - # pylint: disable-next=hass-deprecated-import,import-outside-toplevel - from 
homeassistant.util.json import find_paths_unserializable_data - - find_paths_unserializable_data(1) - assert ( - "uses find_paths_unserializable_data from homeassistant.util.json" - in caplog.text - ) - assert "should be updated to use homeassistant.helpers.json module" in caplog.text - - -async def test_deprecated_save_json( - caplog: pytest.LogCaptureFixture, tmp_path: Path -) -> None: - """Test deprecated save_json logs a warning.""" - # pylint: disable-next=hass-deprecated-import,import-outside-toplevel - from homeassistant.util.json import save_json - - fname = tmp_path / "test1.json" - save_json(fname, TEST_JSON_A) - assert "uses save_json from homeassistant.util.json" in caplog.text - assert "should be updated to use homeassistant.helpers.json module" in caplog.text - - async def test_loading_derived_class() -> None: """Test loading data from classes derived from str.""" diff --git a/tests/util/test_logging.py b/tests/util/test_logging.py index 4667dbcbec8..795444c89bd 100644 --- a/tests/util/test_logging.py +++ b/tests/util/test_logging.py @@ -80,8 +80,7 @@ async def test_async_create_catching_coro( """Test exception logging of wrapped coroutine.""" async def job(): - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad coroutine") + raise Exception("This is a bad coroutine") # noqa: TRY002 hass.async_create_task(logging_util.async_create_catching_coro(job())) await hass.async_block_till_done() diff --git a/tests/util/test_loop.py b/tests/util/test_loop.py index 585f32a965f..3ff7128938f 100644 --- a/tests/util/test_loop.py +++ b/tests/util/test_loop.py @@ -1,5 +1,7 @@ """Tests for async util methods from Python source.""" +from collections.abc import Generator +import contextlib import threading from unittest.mock import Mock, patch @@ -15,57 +17,14 @@ def banned_function(): """Mock banned function.""" -async def test_raise_for_blocking_call_async() -> None: - """Test raise_for_blocking_call detects when called from event loop 
without integration context.""" - with pytest.raises(RuntimeError): - haloop.raise_for_blocking_call(banned_function) - - -async def test_raise_for_blocking_call_async_non_strict_core( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test non_strict_core raise_for_blocking_call detects from event loop without integration context.""" - haloop.raise_for_blocking_call(banned_function, strict_core=False) - assert "Detected blocking call to banned_function" in caplog.text - assert "Traceback (most recent call last)" in caplog.text - assert ( - "Please create a bug report at https://github.com/home-assistant/core/issues" - in caplog.text - ) - assert ( - "For developers, please see " - "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" - ) in caplog.text - - -async def test_raise_for_blocking_call_async_integration( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test raise_for_blocking_call detects and raises when called from event loop from integration context.""" - frames = extract_stack_to_frame( - [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="23", - line="do_something()", - ), - Mock( - filename="/home/paulus/homeassistant/components/hue/light.py", - lineno="23", - line="self.light.is_on", - ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="2", - line="something()", - ), - ] - ) +@contextlib.contextmanager +def patch_get_current_frame(stack: list[Mock]) -> Generator[None]: + """Patch get_current_frame.""" + frames = extract_stack_to_frame(stack) with ( - pytest.raises(RuntimeError), patch( "homeassistant.helpers.frame.linecache.getline", - return_value="self.light.is_on", + return_value=stack[1].line, ), patch( "homeassistant.util.loop._get_line_from_cache", @@ -79,13 +38,104 @@ async def test_raise_for_blocking_call_async_integration( "homeassistant.helpers.frame.get_current_frame", return_value=frames, ), + ): + yield + + +async def test_raise_for_blocking_call_async() -> None: + 
"""Test raise_for_blocking_call detects when called from event loop without integration context.""" + with pytest.raises(RuntimeError): + haloop.raise_for_blocking_call(banned_function) + + +async def test_raise_for_blocking_call_async_non_strict_core( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test non_strict_core raise_for_blocking_call detects from event loop without integration context.""" + stack = [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="12", + line="do_something()", + ), + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="12", + line="self.light.is_on", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + with patch_get_current_frame(stack): + haloop.raise_for_blocking_call(banned_function, strict_core=False) + assert "Detected blocking call to banned_function" in caplog.text + assert "Traceback (most recent call last)" in caplog.text + assert ( + "Please create a bug report at https://github.com/home-assistant/core/issues" + in caplog.text + ) + assert ( + "For developers, please see " + "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" + ) in caplog.text + + warnings = [ + record for record in caplog.get_records("call") if record.levelname == "WARNING" + ] + assert len(warnings) == 1 + caplog.clear() + + # Second call should log at debug + with patch_get_current_frame(stack): + haloop.raise_for_blocking_call(banned_function, strict_core=False) + + warnings = [ + record for record in caplog.get_records("call") if record.levelname == "WARNING" + ] + assert len(warnings) == 0 + assert ( + "For developers, please see " + "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" + ) in caplog.text + + # no expensive traceback on debug + assert "Traceback (most recent call last)" not in caplog.text + + +async def test_raise_for_blocking_call_async_integration( + caplog: 
pytest.LogCaptureFixture, +) -> None: + """Test raise_for_blocking_call detects and raises when called from event loop from integration context.""" + stack = [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="18", + line="do_something()", + ), + Mock( + filename="/home/paulus/homeassistant/components/hue/light.py", + lineno="18", + line="self.light.is_on", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="8", + line="something()", + ), + ] + with ( + pytest.raises(RuntimeError), + patch_get_current_frame(stack), ): haloop.raise_for_blocking_call(banned_function) assert ( "Detected blocking call to banned_function with args None" " inside the event loop by integration" - " 'hue' at homeassistant/components/hue/light.py, line 23: self.light.is_on " - "(offender: /home/paulus/aiohue/lights.py, line 2: mock_line), please create " + " 'hue' at homeassistant/components/hue/light.py, line 18: self.light.is_on " + "(offender: /home/paulus/aiohue/lights.py, line 8: mock_line), please create " "a bug report at https://github.com/home-assistant/core/issues?" 
"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" in caplog.text ) @@ -99,55 +149,37 @@ async def test_raise_for_blocking_call_async_integration_non_strict( caplog: pytest.LogCaptureFixture, ) -> None: """Test raise_for_blocking_call detects when called from event loop from integration context.""" - frames = extract_stack_to_frame( - [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="23", - line="do_something()", - ), - Mock( - filename="/home/paulus/homeassistant/components/hue/light.py", - lineno="23", - line="self.light.is_on", - ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="2", - line="something()", - ), - ] - ) - with ( - patch( - "homeassistant.helpers.frame.linecache.getline", - return_value="self.light.is_on", + stack = [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="15", + line="do_something()", ), - patch( - "homeassistant.util.loop._get_line_from_cache", - return_value="mock_line", + Mock( + filename="/home/paulus/homeassistant/components/hue/light.py", + lineno="15", + line="self.light.is_on", ), - patch( - "homeassistant.util.loop.get_current_frame", - return_value=frames, + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="1", + line="something()", ), - patch( - "homeassistant.helpers.frame.get_current_frame", - return_value=frames, - ), - ): + ] + with patch_get_current_frame(stack): haloop.raise_for_blocking_call(banned_function, strict=False) + assert ( "Detected blocking call to banned_function with args None" " inside the event loop by integration" - " 'hue' at homeassistant/components/hue/light.py, line 23: self.light.is_on " - "(offender: /home/paulus/aiohue/lights.py, line 2: mock_line), " + " 'hue' at homeassistant/components/hue/light.py, line 15: self.light.is_on " + "(offender: /home/paulus/aiohue/lights.py, line 1: mock_line), " "please create a bug report at https://github.com/home-assistant/core/issues?" 
"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" in caplog.text ) assert "Traceback (most recent call last)" in caplog.text assert ( - 'File "/home/paulus/homeassistant/components/hue/light.py", line 23' + 'File "/home/paulus/homeassistant/components/hue/light.py", line 15' in caplog.text ) assert ( @@ -158,62 +190,62 @@ async def test_raise_for_blocking_call_async_integration_non_strict( "For developers, please see " "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" ) in caplog.text + warnings = [ + record for record in caplog.get_records("call") if record.levelname == "WARNING" + ] + assert len(warnings) == 1 + caplog.clear() + + # Second call should log at debug + with patch_get_current_frame(stack): + haloop.raise_for_blocking_call(banned_function, strict=False) + + warnings = [ + record for record in caplog.get_records("call") if record.levelname == "WARNING" + ] + assert len(warnings) == 0 + assert ( + "For developers, please see " + "https://developers.home-assistant.io/docs/asyncio_blocking_operations/#banned_function" + ) in caplog.text + # no expensive traceback on debug + assert "Traceback (most recent call last)" not in caplog.text async def test_raise_for_blocking_call_async_custom( caplog: pytest.LogCaptureFixture, ) -> None: """Test raise_for_blocking_call detects when called from event loop with custom component context.""" - frames = extract_stack_to_frame( - [ - Mock( - filename="/home/paulus/homeassistant/core.py", - lineno="23", - line="do_something()", - ), - Mock( - filename="/home/paulus/config/custom_components/hue/light.py", - lineno="23", - line="self.light.is_on", - ), - Mock( - filename="/home/paulus/aiohue/lights.py", - lineno="2", - line="something()", - ), - ] - ) - with ( - pytest.raises(RuntimeError), - patch( - "homeassistant.helpers.frame.linecache.getline", - return_value="self.light.is_on", + stack = [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="12", + 
line="do_something()", ), - patch( - "homeassistant.util.loop._get_line_from_cache", - return_value="mock_line", + Mock( + filename="/home/paulus/config/custom_components/hue/light.py", + lineno="12", + line="self.light.is_on", ), - patch( - "homeassistant.util.loop.get_current_frame", - return_value=frames, + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="3", + line="something()", ), - patch( - "homeassistant.helpers.frame.get_current_frame", - return_value=frames, - ), - ): + ] + with pytest.raises(RuntimeError), patch_get_current_frame(stack): haloop.raise_for_blocking_call(banned_function) assert ( "Detected blocking call to banned_function with args None" " inside the event loop by custom " - "integration 'hue' at custom_components/hue/light.py, line 23: self.light.is_on" - " (offender: /home/paulus/aiohue/lights.py, line 2: mock_line), " + "integration 'hue' at custom_components/hue/light.py, line 12: self.light.is_on" + " (offender: /home/paulus/aiohue/lights.py, line 3: mock_line), " "please create a bug report at https://github.com/home-assistant/core/issues?" 
"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" ) in caplog.text assert "Traceback (most recent call last)" in caplog.text assert ( - 'File "/home/paulus/config/custom_components/hue/light.py", line 23' + 'File "/home/paulus/config/custom_components/hue/light.py", line 12' in caplog.text ) assert ( diff --git a/tests/util/test_process.py b/tests/util/test_process.py index ae28f5d82fc..999abe0476f 100644 --- a/tests/util/test_process.py +++ b/tests/util/test_process.py @@ -1,20 +1,25 @@ """Test process util.""" +from functools import partial import os import subprocess import pytest +from homeassistant.core import HomeAssistant from homeassistant.util import process -async def test_kill_process() -> None: +async def test_kill_process(hass: HomeAssistant) -> None: """Test killing a process.""" - sleeper = subprocess.Popen( - "sleep 1000", - shell=True, # noqa: S602 # shell by design - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, + sleeper = await hass.async_add_executor_job( + partial( # noqa: S604 # shell by design + subprocess.Popen, + "sleep 1000", + shell=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) ) pid = sleeper.pid diff --git a/tests/util/test_timeout.py b/tests/util/test_timeout.py index 797c849db3c..1c4b06d99b4 100644 --- a/tests/util/test_timeout.py +++ b/tests/util/test_timeout.py @@ -25,7 +25,7 @@ async def test_simple_global_timeout_with_executor_job(hass: HomeAssistant) -> N with pytest.raises(TimeoutError): async with timeout.async_timeout(0.1): - await hass.async_add_executor_job(lambda: time.sleep(0.2)) + await hass.async_add_executor_job(time.sleep, 0.2) async def test_simple_global_timeout_freeze() -> None: @@ -133,7 +133,7 @@ async def test_mix_global_timeout_freeze_and_zone_freeze_inside_executor_job_sec async with timeout.async_timeout(0.1): async with timeout.async_timeout(0.2, zone_name="recorder"): await hass.async_add_executor_job(_some_sync_work) - await hass.async_add_executor_job(lambda: 
time.sleep(0.2)) + await hass.async_add_executor_job(time.sleep, 0.2) async def test_simple_global_timeout_freeze_with_executor_job( @@ -143,7 +143,7 @@ async def test_simple_global_timeout_freeze_with_executor_job( timeout = TimeoutManager() async with timeout.async_timeout(0.2), timeout.async_freeze(): - await hass.async_add_executor_job(lambda: time.sleep(0.3)) + await hass.async_add_executor_job(time.sleep, 0.3) async def test_simple_global_timeout_freeze_reset() -> None: @@ -338,3 +338,24 @@ async def test_simple_zone_timeout_zone_with_timeout_exeption() -> None: raise RuntimeError await asyncio.sleep(0.3) + + +async def test_multiple_global_freezes(hass: HomeAssistant) -> None: + """Test multiple global freezes.""" + timeout = TimeoutManager() + + async def background(delay: float) -> None: + async with timeout.async_freeze(): + await asyncio.sleep(delay) + + async with timeout.async_timeout(0.1): + task = hass.async_create_task(background(0.2)) + async with timeout.async_freeze(): + await asyncio.sleep(0.1) + await task + + async with timeout.async_timeout(0.1): + task = hass.async_create_task(background(0.2)) + async with timeout.async_freeze(): + await asyncio.sleep(0.3) + await task diff --git a/tests/util/test_unit_system.py b/tests/util/test_unit_system.py index 033631563f4..15500777212 100644 --- a/tests/util/test_unit_system.py +++ b/tests/util/test_unit_system.py @@ -15,6 +15,7 @@ from homeassistant.const import ( WIND_SPEED, UnitOfLength, UnitOfMass, + UnitOfPrecipitationDepth, UnitOfPressure, UnitOfSpeed, UnitOfTemperature, @@ -42,7 +43,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -55,7 +56,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - 
accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=INVALID_UNIT, mass=UnitOfMass.GRAMS, @@ -68,7 +69,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -81,7 +82,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -94,7 +95,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=INVALID_UNIT, @@ -107,7 +108,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 6ea3f1437af..ece65504ed6 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -1,5 +1,6 @@ """Test Home Assistant yaml loader.""" +from collections.abc import Generator import importlib import io import os @@ -9,7 +10,6 @@ import unittest from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator import voluptuous as vol import yaml as pyyaml @@ -566,8 +566,8 @@ def test_no_recursive_secrets() -> None: def test_input_class() -> None: """Test input class.""" - yaml_input = 
yaml_loader.Input("hello") - yaml_input2 = yaml_loader.Input("hello") + yaml_input = yaml.Input("hello") + yaml_input2 = yaml.Input("hello") assert yaml_input.name == "hello" assert yaml_input == yaml_input2